+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.vf7DLxQWFF --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
[2 ymakes processing] [7860/7860 modules configured] [2486/5219 modules rendered]
[2 ymakes processing] [7860/7860 modules configured] [5188/5219 modules rendered]
[2 ymakes processing] [7860/7860 modules configured] [5219/5219 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7866/7866 modules configured] [5219/5219 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 3.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg
| 3.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a
| 4.4%| PREPARE $(VCS) - 0 bytes
| 7.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a
| 7.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a
| 8.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a
| 8.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/libessentials-parser-proto_ast.a
| 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a
| 8.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a
| 8.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a
| 8.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a
| 8.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a
| 9.1%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.a
| 8.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.global.a
| 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a
| 9.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a
| 9.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a
|10.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/ut_pg/scheme_tablecell_pg_ut.cpp
|10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a
|11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a
|11.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a
|11.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a
[... several hundred successful [AR]/[CC]/[LD] {BAZEL_DOWNLOAD} progress entries between ~12% and ~50% elided; the FAILED targets and toolchain PREPARE steps from that range are kept below ...]
|15.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots
|22.8%| PREPARE $(YMAKE_PYTHON3-4256832079) - 0 bytes
|27.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
|33.1%| PREPARE $(LLD_ROOT-2644097164) - 0 bytes
|36.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb
|39.8%| PREPARE $(PYTHON) - 0 bytes
|40.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut
|41.3%| PREPARE $(CLANG_FORMAT-2313326005) - 0 bytes
|43.2%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) - 0 bytes
|43.5%| PREPARE $(FLAKE8_PY2-2255386470) - 0 bytes
|46.2%| PREPARE $(FLAKE8_PY3-1472545107) - 0 bytes
|49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node
|49.9%| [AR] {BAZEL_DOWNLOAD}
$(B)/library/cpp/resource/liblibrary-cpp-resource.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |50.7%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |50.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |50.7%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |50.8%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |50.7%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/flake8_linter/flake8_linter |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |50.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |50.6%| PREPARE $(TEST_TOOL_HOST-sbr:7480276291) - 0 bytes |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/crypto/liblibs-openssl-crypto.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |50.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp 
|51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |50.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a 
|51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/abstract/liblibrary-formats-arrow-accessor-abstract.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/impl/libclient-ydb_query-impl.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/impl/libclient-ydb_persqueue_core-impl.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_import/libcpp-client-ydb_import.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_params/libcpp-client-ydb_params.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_operation/libcpp-client-ydb_operation.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider_impl.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |51.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider_context.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_wide_flow.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_block_input_filter.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_integration.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_block_input.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_gateway.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_key.cpp |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/providers/yt/provider/yql_yt_op_settings.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_epoch.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_op_hash.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_intent_determination.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_join_reorder.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_io_discovery.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_extension/libcpp-client-ydb_extension.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_table_desc.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_peephole.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_optimize.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_discovery/libcpp-client-ydb_discovery.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Demangle/libllvm14-lib-Demangle.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IRReader/libllvm14-lib-IRReader.a |51.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/libllvm14-lib-ExecutionEngine.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_helpers.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Linker/libllvm14-lib-Linker.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_table.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_coordination/libcpp-client-ydb_coordination.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_join_impl.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCParser/liblib-MC-MCParser.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ProfileData/libllvm14-lib-ProfileData.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/libllvm14-lib-MC.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/TextAPI/libllvm14-lib-TextAPI.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |51.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/common/transactions/libdata_sharing-common-transactions.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Support/libllvm14-lib-Support.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |51.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Analysis/libllvm14-lib-Analysis.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Utils/liblib-Transforms-Utils.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/IPO/liblib-Transforms-IPO.a |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |51.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |52.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/libcore-io_formats-arrow.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/libllvm14-lib-CodeGen.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |52.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_rate_limiter/libcpp-client-ydb_rate_limiter.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |52.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |52.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bsconfig/libydb-services-bsconfig.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx512/liblibs-hyperscan-runtime_avx512.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |51.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |50.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |50.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |50.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/json/liblibrary-cpp-json.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |49.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor_settings.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_effects.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_arrow_memory_pool.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_program_builder.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_table.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a 
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a 
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |48.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |47.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm14/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/status/libclient-ydb_types-status.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Remarks/libllvm14-lib-Remarks.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/libllvm14-lib-Target.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |46.8%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Object/libllvm14-lib-Object.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/stat/uploader/libproviders-stat-uploader.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |46.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/AsmParser/libllvm14-lib-AsmParser.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/codegen/libyt-codec-codegen.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IR/libllvm14-lib-IR.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/codegen/libyt-codec-codegen.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.global.a 
|45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_stats/libclient-impl-ydb_stats.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/liblib-Target-X86.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |45.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |45.4%| PREPARE $(WITH_JDK11-sbr:6936090488) - 0 bytes |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |43.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/job/libproviders-yt-job.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |43.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/hash/libyt-lib-hash.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_proto/libcpp-client-ydb_proto.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |44.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |44.2%| PREPARE $(JDK_DEFAULT-4020545899) - 0 bytes |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |44.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |44.2%| PREPARE $(BLACK_LINTER-sbr:6648883615) - 0 bytes |44.2%| PREPARE $(JDK11-1325468316) - 0 bytes |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |44.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_ss_tasks/libcpp-client-ydb_ss_tasks.a |44.2%| PREPARE $(CLANG-1922233694) - 0 bytes |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/client/liblibrary-grpc-client.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/libcpp-client-ydb_query.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_scheme/libcpp-client-ydb_scheme.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_datastreams/libcpp-client-ydb_datastreams.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |44.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |44.5%| PREPARE $(WITH_JDK17-sbr:6941855347) - 0 bytes |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |44.5%| PREPARE $(CLANG14-1922233694) - 0 bytes |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |44.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |44.2%| PREPARE $(WITH_JDK-sbr:6941855347) - 0 bytes |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |43.7%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/black_linter/black_linter |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |43.5%| PREPARE $(JDK17-4020545899) - 0 bytes |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/throughput_throttler.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/compression/public.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |43.3%| PREPARE $(GDB) - 0 bytes |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp 
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/crash_handler.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error_code.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |42.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp 
|42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |41.4%| PREPARE $(CLANG-1735056821) - 0 bytes |41.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |41.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |41.5%| PREPARE $(CLANG18-390461695) - 0 bytes |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |41.4%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |41.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm14/libminikql-codegen-llvm14.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |41.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm14/libminikql-computation-llvm14.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/libpublic-lib-operation_id.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/requests.cpp |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/protocol.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/queue_client/queue_rowset.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_map.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join_imp.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |40.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |40.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_getelem.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/chunk_client/helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain1_map.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_coalesce.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_decimal.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_coalesce.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_discard.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_div.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_combine.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_map_join.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_sum.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromstring.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_filter.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lazy_list.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lookup.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |40.2%| PREPARE $(CLANG16-1380963495) - 0 bytes |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_nop.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_buffered_dynamic_table_writer.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/packet.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_removemember.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_range.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_size.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_source.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_take.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_scalar_apply.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_way.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_udf.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chopper.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_while.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_zip.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_filter.cpp 
|39.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_condense.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_combine.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_exists.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_withcontext.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_collect.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_append.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_factory.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_addmember.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_aggrcount.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_count.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_apply.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_func.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_next_value.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_check_args.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_just.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chopper.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_container.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_factory.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain_map.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_skiptake.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_logical.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_callable.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_some.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_contains.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_dictitems.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_enumerate.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_element.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense1.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ensure.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromyson.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_exists.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mul.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_extend.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flatmap.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multimap.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterable.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold1.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_group.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_frombytes.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flow.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mod.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_guess.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_if.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_blocks.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_if.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_compress.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hasitems.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_invoke.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_heap.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterator.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hopping.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_logical.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_top.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_length.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join_dict.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multihopping.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_mapnext.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_minmax.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_safe_circular_buffer.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_listfromrange.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tostring.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_random.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_match_recognize.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_pickle.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_null.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_now.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map_join.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_prepend.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reverse.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_queue.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reduce.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_round.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_replicate.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_rh_hash.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tooptional.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_seq.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_to_list.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_sort.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_skip.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_switch.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_state.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_toindexdict.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_time_order_recover.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_timezone.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tobytes.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_unwrap.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_visitall.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_varitem.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_weakmember.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chain_map.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ifpresent.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_top_sort.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |39.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/libpy3simple_json_diff.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/libpy3simple_json_diff.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_driver/libcpp-client-ydb_driver.a |38.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg.cpp |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_common_client/libcpp-client-ydb_common_client.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_export/libcpp-client-ydb_export.a |38.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/libpy3statistics_workload.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/BinaryFormat/libllvm14-lib-BinaryFormat.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |38.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/liblib-operation_id-protos.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |38.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_todict.cpp |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/impl/libclient-ydb_table-impl.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/impl/libclient-ydb_persqueue_public-impl.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |38.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/jwt/libpublic-lib-jwt.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam/common/libclient-iam-common.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam_private/libcpp-client-iam_private.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |38.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/draft/libcpp-client-draft.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |38.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/include/libclient-ydb_persqueue_public-include.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_result/libcpp-client-ydb_result.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |38.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |38.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/libydb_persqueue_public-ut-ut_utils.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/b8c5754a195bdea98cbf907af0.auxcpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |38.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |38.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/_b8c5754a195bdea98cbf907af0.yasm |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.a |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut/ydb-core-erasure-ut |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/ut/ydb-core-blobstorage-crypto-ut |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/ut/ydb-core-fq-libs-signer-ut |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.a |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator/ydb-library-yaml_config-validator-ut-validator |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_builder/yaml_config-validator-ut-validator_builder |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_monitoring/libcpp-client-ydb_monitoring.a |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |38.1%| [DL] $(B)/canondata_storage/1775059/2d8e13650c57e1489831ef0318288079029e2e62/resource.tar.gz{, .log} |38.1%| [DL] $(B)/canondata_storage/1899731/2ec8224db091f2a7362c5e4ce595bc50329b8311/resource.tar.gz{, .log} |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |38.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |38.2%| [DL] $(B)/canondata_storage/1942173/c4d7dbc720e57397caf847cd2616b1362110ddd2/resource.tar.gz{, .log} |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |38.2%| [DL] 
$(B)/canondata_storage/1784826/8a1791d883c330f1ed18937b9d7a33af5031d735/resource.tar.gz{, .log} |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |38.2%| [DL] $(B)/canondata_storage/1784117/7efe22d363ef190706ea7b98ae766129bbe108ed/resource.tar.gz{, .log} |38.2%| [DL] $(B)/canondata_storage/1847551/a859756d282f251d1600ae15b6c205384047fb83/resource.tar.gz{, .log} |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |38.2%| [DL] $(B)/canondata_storage/1937367/518bbcf510ad7a43c5e77746bafd21ed0e3fdc6e/resource.tar.gz{, .log} |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.a |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |38.2%| [DL] $(B)/canondata_storage/1031349/d6f6fbd690e2387ef546b9d231ad34955cbea3f2/resource.tar.gz{, .log} |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm14/libminikql-invoke_builtins-llvm14.a |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |38.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/ut/ydb-core-util-ut |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.global.a |38.2%| [DL] $(B)/canondata_storage/937458/e65d62f7141c4085b03b6b3de39e5ba9d5868c49/resource.tar.gz{, .log} |38.3%| [DL] $(B)/canondata_storage/1130705/984266b47af8d517834a10674242c9e900f41724/resource.tar.gz{, .log} |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/libpy3statistics_workload.global.a |38.3%| [DL] $(B)/canondata_storage/1937424/4471ebb7a52881a5c9ab19f8481dc8d0c8b2c21e/resource.tar.gz{, .log} |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |38.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/cluster_ordering-ut |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |38.3%| [DL] $(B)/canondata_storage/1809005/15480770433f84d119d7fe097c7c74e81f5f1e71/resource.tar.gz{, .log} |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |38.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/keyvalue/keyvalue_ut.cpp |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |38.3%| [DL] $(B)/canondata_storage/1784826/f008aeb63b134f27b8208ef2f9770992b33f3c2d/resource.tar.gz{, .log} |38.3%| [DL] $(B)/canondata_storage/1871102/bf551d97ceb3ef56f786a233cb690503836fb993/resource.tar.gz{, .log} |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |38.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |38.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |38.3%| [DL] $(B)/canondata_storage/1784826/fb40e5e64a539be6615e07cc5f6e889bcc1e4564/resource.tar.gz{, .log} |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |38.3%| [DL] $(B)/canondata_storage/1599023/1a5ae6170a572008429f35f362ba7a5e8f15d2db/resource.tar.gz{, .log} |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |38.4%| [DL] $(B)/canondata_storage/1773845/2d431e7defad911934583a732f9c0713c566bb1c/resource.tar.gz{, .log} |38.4%| [DL] $(B)/canondata_storage/1936997/eb320c28d0358741aa9bf1e5ebf6f79c7b769b57/resource.tar.gz{, .log} |38.4%| [DL] $(B)/canondata_storage/1773845/5f3eb5f2d67f8810998f3ac0dc6ca4e51a76482e/resource.tar.gz{, .log} |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |38.4%| [DL] $(B)/canondata_storage/1600758/4f71a10f67b6e33415a0e548a197be96b9261557/resource.tar.gz{, .log} |38.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |38.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |38.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |38.4%| [DL] $(B)/canondata_storage/1942100/deb1f289b9c40e713d0d9f614e8c3a720d26b7b2/resource.tar.gz{, .log} |38.4%| [DL] $(B)/canondata_storage/1936947/709cd8d6c15b98e118e6f2bf0b58e3a7a05592dc/resource.tar.gz{, .log} |38.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/result_compare |38.4%| [DL] $(B)/canondata_storage/1599023/6ea95a71ae6e3995d639ef495d263a106e521882/resource.tar.gz{, .log} |38.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug_tools/ut/ydb-core-debug_tools-ut |38.4%| [DL] $(B)/canondata_storage/1777230/231bd09491bc6ae7a605dc4342b8955354e67f2a/resource.tar.gz{, .log} |38.4%| [DL] $(B)/canondata_storage/1942100/d22a096de8f9cd6961c70a84e39de8dcd39ce45e/resource.tar.gz{, .log} 
|38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |38.5%| [DL] $(B)/canondata_storage/1777230/ad26fa2df1ca85e707984d82325c42cb09d4b3da/resource.tar.gz{, .log} |38.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/ydb-library-yaml_config-static_validator-ut |38.5%| [DL] $(B)/canondata_storage/1937492/8cdefcd6d0d86a9eaa2af7a5c2ce6fe8014c7ac2/resource.tar.gz{, .log} |38.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/result_convert |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |38.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/ut/ydb-core-resource_pools-ut |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.a |38.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |38.5%| [DL] $(B)/canondata_storage/1773845/ae502fad4bd26f52e20a1f56c06b19e01b6100ad/resource.tar.gz{, .log} |38.4%| [DL] $(B)/canondata_storage/1130705/875e1c4c3d9395dfda0c99832ed76d7452435a53/resource.tar.gz{, .log} |38.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut_perf/ydb-core-erasure-ut_perf |38.5%| [DL] $(B)/canondata_storage/1814674/bc826e9b9202032dac82451ba4769076555fbab6/resource.tar.gz{, .log} |38.5%| [DL] $(B)/canondata_storage/1942525/7afd327b2333310058f64e7a69fd4186c099b4fa/resource.tar.gz{, .log} |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |38.6%| [DL] $(B)/canondata_storage/1871182/3b9e2e4f5dbfff6f44c6e90910cd2ae8ae44d6a8/resource.tar.gz{, .log} |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a |38.5%| [DL] $(B)/canondata_storage/1809005/d592b29a4027fce87035ae23c7d20e060c12e900/resource.tar.gz{, .log} |38.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |38.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |38.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |38.6%| [DL] $(B)/canondata_storage/1942671/e2fe28e6febee3982bd56745d31a403e581f247f/resource.tar.gz{, .log} |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |38.6%| [DL] $(B)/canondata_storage/1809005/267ad7c5746fbf2df0910127dee92fe0f2b9e67b/resource.tar.gz{, .log} |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |38.6%| [DL] $(B)/canondata_storage/1689644/577f2591bb739414dd9c9b23b696925de3ea61fa/resource.tar.gz{, .log} |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |38.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |38.7%| [DL] $(B)/canondata_storage/995452/c405e325028ee1281de45ef9165ede08f8eb7da2/resource.tar.gz{, .log} |38.7%| [DL] $(B)/canondata_storage/1942278/e4e1a8efcf36e5a4266b5e39e982a61c7f5ad4d1/resource.tar.gz{, .log} |38.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |38.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |38.7%| [DL] $(B)/canondata_storage/1923547/6e472e1d38aac703a38a4ec87e075fcfb18210c0/resource.tar.gz{, .log} |38.7%| [DL] $(B)/canondata_storage/1925821/236e8d3234ab439980869279c30846a1eb87487f/resource.tar.gz{, .log} |38.7%| [DL] $(B)/canondata_storage/1814674/de906208dd4b33d2eb64a39da7463d9fe8dbd4e7/resource.tar.gz{, .log} |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |38.7%| [DL] $(B)/canondata_storage/1817427/ec2c0e753826fc58a07f9a969c60590ba2b2f7d8/resource.tar.gz{, .log} |38.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |38.7%| [DL] $(B)/canondata_storage/1899731/f13813265d02f2bab07ec1fa74995b07aef66427/resource.tar.gz{, .log} |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.a |38.8%| [DL] $(B)/canondata_storage/1903885/49cf3745e9db8d8a1887752705c18b066cdcb225/resource.tar.gz{, .log} |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.a |38.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |38.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |38.8%| [DL] $(B)/canondata_storage/1942173/65cfefa7d3092976dd84664ea3bae8eced26e317/resource.tar.gz{, .log} |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.a |38.8%| [DL] $(B)/canondata_storage/1871182/29596dbacc893115441dfee9a88f44ddb7ebc4a2/resource.tar.gz{, .log} |38.8%| [DL] $(B)/canondata_storage/1937429/8c415fc988c547984fa23f72063d4859ddd65412/resource.tar.gz{, .log} |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |38.8%| [DL] $(B)/canondata_storage/212715/d60846525976f02b9239763e1005de6db9c8ab32/resource.tar.gz{, .log} |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |38.9%| [DL] $(B)/canondata_storage/1917492/b65198c694e7e35ad3c848d38e764bf62e05535a/resource.tar.gz{, .log} |38.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.a |38.9%| [DL] $(B)/canondata_storage/1925821/779282f32823aab27221fa8bf110b57ff2bbac48/resource.tar.gz{, .log} |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |38.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |38.9%| [DL] $(B)/canondata_storage/995452/7ec622aec5d016fd39433889930514dc0a6b650d/resource.tar.gz{, .log} |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |38.9%| [DL] $(B)/canondata_storage/1773845/c2740b835a2bd143c8d121693abd92ce773d1c6b/resource.tar.gz{, .log} |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |38.9%| [DL] $(B)/canondata_storage/1937424/ccd563fc2effadff9042f6221e0daf47ca9cf4b8/resource.tar.gz{, .log} |38.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.a |39.0%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.a |39.0%| [DL] $(B)/canondata_storage/1917492/b0f4eb54959c42e0495a20239215b517297d9d01/resource.tar.gz{, .log} |39.0%| [DL] $(B)/canondata_storage/1775059/7880e12b17712e34b83d0a19290f284c039a3892/resource.tar.gz{, .log} |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.a |39.0%| [DL] $(B)/canondata_storage/1031349/076467d819158b21cec57980925415e6cf3dc8e6/resource.tar.gz{, .log} |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.a |39.0%| [DL] $(B)/canondata_storage/1917492/3e26f0a479a954710fee0605dc1c7add0903656f/resource.tar.gz{, .log} |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.a |39.0%| [DL] $(B)/canondata_storage/1889210/1f0152f3c0f7d63c8452e9855872f2e930cd4c7e/resource.tar.gz{, .log} |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |39.1%| [DL] $(B)/canondata_storage/1937150/c7bb7eb0808c7675c7bc402cc66327cbbcc95893/resource.tar.gz{, .log} |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.a |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.a |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |39.1%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 0 bytes |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.a |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |39.2%| [DL] $(B)/canondata_storage/1880306/93d030294156d877ed8ba9d30b77c11acb35db50/resource.tar.gz{, .log} |39.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/cpp_style_checker/cpp_style_checker |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.a |39.2%| [DL] $(B)/canondata_storage/937458/cc91b4ff58ad72a38b85199e20d2e07e805a9a2b/resource.tar.gz{, .log} |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |39.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |39.2%| [DL] $(B)/canondata_storage/1942173/8cb2c995567808fa2edb42fbcac76f18f5beb954/resource.tar.gz{, .log} |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |39.2%| [DL] $(B)/canondata_storage/995452/d2b404e58f5d9cea6dc9ba9a54cf102589dc0901/resource.tar.gz{, .log} |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |39.2%| [DL] $(B)/canondata_storage/1942525/b372d3c86a68f2b33a18c3b61b0b7b8f739a0353/resource.tar.gz{, .log} |39.2%| [DL] $(B)/canondata_storage/1809005/eaed2cb21c41d953b2deee98fb8e34f4399b8dbc/resource.tar.gz{, .log} |39.2%| [DL] $(B)/canondata_storage/1925821/749a27e1f0785a298c980a8c65ff97e6fae386df/resource.tar.gz{, .log} |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/run_tests |39.3%| [DL] $(B)/canondata_storage/1777230/76c87f5149b4cc512c6f2bf26a9c9176f0baa7c3/resource.tar.gz{, .log} |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |39.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |39.3%| [DL] $(B)/canondata_storage/1937424/a5ab1e58b34f389c8ed7c2351f8aa1b0172a465c/resource.tar.gz{, .log} |39.3%| [DL] $(B)/canondata_storage/1920236/5181d12d98fbd8d9942aee7bad394d888116e5c2/resource.tar.gz{, .log} |39.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |39.3%| [DL] $(B)/canondata_storage/1942100/3595abeb97126d66b5812bb7d87545f013abf4c7/resource.tar.gz{, .log} |39.3%| [DL] $(B)/canondata_storage/1809005/4aecbb89e2aab10b1cb5ae10988314918fa89ed3/resource.tar.gz{, .log} |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |39.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a
|39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a
|39.3%| [DL] $(B)/canondata_storage/1942100/feb30512dfd66a00939625497bf9f58185f577d7/resource.tar.gz{, .log}
|39.3%| [DL] $(B)/canondata_storage/1031349/d8eb143d414178bd8981d657c74e48bf742cd256/resource.tar.gz{, .log}
|39.3%| [DL] $(B)/canondata_storage/1847551/3a392ffe35b72cb523557617b3ab1c0abb94bee4/resource.tar.gz{, .log}
|39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18
|39.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp
|39.3%| [DL] $(B)/canondata_storage/1599023/7241ba5b766444adac2e1b71063e3efa9d1ac6af/resource.tar.gz{, .log}
|39.3%| [DL] $(B)/canondata_storage/1936947/7a33d500072033ae868c5c1e2bf951de3ed26f10/resource.tar.gz{, .log}
|39.3%| [DL] $(B)/canondata_storage/1847551/07a1ee212fb783eaad6ea733a57fa549a1b1dc94/resource.tar.gz{, .log}
|39.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp
|39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a
|39.4%| [DL] $(B)/canondata_storage/1937027/0366696fd0201dfaa1aecd3d2cb555d7273bb4cf/resource.tar.gz{, .log}
|39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a
|39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner
|39.4%| [DL] $(B)/canondata_storage/1942525/a6ac4439d79862fb911d22d6b67d06afe7ccdcb2/resource.tar.gz{, .log}
|39.4%| [DL] $(B)/canondata_storage/1871002/1225bd6f3bfe747dac447600fe2c2b276a24bd59/resource.tar.gz{, .log}
|39.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp
|39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/ydb-dstool
|39.4%| [DL] $(B)/canondata_storage/1814674/77d41c903ddd926a62e64221c321109c23757fa8/resource.tar.gz{, .log}
|39.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest
|39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/client/bin/sqs
>> TPDiskErrorStateTests::Basic [GOOD]
|39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/ut/ydb-core-config-ut
>> TPDiskErrorStateTests::BasicErrorReason [GOOD]
>> TPDiskErrorStateTests::Basic2 [GOOD]
>> TErasureTypeTest::TestBlock33LossOfAllPossible3
>> TErasureTypeTest::TestEo [GOOD]
|39.4%| [DL] $(B)/canondata_storage/1942525/7eaf18e64bfa1d6edd49db8f4631910a91c4c75d/resource.tar.gz{, .log}
|39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/token_accessor_mock/recipe
|39.4%| [DL] $(B)/canondata_storage/1936273/1ba42e2c47cd3429011228159c1fdf43dd1881b7/resource.tar.gz{, .log}
|39.4%| [DL] $(B)/canondata_storage/1937429/59daf3f7c51f8657273dfe73b2d89a936c71e04c/resource.tar.gz{, .log}
>> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD]
>> TErasureTypeTest::TestBlock43LossOfAllPossible3
|39.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::Basic2 [GOOD]
|39.4%| [DL] $(B)/canondata_storage/1937429/6057f80f29e4cbcc11dacefe2338aa3f30c80dc8/resource.tar.gz{, .log}
|39.4%| [DL] $(B)/canondata_storage/1775059/080d028abc916c9b548accf3f93e07548b901e22/resource.tar.gz{, .log}
|39.4%| [DL] $(B)/canondata_storage/1871182/e1454565fb4deac664d23aea283a15fc31e080a6/resource.tar.gz{, .log}
|39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage_ut.cpp
|39.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8
|39.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestEo [GOOD]
>> TErasureTypeTest::TestBlock32LossOfAllPossible2
>> TErasureTypeTest::TestStripe43LossOfAllPossible3
|39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source_ut.cpp
|39.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD]
|39.5%| [DL] $(B)/canondata_storage/1784826/bfd74085acfc22cb51377d828fa050c7eec78e01/resource.tar.gz{, .log}
|39.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a
>> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD]
>> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD]
|39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a
|39.5%| [DL] $(B)/canondata_storage/1871102/cd71a11115697d28bc32a43c2288da1af731328c/resource.tar.gz{, .log}
|39.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD]
|39.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD]
|39.5%| [DL] $(B)/canondata_storage/1923547/c9f8df039ef6cb4cc282d85e6cf6b18304fcc59a/resource.tar.gz{, .log}
>> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD]
>> TErasureTypeTest::TestStripe42LossOfAllPossible2
>> TErasureTypeTest::TestBlock22LossOfAllPossible2
|39.5%| [DL] $(B)/canondata_storage/1946324/933392091e1245b98f15c78824d0d3d0a7b628f4/resource.tar.gz{, .log}
|39.5%| [DL] $(B)/canondata_storage/1931696/76a7bb8a2aaec831535cf7ca5b0ffb62ccdb717e/resource.tar.gz{, .log}
|39.5%| [DL] $(B)/canondata_storage/1775319/ca8674d999cbbb16d2b8ee3ea7569f01aeebffef/resource.tar.gz{, .log}
|39.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/example_configs/static_validator-ut-example_configs
>> TErasureTypeTest::TestAllSpeciesCrcWhole1of2
>> TErasureTypeTest::TestBlock42PartialRestore2
>> TErasureTypeTest::TestStripe32LossOfAllPossible2
>> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD]
>> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD]
>> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD]
>> TErasureTypeTest::TestAllSpeciesCrcWhole2of2
|39.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/datasource-ms_sql_server
|39.5%| [DL] $(B)/canondata_storage/1937367/1710911e4cee83432c347ca77fc35e2630f78589/resource.tar.gz{, .log}
|39.5%| [DL] $(B)/canondata_storage/937458/3d1a9b67a8f957c10751d36ba94e9a0235239a0d/resource.tar.gz{, .log}
|39.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1
|39.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD]
|39.6%| [DL] $(B)/canondata_storage/1775059/af971073e577c583556209f50933dfff4201794c/resource.tar.gz{, .log}
|39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.a
>> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD]
|39.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD]
|39.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD]
|39.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD]
|39.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD]
|39.6%| [DL] $(B)/canondata_storage/1847551/b5271b855258a3785a7443c84e7858d6b86eb228/resource.tar.gz{, .log}
|39.6%| [DL] $(B)/canondata_storage/1936997/e7ff46e7163e77dd9b23cd9ab89fe7871775680d/resource.tar.gz{, .log}
|39.6%| [DL] $(B)/canondata_storage/1773845/4844b06fb73477a0ce945a8d2e558a686cbcbf98/resource.tar.gz{, .log}
|39.6%| [DL] $(B)/canondata_storage/1936947/e11cf6314c9fd176eeb82eb6187eef2b36985a2d/resource.tar.gz{, .log}
|39.6%| [DL] $(B)/canondata_storage/1942173/5421fc36d7b7a0370a706e889dcf62e4f6abf424/resource.tar.gz{, .log}
|39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp
|39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/ydb_configure
|39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe
|39.6%| [DL] $(B)/canondata_storage/1871002/7df99048c8549742c36a1795f330427e8ff4cd1c/resource.tar.gz{, .log}
|39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/join/yql-providers-generic-connector-tests-join
|39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19
|39.7%| [DL] $(B)/canondata_storage/1773845/7025c8692c966d9205ab1a92960e3c48ba756e3e/resource.tar.gz{, .log}
|39.6%| [DL] $(B)/canondata_storage/1937001/003af8bfc6ea23cbffd44d7103d71819e726e57c/resource.tar.gz{, .log}
>> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD]
>> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD]
|39.6%| [DL] $(B)/canondata_storage/1936947/5b90602802f2aab592030a044b196ce79f713168/resource.tar.gz{, .log}
|39.7%| [DL] $(B)/canondata_storage/1936947/4e75efdf8bb6c4502b7bcfedc52bbdf182bdb39c/resource.tar.gz{, .log}
>> TErasureTypeTest::TestStripe23LossOfAllPossible3
|39.7%| [DL] $(B)/canondata_storage/1781765/51d24e14e139f66d2e27548a413616e7d1e3f90d/resource.tar.gz{, .log}
|39.7%| [DL] $(B)/canondata_storage/1936273/d81f255ee5d0712542ca44dabb01842e1a996c48/resource.tar.gz{, .log}
|39.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD]
>> TCircleBufStringStreamTest::TestNotAligned [GOOD]
>> TCircleBufStringStreamTest::TestOverflow [GOOD]
>> TCircleBufTest::EmptyTest [GOOD]
>> TCircleBufTest::OverflowTest [GOOD]
>> TErasureTypeTest::TestStripe33LossOfAllPossible3
|39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/tests-datasource-clickhouse
|39.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD]
|39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut
|39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/tests-datasource-postgresql
|39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init
>> TErasureTypeTest::TestBlock42PartialRestore0
>> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD]
|39.7%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |39.7%| [DL] $(B)/canondata_storage/1937429/6f0b11a050cc6c6080ee22988ff3362313e9fdca/resource.tar.gz{, .log} |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.a >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |39.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] |39.7%| [DL] $(B)/canondata_storage/1925821/e6400c81a69303c23d02b835c07822136f1644aa/resource.tar.gz{, .log} |39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |39.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |39.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] |39.8%| RESOURCE $(sbr:4966407557) - 0 bytes |39.8%| [DL] $(B)/canondata_storage/1784117/be18c27ada732fb4f62d659dc78acd8896fe727c/resource.tar.gz{, .log} |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut >> TErasureTypeTest::TestBlock23LossOfAllPossible3 |39.8%| [DL] $(B)/canondata_storage/1936997/4c96904082a08349976603bc8711f3c66e870d86/resource.tar.gz{, .log} >> TErasureTypeTest::TestAllSpecies1of2 |39.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore1 >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |39.8%| [DL] $(B)/canondata_storage/1936947/bdcead4c91b8f1faeec3f1fbf12ee03ca7392e44/resource.tar.gz{, .log} |39.8%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql >> ErasureBrandNew::Block42_encode >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] >> TTrackable::TBuffer [GOOD] >> TResizableCircleBufTest::Test1 [GOOD] >> TErasureTypeTest::TestBlockByteOrder [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |39.8%| [DL] $(B)/canondata_storage/1784826/cd0b26ce347141d51c45b6088135a15077b56483/resource.tar.gz{, .log} |39.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |39.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TResizableCircleBufTest::Test2 [GOOD] |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |39.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest 
>> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] |39.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlockByteOrder [GOOD] |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |39.9%| [DL] $(B)/canondata_storage/1903885/36b7eb9d918e0ee90b18e7dfac3ec36336c26b5e/resource.tar.gz{, .log} |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut >> TVDiskConfigTest::RtmrProblem1 [GOOD] >> TVDiskConfigTest::RtmrProblem2 [GOOD] >> TVDiskConfigTest::ThreeLevels [GOOD] >> TErasureTypeTest::TestStripe22LossOfAllPossible2 |39.9%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] |39.9%| [DL] $(B)/canondata_storage/1817427/c7633434a5ba4df67f128f604a8708310158a1a6/resource.tar.gz{, .log} |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] |39.9%| [DL] $(B)/canondata_storage/1889210/02c3d838178ec7378a674d9517d94cf16e6f7cb4/resource.tar.gz{, .log} |39.9%| [DL] $(B)/canondata_storage/1925842/610b04b045139518e5a3bec8a997851c724acae2/resource.tar.gz{, .log} |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] >> TFragmentedBufferTest::TestReplaceWithSetMonolith [GOOD] >> TFragmentedBufferTest::TestSetMonolith [GOOD] >> THazardTest::AutoProtectedPointers [GOOD] >> THyperLogCounterTest::TestAddRandom >> TFragmentedBufferTest::TestWriteRead [GOOD] >> THazardTest::CachedPointers [GOOD] >> THyperLogCounterTest::TestGetSet [GOOD] >> TFragmentedBufferTest::TestOverwriteRead [GOOD] >> TFragmentedBufferTest::TestIsNotMonolith [GOOD] >> THyperLogCounterTest::TestIncrement [GOOD] >> THyperLogCounterTest::TestAddRandom [GOOD] >> THyperLogCounterTest::TestAddFixed [GOOD] >> THyperLogCounterTest::TestHybridIncrement [GOOD] >> THyperLogCounterTest::TestHybridAdd [GOOD] >> TIntervalSetTest::IntervalMapTestEmpty [GOOD] >> TIntervalSetTest::IntervalMapTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapIntersection [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::JustConfig [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> TBlobStorageBarriersTreeTest::Tree [GOOD] >> TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace [GOOD] |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/tstool >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplaceSelf [GOOD] |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/ydb-tests-functional-config >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] >> TIntervalSetTest::IntervalMapDifference [GOOD] >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> 
TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::Tree [GOOD] |39.9%| [DL] $(B)/canondata_storage/1942278/103eb8901f0e15b8dd5b192c5876a19cf4d707c6/resource.tar.gz{, .log} >> TErasureTypeTest::TestBlock42PartialRestore3 >> ErasureBrandNew::Block42_restore |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] |39.9%| [DL] $(B)/canondata_storage/1903885/76dd143f5f10ca68ad8503d2a14f2098d64e2e72/resource.tar.gz{, .log} |39.9%| [DL] $(B)/canondata_storage/1925821/8a81c7c999d37062d42776827b3f982555d3bdd4/resource.tar.gz{, .log} >> TLsnAllocTrackerTests::Test1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse >> TCircleBufTest::SimpleTest [GOOD] >> TCircleBufTest::PtrTest [GOOD] |39.9%| [DL] $(B)/canondata_storage/1925821/7d03c02318ec52ae0e54d4999f68346bc093fa47/resource.tar.gz{, .log} |39.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |40.0%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_checks/yaml_config-validator-ut-validator_checks >> TErasureTypeTest::TestBlock42LossOfAllPossible2 >> TCowBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::RandomInsertInplace |40.0%| [DL] $(B)/canondata_storage/1773845/29384e5593c1d8c2c9ee9307be07d1d1504ae89a/resource.tar.gz{, .log} |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/ut/ydb-core-fq-libs-hmac-ut |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber >> TIntervalSetTest::IntervalVecUnion [GOOD] >> TIntervalSetTest::IntervalVecTestSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAdd [GOOD] >> 
TIntervalSetTest::IntervalVecUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersection >> TIntervalSetTest::IntervalVecTestEmpty [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAddSubtract [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads >> TIntervalSetTest::IntervalVecIntersection [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference [GOOD] >> TIntervalSetTest::IntervalVecDifferenceInplaceSelf [GOOD] >> TIntrusiveFixedHashSetTest::TestEmptyFind [GOOD] >> TIntrusiveFixedHashSetTest::TestPushFindClear [GOOD] >> TTrackable::TVector [GOOD] >> TIntrusiveHeapTest::TestEmpty [GOOD] >> TIntrusiveHeapTest::TestAddRemove [GOOD] >> TTrackable::TString [GOOD] >> TTrackable::TList [GOOD] >> TIntrusiveHeapTest::TestUpdateNoChange [GOOD] >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication >> TIntervalSetTest::IntervalSetTestSubtract [GOOD] >> ErasureBrandNew::Block42_encode [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight2 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] >> TIntrusiveHeapTest::TestUpdateIncrease [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference [GOOD] >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TIntervalSetTest::IntervalSetTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestAdd [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestEmpty [GOOD] >> TIntervalSetTest::IntervalSetTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapUnion >> TIntervalSetTest::IntervalMapTestSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference [GOOD] >> ErasureBrandNew::Block42_chunked >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue2 [GOOD] >> TCircularOperationQueueTest::UseMinOperationRepeatDelayWhenTimeout [GOOD] >> TCircularOperationQueueTest::ShouldStartEmpty [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenHasWaitingAndStart [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue100 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 [GOOD] >> TCircularOperationQueueTest::ShouldTryToStartAnotherOneWhenStartFails [GOOD] >> TCircularOperationQueueTest::ShouldReturnExecTime [GOOD] >> TIntervalSetTest::IntervalMapUnion [GOOD] >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TCircularQueueTest::ShouldNextMulti [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight3 [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotExisting [GOOD] >> TCircularOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TIntervalSetTest::IntervalSetUnion [GOOD] >> TCacheTest::EnsureNoLeakAfterLruCacheDtor [GOOD] >> TCircularQueueTest::ShouldGetQueue [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace [GOOD] >> 
TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted2 [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue10 [GOOD] >> TCircularOperationQueueTest::CheckOnDoneNotExisting [GOOD] >> TIntervalSetTest::IntervalMapUnionInplaceSelf [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight100 [GOOD] >> TCacheTest::EnsureNoLeakAfterUnboundedCacheOnMapDtor [GOOD] >> TIntervalSetTest::IntervalSetUnionInplaceSelf [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight1 [GOOD] >> TCircularOperationQueueTest::CheckRemoveWaiting [GOOD] >> TIntervalSetTest::IntervalSetIntersection >> TCircularOperationQueueTest::CheckOnDoneInflight2 [GOOD] |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TCacheTest::EnsureNoLeakAfterQ2CacheDtor [GOOD] |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so >> TCacheTest::TestUpdateItemSize [GOOD] |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TList [GOOD] >> TCacheCacheTest::Random [GOOD] |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] >> TCircularOperationQueueTest::ShouldTolerateInaccurateTimer [GOOD] >> TCircularOperationQueueTest::CheckWakeupAfterStop [GOOD] |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] >> TIntervalSetTest::IntervalSetIntersection [GOOD] >> TCircularOperationQueueTest::CheckRemoveRunning [GOOD] >> TCircularOperationQueueTest::ShouldShuffle [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace [GOOD] >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] >> TCircularOperationQueueTest::RemoveNonExistingWhenShuffle [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplaceSelf [GOOD] >> TCacheTest::TestUnboundedMapCache [GOOD] >> TCircularQueueTest::ShouldNextSingleItem [GOOD] >> TIntervalSetTest::IntervalSetDifference [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotRunning [GOOD] >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] >> TCircularOperationQueueTest::BasicRPSCheckWithRound [GOOD] >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |40.0%| [DL] $(B)/canondata_storage/1936947/a51b566b93f106351c93790e8ed778e57174fe45/resource.tar.gz{, .log} >> TCircularOperationQueueTest::CheckOnDoneInflight1 [GOOD] >> TCircularQueueTest::Empty [GOOD] |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a >> TCacheTest::TestSizeBasedOverflowCallback [GOOD] |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TCacheTest::Test2QCache [GOOD] |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/oracle/tests-datasource-oracle >> TCircularOperationQueueTest::RemoveExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted [GOOD] >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved [GOOD] >> TCircularOperationQueueTest::CheckTimeout [GOOD] >> TStrongTypeTest::DefaultConstructorValue [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityWaitingQueue [GOOD] >> TWildcardTest::TestWildcards [GOOD] >> 
TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriority [GOOD] >> TTokenBucketTest::DelayCalculation [GOOD] >> TPriorityQueueTest::TestOrder [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriorityWithRemove [GOOD] >> TQueueInplaceTests::CleanInDestructor [GOOD] >> TULID::Generate [GOOD] >> TSimpleCacheTest::TestSimpleCache [GOOD] >> TULID::HeadByteOrder [GOOD] >> TULID::TailByteOrder [GOOD] >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] >> TStrongTypeTest::DefaultConstructorDeleted [GOOD] >> TULID::EveryBitOrder [GOOD] >> TULID::ParseAndFormat [GOOD] >> TWildcardTest::TestWildcard [GOOD] >> TPriorityOperationQueueTest::ShouldStartEmpty [GOOD] >> TQueueInplaceTests::TestSimpleInplace [GOOD] >> TTokenBucketTest::Unlimited [GOOD] >> TPriorityOperationQueueTest::UpdateNonExistingShouldReturnFalse [GOOD] >> TSimpleCacheTest::TestNotSoSimpleCache [GOOD] |40.0%| [DL] $(B)/canondata_storage/1916746/7215649f3d46d5bcf38f953188bce90d92d5269d/resource.tar.gz{, .log} |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCircularQueueTest::Empty [GOOD] |40.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |40.0%| [DL] $(B)/canondata_storage/1937429/c8fa98e2ff8b4277f546c7744d6a553dc5cce2e0/resource.tar.gz{, .log} >> TTokenBucketTest::Limited [GOOD] |40.0%| [DL] $(B)/canondata_storage/1942278/fa8d61d23d54178691359d36c79c3aeb38e8d3a9/resource.tar.gz{, .log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::CheckTimeout [GOOD] Test command err: 0.27358 |40.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> TCircularQueueTest::ShouldNotPushTwice [GOOD] >> ErasureBrandNew::Block42_chunked [GOOD] >> TCircularQueueTest::ShouldRemove [GOOD] >> TCircularQueueTest::ShouldRemoveCurrentLast [GOOD] >> TConcurrentRWHashTest::TEmptyGetTest [GOOD] >> TCircularQueueTest::ShouldPush [GOOD] >> TCircularQueueTest::ShouldNotRemoveMissing [GOOD] >> TCircularQueueTest::ShouldRemoveCurrent [GOOD] >> TConcurrentRWHashTest::TEraseTest [GOOD] >> TConcurrentRWHashTest::TInsertTest [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTest [GOOD] >> TCowBTreeTest::Basics [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTestFunc [GOOD] >> TConcurrentRWHashTest::TRemoveTest [GOOD] >> TCowBTreeTest::Empty [GOOD] >> TCowBTreeTest::MultipleSnapshots >> TCowBTreeTest::ClearAndReuse [GOOD] >> TIntrusiveStackTest::TestPushPop [GOOD] |40.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TTokenBucketTest::Limited [GOOD] |40.1%| [DL] $(B)/canondata_storage/1946324/c7905148ccc6742256ee4a209186276ffdcd07ac/resource.tar.gz{, .log} |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator >> TIntrusiveStackTest::TestEmptyPop [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty >> AddressClassifierTest::TestClassfierWithAllIpTypes [GOOD] >> AddressClassifierTest::TestAddressParsing [GOOD] >> TBitsTest::TestNaiveClz [GOOD] >> TBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> AddressClassifierTest::TestLabeledClassifierFromNetData [GOOD] >> TBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBTreeTest::ClearAndReuse [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace 
[GOOD] >> AddressClassifierTest::TestAddressExtraction [GOOD] >> TBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TBTreeTest::Basics [GOOD] |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |40.1%| [DL] $(B)/canondata_storage/1784826/2b974a6b59b529ca8b3df4eb934abf7f3c449c69/resource.tar.gz{, .log} >> TBTreeTest::RandomInsertInplace >> AddressClassifierTest::TestLabeledClassifier [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_chunked [GOOD] Test command err: totalSize# 499019610 period1# 1.444000s period2# 0.365712s MB/s1# 345.5814474 MB/s2# 1364.515274 factor# 3.948462178 |40.1%| [DL] $(B)/canondata_storage/1936842/8cd90dfd54f3010a76d89484812059b56b435e61/resource.tar.gz{, .log} |40.1%| [DL] $(B)/canondata_storage/1881367/e42a6d3bf5f7cfd5174c33ae18a047e043b3972e/resource.tar.gz{, .log} |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |40.1%| [DL] $(B)/canondata_storage/1923547/995d9d96bbba94053a60009ae7ba99979f31a5bf/resource.tar.gz{, .log} |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |40.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |40.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |40.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |40.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |40.1%| [DL] $(B)/canondata_storage/1889210/39a2e296a18100a101286d021c3bbaf0dd7c910d/resource.tar.gz{, .log} |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |40.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] >> test.py::py2_flake8 [GOOD] |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |40.1%| [DL] $(B)/canondata_storage/1942100/0aeaf9869b0ddd879ea5b962964545b2e8d4ee29/resource.tar.gz{, .log} |40.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 |40.2%| [DL] $(B)/canondata_storage/1936842/4c93d91b791c8afc87ba8ddb554fd8416c95c5a3/resource.tar.gz{, .log} >> Init::TWithDefaultParser [GOOD] |40.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |40.1%| [DL] $(B)/canondata_storage/1817427/540457314503b1dd9fa4c7f6a441cd55328f0f12/resource.tar.gz{, .log} |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |40.2%| [DL] $(B)/canondata_storage/1775319/3d397bae2149eca31d2d1d70d1f869cf3ef06a27/resource.tar.gz{, .log} |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 >> TCowBTreeTest::RandomInsertInplace [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |40.2%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/config/init/ut/unittest >> Init::TWithDefaultParser [GOOD] >> test.py::py2_flake8 [GOOD] |40.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sequenceshard/public/ut/unittest |40.2%| [DL] $(B)/canondata_storage/1923547/7c8c70a333bc60c54015f8d05a34f8e2b252b396/resource.tar.gz{, .log} |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |40.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |40.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/e244fbe4e765a00d66150f4ec9.auxcpp |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads |40.2%| [DL] $(B)/canondata_storage/1920236/35006d56f02bf6830f30b607dded3342ab6fe947/resource.tar.gz{, .log} |40.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |40.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/flake8 >> test.py::flake8 [GOOD] |40.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |40.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |40.2%| [DL] $(B)/canondata_storage/1784826/2a831b2d73099cc758734ddc249903ecf064810d/resource.tar.gz{, .log} |40.3%| [DL] $(B)/canondata_storage/1871102/fc5ed8103fd812712bf2a97977898a961a27156e/resource.tar.gz{, .log} >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |40.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |40.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |40.3%| [DL] $(B)/canondata_storage/1937027/24b357f9aec37ec57afebd08a4612bbe97bcf4ca/resource.tar.gz{, .log} >> test.py::py2_flake8 [GOOD] >> TBTreeTest::RandomInsertInplace [GOOD] >> TBTreeTest::RandomInsertThreadSafe |40.3%| [DL] $(B)/canondata_storage/212715/05112758aa31c86216a47b30fa10eee1e52db258/resource.tar.gz{, .log} |40.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |40.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/ydb-tests-functional-tpc |40.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |40.3%| [DL] $(B)/canondata_storage/1847551/6cbebf34318d9de1322f2f5dba4ee3de59c62096/resource.tar.gz{, .log} |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp >> TCowBTreeTest::MultipleSnapshots [GOOD] >> 
TCowBTreeTest::MultipleSnapshotsWithGc |40.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.3%| [DL] $(B)/canondata_storage/1937001/352ec8625074d0f91076901e1e506e122e8a0fe6/resource.tar.gz{, .log} |40.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |40.3%| [DL] $(B)/canondata_storage/1871002/6b2cac692d2e9ff1dd117cc22a0fd99527bb7e4e/resource.tar.gz{, .log} >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |40.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |40.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} >> test.py::py2_flake8 [GOOD] |40.3%| [DL] $(B)/canondata_storage/1936273/e97f761c072d3e8f44f6bc8a298df5508572dd64/resource.tar.gz{, .log} |40.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/runner |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |40.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |40.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/flake8 >> __main__.py::flake8 [GOOD] |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |40.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so >> ConsoleDumper::Basic [GOOD] >> ConsoleDumper::CoupleMerge [GOOD] >> ConsoleDumper::CoupleOverwrite [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated [GOOD] >> ConsoleDumper::ReverseMerge [GOOD] >> ConsoleDumper::ReverseOverwrite [GOOD] >> ConsoleDumper::ReverseMergeOverwriteRepeated [GOOD] >> ConsoleDumper::Different [GOOD] >> ConsoleDumper::SimpleNode [GOOD] >> ConsoleDumper::JoinSimilar [GOOD] >> ConsoleDumper::DontJoinDifferent [GOOD] >> ConsoleDumper::SimpleTenant [GOOD] >> ConsoleDumper::SimpleNodeTenant [GOOD] >> ConsoleDumper::SimpleHostId [GOOD] >> ConsoleDumper::SimpleNodeId [GOOD] >> ConsoleDumper::DontJoinNodeTenant [GOOD] |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |40.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |40.4%| [DL] $(B)/canondata_storage/1937429/21da85bae1b4363f9d35ac14bdb3122767615cb5/resource.tar.gz{, .log} >> ConsoleDumper::JoinMultipleSimple [GOOD] >> ConsoleDumper::MergeNode [GOOD] >> ConsoleDumper::MergeOverwriteRepeatedNode [GOOD] >> ConsoleDumper::Ordering [GOOD] >> ConsoleDumper::IgnoreUnmanagedItems [GOOD] >> YamlConfig::CollectLabels [GOOD] >> YamlConfig::MaterializeSpecificConfig [GOOD] >> YamlConfig::MaterializeAllConfigSimple [GOOD] >> YamlConfig::MaterializeAllConfigs >> 
conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> YamlConfig::MaterializeAllConfigs [GOOD] >> test_big_state.py::flake8 [GOOD] |40.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} >> test_continue_mode.py::flake8 [GOOD] |40.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] |40.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> YamlConfig::AppendVolatileConfig [GOOD] >> collection.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> YamlConfig::AppendAndResolve [GOOD] >> conftest.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> YamlConfig::GetMetadata [GOOD] >> select_datetime.py::flake8 [GOOD] >> YamlConfig::ReplaceMetadata [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> YamlConfigParser::Iterate [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] |40.4%| [DL] $(B)/canondata_storage/1777230/c67090a00be45ad908b79606c276697ef3208c16/resource.tar.gz{, .log} >> YamlConfigParser::PdiskCategoryFromString [GOOD] >> test_pq_read_write.py::flake8 [GOOD] |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so >> YamlConfigParser::AllowDefaultHostConfigId [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> YamlConfigParser::IncorrectHostConfigIdFails [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty [GOOD] >> YamlConfigParser::NoMixedHostConfigIds [GOOD] >> test_recovery.py::flake8 [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention >> test_recovery_match_recognize.py::flake8 [GOOD] >> YamlConfigProto2Yaml::StorageConfig [GOOD] |40.4%| [DL] $(B)/canondata_storage/1937001/205481a8623c17e2bed6fe61c2cf8cadb9a35844/resource.tar.gz{, .log} >> test_recovery_mz.py::flake8 [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe [GOOD] |40.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] >> TCowBTreeTest::SnapshotCascade [GOOD] |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |40.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |40.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |40.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... 
grpc.pb.h} >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> TCowBTreeTest::SnapshotRollback >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |40.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut/unittest >> YamlConfigProto2Yaml::StorageConfig [GOOD] Test command err: host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" "\/dev\/disk\/by-partlabel\/kikimr_nvme_02" host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" host_configs: - host_config_id: 1 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: NVME expected_slot_count: 9 - path: /dev/disk/by-partlabel/kikimr_nvme_02 type: NVME expected_slot_count: 9 - host_config_id: 2 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: SSD expected_slot_count: 9 hosts: - host: sas8-6954.search.yandex.net port: 19000 host_config_id: 1 - host: sas8-6955.search.yandex.net port: 19000 host_config_id: 2 item_config_generation: 0 |40.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |40.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |40.5%| [DL] $(B)/canondata_storage/1924537/56ebaf0fac792671b356863555a2c9672be8a889/resource.tar.gz{, .log} >> test_dynumber.py::flake8 [GOOD] |40.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |40.5%| [DL] $(B)/canondata_storage/1903885/bdae153e5f47955d12d65f85a611b12ff6a92b2f/resource.tar.gz{, .log} |40.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |40.5%| [DL] $(B)/canondata_storage/1937001/4dd27f939fe3c71df6f01e05fa91d84a683d38c8/resource.tar.gz{, .log} |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... 
grpc.pb.h} >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] >> HmacSha::HmacSha1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] |40.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |40.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/dynumber/flake8 >> test_dynumber.py::flake8 [GOOD] >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |40.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |40.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} >> test.py::py2_flake8 [GOOD] |40.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |40.5%| [DL] $(B)/canondata_storage/1871182/fa5250ae9e3c72e63f7ba97c09d348f117270160/resource.tar.gz{, .log} |40.6%| [DL] $(B)/canondata_storage/1871002/baafe386a63dbfebad074ea63e64c14ff9aa7ce2/resource.tar.gz{, .log} |40.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |40.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/hmac/ut/unittest >> HmacSha::HmacSha1 [GOOD] |40.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |40.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] >> FormatCSV::Common >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> FormatCSV::Instants [GOOD] >> test_query_cache.py::flake8 [GOOD] >> MdbEndpoingGenerator::Legacy [GOOD] >> FormatCSV::EmptyData [GOOD] |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |40.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |40.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |40.6%| [DL] $(B)/canondata_storage/1925842/5d65124ba39bfe8e6fec32f860c67c97e48531b0/resource.tar.gz{, .log} >> TBTreeTest::RandomInsertThreadSafe [GOOD] >> collection.py::flake8 [GOOD] >> FormatCSV::Common [GOOD] >> TBTreeTest::DuplicateKeysInplace |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... 
grpc.pb.h} >> FormatCSV::Strings [GOOD] >> FormatCSV::Nulls [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> TBTreeTest::DuplicateKeysInplace [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe |40.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |40.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |40.6%| [DL] $(B)/canondata_storage/1946324/4bc0ddf4ca6b7e4f4e1de76060425e0c30bc65fb/resource.tar.gz{, .log} |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} >> tstool.py::flake8 [GOOD] >> runner::import_test [GOOD] |40.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |40.6%| [DL] $(B)/canondata_storage/1900335/510f56d38547b11bc3a5c27da03d6e71466bb828/resource.tar.gz{, .log} |40.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/events.pb.{h, cc} |40.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> MdbEndpoingGenerator::Legacy [GOOD] |40.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/io_formats/arrow/ut/unittest >> FormatCSV::Nulls [GOOD] Test command err: 12000000 Cannot read CSV: no columns specified Cannot read CSV: Invalid: Empty CSV file d'Artagnan '"' Jeanne d'Arc "'" 'd'Artagnan' ''"'' 'Jeanne d'Arc' '"'"' d'Artagnan '"' Jeanne d'Arc "'" src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: \N,"","" \N,"\N","\N" \N,\N,\N parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,\N,\N ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: NULL,"","" NULL,"NULL","NULL" NULL,NULL,NULL parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,NULL,NULL ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ >> TBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TBTreeTest::ShouldCallDtorsInplace [GOOD] >> TBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TBTreeTest::Concurrent |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |40.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 >> TCowBTreeTest::SnapshotRollback [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] |40.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... 
grpc.pb.h} |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |40.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/tstool/flake8 >> tstool.py::flake8 [GOOD] >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] >> TestS3UrlEscape::EscapeEscapedForce [GOOD] >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear |40.7%| [DL] $(B)/canondata_storage/1937150/c49758d527ec85011ab8f1e29da739cbd14731c8/resource.tar.gz{, .log} |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kv_workload/ydb-tests-functional-kv_workload |40.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/import_test >> runner::import_test [GOOD] |40.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |40.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |40.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] |40.7%| [DL] $(B)/canondata_storage/212715/94c4be0ca75f4b548ec8f83ac182396ab697f86d/resource.tar.gz{, .log} |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |40.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |40.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |40.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/nemesis |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |40.8%| [DL] $(B)/canondata_storage/1871182/6f64c40116f70a4d635a7fc8d77b59f1c2902999/resource.tar.gz{, .log} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... 
grpc.pb.h} |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |40.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/common/ut/unittest >> TestS3UrlEscape::EscapeEscapedForce [GOOD] |40.8%| [DL] $(B)/canondata_storage/1924537/1b41ca0aa67dd1e0b3321b69cbca20d119f1cde1/resource.tar.gz{, .log} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/endpoint.pb.{h, cc} >> test_sql.py::flake8 [GOOD] |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |40.8%| [DL] $(B)/canondata_storage/1784826/3f48892aaa657aedfc21dffbd75e86e3221b71e3/resource.tar.gz{, .log} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/data_source.pb.{h, cc} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} >> ydb-tests-fq-multi_plane::import_test [GOOD] |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 |40.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a >> test_stability.py::flake8 [GOOD] |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} >> TPGTest::TestLogin [GOOD] |40.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |40.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |40.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |40.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |40.9%| [DL] $(B)/canondata_storage/1689644/b1f7674a51f07a84444f5ebf4e68dd7decb2debc/resource.tar.gz{, .log} >> TWeighedOrderingTest::SimpleSelectionTest [GOOD] >> TWeighedOrderingTest::WeighedSelectionTest [GOOD] >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] |40.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/operation_id.pb.{h, cc} |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |40.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/import_test >> ydb-tests-fq-multi_plane::import_test [GOOD] |40.9%| [DL] $(B)/canondata_storage/1031349/b4f16899a4015d5c80fc4b9db6742f6406940336/resource.tar.gz{, .log} |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |40.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |40.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |40.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/flake8 >> test_stability.py::flake8 [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase [GOOD] >> test_quoting.py::flake8 [GOOD] |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/pgproxy/ut/unittest >> TPGTest::TestLogin [GOOD] Test command err: 2024-11-21T17:41:34.702170Z :PGWIRE INFO: Listening on [::]:9628 2024-11-21T17:41:34.702974Z :PGWIRE DEBUG: (#10,[::1]:59044) incoming connection opened 2024-11-21T17:41:34.703006Z :PGWIRE DEBUG: (#10,[::1]:59044) -> [1] 'i' "Initial" Size(15) protocol(0x00000300) user=user 2024-11-21T17:41:34.703055Z :PGWIRE DEBUG: (#10,[::1]:59044) <- [1] 'R' "Auth" Size(4) OK >> TCowBTreeTest::ShouldCallDtorsInplace [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TEventPriorityQueueTest::TestPriority [GOOD] >> TFastTlsTest::IterationAfterThreadDeath >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> TFastTlsTest::IterationAfterThreadDeath [GOOD] >> conftest.py::flake8 [GOOD] >> TFastTlsTest::ManyThreadLocals [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |40.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |40.9%| [DL] $(B)/canondata_storage/1903885/d90cd049c4b259907eb16c654dc3e99a98aca749/resource.tar.gz{, .log} >> test.py::py2_flake8 [GOOD] >> TFastTlsTest::ManyConcurrentKeys >> compare.py::flake8 [GOOD] |40.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] >> TFastTlsTest::ManyConcurrentKeys [GOOD] >> TFifoQueueTest::ShouldPushPop [GOOD] |40.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |40.9%| [DL] $(B)/canondata_storage/1925842/341fc37dc897d9710faf3e494b064e93d76d8c61/resource.tar.gz{, .log} |40.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead2 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead3 [GOOD] >> TFragmentedBufferTest::Test3WriteRead [GOOD] >> TFragmentedBufferTest::Test5WriteRead [GOOD] |40.9%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |40.9%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... 
defs.inl.h} |40.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/mysql/connector-tests-datasource-mysql |41.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |41.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |41.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |41.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |41.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} >> TFragmentedBufferTest::TestGetMonolith [GOOD] >> TFragmentedBufferTest::CopyFrom [GOOD] >> TBTreeTest::Concurrent [GOOD] >> TBTreeTest::IteratorDestructor [GOOD] >> TCacheCacheTest::MoveToWarm [GOOD] >> TFragmentedBufferTest::ReadWriteRandom >> TCacheCacheTest::EvictNext [GOOD] >> CompressionTest::lz4_generator_basic [GOOD] >> CompressionTest::lz4_generator_deflates [GOOD] >> StLog::Basic [GOOD] |41.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |41.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |41.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} >> test_generator.py::flake8 [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention [GOOD] >> test_init.py::flake8 [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |41.0%| [DL] $(B)/canondata_storage/1689644/90acca9b171ecce398f65b70b237cc25e6006718/resource.tar.gz{, .log} |41.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |41.0%| [DL] $(B)/canondata_storage/937458/8cf48c219a2939bf3e0b54c55a5f53cb19e8be63/resource.tar.gz{, .log} |41.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |41.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> StLog::Basic [GOOD] Test command err: Producer 0 worked for 0.6143348876 seconds Producer 1 worked for 0.6160329913 seconds Consumer 0 worked for 1.409846727 seconds Consumer 1 worked for 1.528912742 seconds Consumer 2 worked for 2.626580526 seconds Consumer 3 worked for 2.659401905 seconds |41.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |41.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/mdb_mock/recipe >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |41.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/flake8 >> test_init.py::flake8 [GOOD] |41.0%| [DL] $(B)/canondata_storage/1923547/a231b4de20366f8952df108d9a4eee6ccfa653a7/resource.tar.gz{, .log} |41.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... 
grpc.pb.h} |41.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so >> ResourcePoolClassifierTest::IntSettingsParsing [GOOD] >> ResourcePoolClassifierTest::StringSettingsParsing [GOOD] >> ResourcePoolClassifierTest::SettingsExtracting [GOOD] >> ResourcePoolClassifierTest::SettingsValidation [GOOD] >> ResourcePoolTest::IntSettingsParsing [GOOD] >> ResourcePoolTest::SecondsSettingsParsing [GOOD] >> ResourcePoolTest::PercentSettingsParsing [GOOD] >> ResourcePoolTest::SettingsExtracting [GOOD] >> ResourcePoolTest::SettingsValidation [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |41.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |41.0%| [DL] $(B)/canondata_storage/1942525/795fb6df3f52b9e4c9442c13b6e21f3c55c8e287/resource.tar.gz{, .log} |41.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} >> Mvp::TokenatorGetMetadataTokenGood |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit >> Mvp::TokenatorGetMetadataTokenGood [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood |41.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/resource_pools/ut/unittest >> ResourcePoolTest::SettingsValidation [GOOD] |41.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |41.1%| [DL] $(B)/canondata_storage/1871182/07b7b4e8c24b11ac495e88374330f1c5e22423bd/resource.tar.gz{, .log} |41.1%| [DL] $(B)/canondata_storage/1937150/d999d25ecf89b5fdaab059bd630b998ae547ae2c/resource.tar.gz{, .log} |41.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> TMemoryPoolTest::LongRollback [GOOD] >> UtilString::ShrinkToFit [GOOD] >> TMemoryPoolTest::Transactions [GOOD] >> TMemoryPoolTest::AppendString [GOOD] >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> TMemoryPoolTest::AllocOneByte [GOOD] |41.1%| [DL] $(B)/canondata_storage/1923547/afaeadd931dc450ce4874920f37bf8ac420697e3/resource.tar.gz{, .log} |41.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... 
grpc.pb.h} |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp >> TBlobStorageCrypto::TestOffsetStreamCypher >> TBlobStorageCrypto::TestMixedStreamCypher [GOOD] >> ValidatorBuilder::CanCreateAllTypesOfNodes [GOOD] >> ValidatorBuilder::BuildSimpleValidator [GOOD] >> ValidatorBuilder::CanHaveDuplicateType [GOOD] >> ValidatorBuilder::CanHaveMultipleType [GOOD] >> ValidatorBuilder::CreateMultitypeNode [GOOD] >> TBlobStorageCrypto::TestOffsetStreamCypher [GOOD] >> TBlobStorageCrypto::TestInplaceStreamCypher [GOOD] >> TBlobStorageCrypto::PerfTestStreamCypher [GOOD] >> TBlobStorageCrypto::UnalignedTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp >> TFragmentedBufferTest::ReadWriteRandom [GOOD] |41.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_util/unittest >> TMemoryPoolTest::AllocOneByte [GOOD] >> ydb-tests-functional-suite_tests::import_test [GOOD] |41.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_builder/unittest >> ValidatorBuilder::CreateMultitypeNode [GOOD] |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |41.1%| [DL] $(B)/canondata_storage/1931696/82ea795a06df7c3a825b7d991893b35cf6220bf5/resource.tar.gz{, .log} |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/datasource/ydb/connector-tests-datasource-ydb |41.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> StaticValidator::HostConfigs [GOOD] >> StaticValidator::DomainsConfig [GOOD] >> StaticValidator::Hosts [GOOD] |41.1%| [DL] $(B)/canondata_storage/1871182/3f2a613c9c9875708b545bc50785cef27e54013e/resource.tar.gz{, .log} |41.1%| [DL] $(B)/canondata_storage/1814674/30771d6d06c81affd1305529445a0377b856ae73/resource.tar.gz{, .log} |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |41.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD] |41.1%| [DL] $(B)/canondata_storage/1600758/9ce2ecd88b57ecedef1530f890d737b6cf95d1ba/resource.tar.gz{, .log} >> TFunctionsMetadataTest::Serialization >> RuntimeFeatureFlags::UpdatingRuntimeFlags [GOOD] >> RuntimeFeatureFlags::DefaultValues [GOOD] >> RuntimeFeatureFlags::ConversionFromProto [GOOD] >> RuntimeFeatureFlags::ConversionToProto [GOOD] |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |41.1%| [DL] $(B)/canondata_storage/1925842/cbf83d6f077878c7a82321913f58b44bec270ff4/resource.tar.gz{, .log} |41.1%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFunctionsMetadataTest::Serialization [GOOD] |41.1%| [DL] $(B)/canondata_storage/1936997/4ecf9d16af51c71c1bd05cb4c6d3b08f52610ba0/resource.tar.gz{, .log} |41.1%| [DL] $(B)/canondata_storage/1899731/f47ce36b219d8f50f3f3fb1cfb49b6138993e5d3/resource.tar.gz{, .log} |41.1%| [DL] $(B)/canondata_storage/1937001/96df220872bbb62db85fbbf2896ad6c42e1ea831/resource.tar.gz{, .log} |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |41.2%| [DL] $(B)/canondata_storage/1937027/d3548eb0e12456df7d78d12789a5f6e6325fd027/resource.tar.gz{, .log} >> simple_json_diff::import_test [GOOD] >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher |41.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |41.2%| [DL] $(B)/canondata_storage/1881367/a251b5ef11aa21f92a7b9ccbb7bd43cb18874df3/resource.tar.gz{, .log} |41.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/unittest >> StaticValidator::Hosts [GOOD] |41.2%| [DL] $(B)/canondata_storage/1775059/8b34c49761913e81a10df2ad921137b17e044791/resource.tar.gz{, .log} |41.2%| [DL] $(B)/canondata_storage/1924537/2285eda3e0498786ce5e558b2150b7c3df203fa2/resource.tar.gz{, .log} |41.2%| [DL] $(B)/canondata_storage/1925842/78a06865e4e9078622d4dcf470d14e1b733b818e/resource.tar.gz{, .log} |41.2%| [DL] $(B)/canondata_storage/1937001/b50cbf33819e30c3dbd3f1d7aa3988fe10d7563d/resource.tar.gz{, .log} |41.2%| [DL] $(B)/canondata_storage/1814674/2001d4056ebb44c911c0d3db631fa11dcd77416c/resource.tar.gz{, .log} |41.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/client/metadata/ut/unittest >> TFunctionsMetadataTest::Serialization [GOOD] |41.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 >> ydb-tests-functional-autoconfig::import_test [GOOD] |41.2%| [DL] $(B)/canondata_storage/1599023/ec28b7e0cb376a1e45f470b7991522c343aa2f7e/resource.tar.gz{, .log} |41.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/tools/simple_json_diff/import_test >> simple_json_diff::import_test [GOOD] |41.2%| [DL] $(B)/canondata_storage/995452/cd615849d57fceb08a91b5ec6193f9c9b5284bc5/resource.tar.gz{, .log} |41.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/base/generated/ut/unittest >> RuntimeFeatureFlags::ConversionToProto [GOOD] |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |41.2%| [DL] $(B)/canondata_storage/1899731/9a6c84379323b592de124cd879525cc20bc7dae9/resource.tar.gz{, .log} |41.2%| [DL] $(B)/canondata_storage/1599023/c740047c261980b1e01d1f5aa2d5ef7442556a50/resource.tar.gz{, .log} |41.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD] |41.2%| [DL] $(B)/canondata_storage/1871002/761cff393d72758da30485c6468fa0482f26f7e9/resource.tar.gz{, .log} |41.3%| [DL] $(B)/canondata_storage/1031349/6aa08de3e733fc9c427c11399a2b1f3449285551/resource.tar.gz{, .log} |41.3%| [DL] $(B)/canondata_storage/1889210/d428a31e20bca6fc83066589a64f342c0bce07ea/resource.tar.gz{, .log} |41.3%| [DL] $(B)/canondata_storage/1936947/6e1bb4d77970d55a143ad038e277a4ab9866d9a5/resource.tar.gz{, .log} |41.3%| [DL] 
$(B)/canondata_storage/1942173/b8a648bc4016a8f5127fa55fb15319a716377608/resource.tar.gz{, .log} |41.3%| [DL] $(B)/canondata_storage/1942173/4d0058e024fc0472a561e1e5fbadce0ca3ce8a5c/resource.tar.gz{, .log} >> ydb-tests-functional-cms::import_test [GOOD] |41.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |41.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |41.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |41.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |41.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |41.3%| [DL] $(B)/canondata_storage/1781765/db7f9f8ef3e4bbba649bbea05cce38a1eb4d74a1/resource.tar.gz{, .log} |41.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |41.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/import_test >> ydb-tests-functional-cms::import_test [GOOD] |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |41.3%| [DL] $(B)/canondata_storage/1923547/0012db846c37fd32ba35beed8bb1867e995553e6/resource.tar.gz{, .log} |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |41.4%| [DL] $(B)/canondata_storage/1777230/5929a5f68e30c5cd123eff09f1b1487815a8578e/resource.tar.gz{, .log} >> ydb-tests-functional-tpc::import_test [GOOD] |41.2%| [DL] $(B)/canondata_storage/1880306/fe6ed74453624d8e9abdbfac070631bc503097dd/resource.tar.gz{, .log} >> OperationLog::Size29 [GOOD] >> OperationLog::Size1 [GOOD] >> OperationLog::Size8 [GOOD] >> OperationLog::Size1000 >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc [GOOD] >> TCowBTreeTest::DuplicateKeysInplace |41.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |41.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas >> ydb-tests-functional-encryption::import_test [GOOD] >> Signer::Basic [GOOD] |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.a |41.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/local_ydb |41.4%| [DL] $(B)/canondata_storage/1889210/e2ed52d5150d0296ed106dcece44652f0506ca42/resource.tar.gz{, .log} |41.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/import_test >> ydb-tests-functional-tpc::import_test [GOOD] >> OperationLog::Size1000 [GOOD] >> Validator::MultitypeNodeValidation [GOOD] >> OperationLog::ConcurrentWrites >> Validator::MapValidation [GOOD] >> Validator::BoolValidation [GOOD] >> Validator::OpaqueMaps [GOOD] >> Validator::Enums [GOOD] >> Validator::StringValidation [GOOD] >> Validator::IntValidation [GOOD] |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |41.4%| [DL] $(B)/canondata_storage/1775319/171cd905ac5402be308349251477ae77481201b4/resource.tar.gz{, .log} |41.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] |41.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/filelock/libpy3library-python-filelock.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a >> OperationLog::ConcurrentWrites [GOOD] >> Validator::IntArrayValidation [GOOD] |41.4%| [DL] $(B)/canondata_storage/1924537/67045fc137b7aae73c1137f6fa60894b964dec45/resource.tar.gz{, .log} >> TCowBTreeTest::DuplicateKeysInplace [GOOD] |41.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/signer/ut/unittest >> Signer::Basic [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe |41.4%| [DL] $(B)/canondata_storage/1942415/8c185f02ebd1857df69e888726609303d69b0657/resource.tar.gz{, .log} |41.4%| [DL] $(B)/canondata_storage/1942415/c4bc3e1ce2d8446b0a147e62db95c2cae64947db/resource.tar.gz{, .log} |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.a |41.4%| [DL] $(B)/canondata_storage/1777230/65685e9d54d416f54450defb84f83fe3b04456b0/resource.tar.gz{, .log} |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |41.5%| [DL] $(B)/canondata_storage/1942173/4c1cefaf51bea7c00d33359856fe5c9bd35e17a5/resource.tar.gz{, .log} |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.a |41.5%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.a |41.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator/unittest >> Validator::IntArrayValidation [GOOD] |41.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/debug_tools/ut/unittest >> OperationLog::ConcurrentWrites [GOOD] |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.a |41.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |41.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |41.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.a |41.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/statistics_workload |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |41.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/func/libpy3library-python-func.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a >> TCowBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TCowBTreeTest::IteratorDestructor [GOOD] >> TCowBTreeTest::Concurrent >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpython-symbols-python.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |41.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |41.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/import_test >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |41.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> TErasurePerfTest::Split >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> TErasurePerfTest::Split [GOOD] >> TErasurePerfTest::Restore >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> result_convert::import_test [GOOD] |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.a |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp >> integrations_test.py::test_read_jtest_with_one_result [GOOD] >> LongTxServicePublicTypes::Snapshot [GOOD] >> LongTxServicePublicTypes::SnapshotMaxTxId [GOOD] >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] >> LongTxServicePublicTypes::LongTxId [GOOD] |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |41.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |41.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_topic_v1.{pb.h ... 
grpc.pb.h} |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |41.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut >> ydb-tests-functional-query_cache::import_test [GOOD] |41.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |41.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/import_test >> result_convert::import_test [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention |41.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/6fwh/0008be/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/6fwh/0008be/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) |41.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |41.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/long_tx_service/public/ut/unittest >> LongTxServicePublicTypes::LongTxId [GOOD] |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a >> ydb-tests-functional-tenants::import_test [GOOD] |41.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |41.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] >> ydb_recipe::import_test [GOOD] |41.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |41.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] |41.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |41.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/import_test >> ydb_recipe::import_test [GOOD] |41.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |41.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |41.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |41.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |41.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_parser_ut.cpp ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/core/ut/unittest >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] Test command err: 2024-11-21T17:41:36.497207Z :MVP DEBUG: Refreshing token metadataTokenName 2024-11-21T17:41:36.497276Z :MVP DEBUG: Updating metadata token 2024-11-21T17:41:36.500119Z :MVP DEBUG: Refreshing token metadataTokenName 2024-11-21T17:41:36.500156Z :MVP DEBUG: Updating metadata token 2024-11-21T17:41:41.500289Z :MVP DEBUG: Refreshing token metadataTokenName 2024-11-21T17:41:41.500363Z :MVP DEBUG: Updating metadata token |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_filter_ut.cpp |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp >> TCowBTreeTest::Concurrent [GOOD] >> TCowBTreeTest::Alignment [GOOD] |41.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut >> Mvp::OpenIdConnectRequestWithIamTokenYandex [GOOD] >> Mvp::OpenIdConnectRequestWithIamTokenNebius [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius [GOOD] >> Mvp::OpenIdConnectProxyOnHttpsHost >> Mvp::OpenIdConnectProxyOnHttpsHost [GOOD] >> Mvp::OpenIdConnectFixLocationHeader [GOOD] >> Mvp::OpenIdConnectExchangeNebius [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TCowBTreeTest::Alignment [GOOD] Test command err: Producer 0 worked for 0.2988933339 seconds Producer 1 worked for 0.2651438849 seconds Consumer 0 worked for 0.769532835 seconds on a snapshot of size 80000 Consumer 1 worked for 1.27450778 seconds on a snapshot of size 160000 Consumer 2 worked for 1.515787208 seconds on a snapshot of size 240000 Consumer 3 worked for 1.902293203 seconds on a snapshot of size 320000 Consumers had 11999932 successful seeks |41.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlow >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed [GOOD] >> Mvp::OpenIdConnectAllowedHostsList |41.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |41.5%| [DL] $(B)/canondata_storage/1936842/5d467f81c7a480afb7bd5525dfdd3f1e79630494/resource.tar.gz{, 
.log} |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a >> Mvp::OpenIdConnectAllowedHostsList [GOOD] >> Mvp::OpenIdConnectHandleNullResponseFromProtectedResource [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] |41.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |41.6%| [DL] $(B)/canondata_storage/1937424/6ef1f652a39663221b6531b56b64227e1ee24197/resource.tar.gz{, .log} |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |41.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.a >> ydb-tests-fq-common::import_test [GOOD] >> test.py::py2_flake8 [GOOD] |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.a |41.6%| [DL] $(B)/canondata_storage/1942415/2b3301623e3d03377711deea0f47238c2650379b/resource.tar.gz{, .log} |41.6%| [DL] $(B)/canondata_storage/1130705/91b310c0e1986f35aa28766f3cd6650d916966a7/resource.tar.gz{, .log} |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/oidc_proxy/ut/unittest >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] Test command err: 2024-11-21T17:41:42.508399Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.508481Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T17:41:42.513688Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.513736Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T17:41:42.515287Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.515337Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T17:41:42.516989Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.517022Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T17:41:42.519134Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.519214Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T17:41:42.520625Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.520651Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T17:41:42.527561Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.527589Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.527790Z :MVP DEBUG: Incoming response for protected resource: 400 2024-11-21T17:41:42.527799Z :MVP DEBUG: Try to send request to HTTPS port 2024-11-21T17:41:42.527803Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.527840Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T17:41:42.529723Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.529827Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.529873Z :MVP DEBUG: Incoming response for protected resource: 400 2024-11-21T17:41:42.541467Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.541495Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.541536Z :MVP DEBUG: Incoming response for protected resource: 307 2024-11-21T17:41:42.543251Z :MVP DEBUG: 
SessionService.Check(): OK 2024-11-21T17:41:42.543272Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.543302Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T17:41:42.544941Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.544957Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.544983Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T17:41:42.546685Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.546700Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.546726Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T17:41:42.548118Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.548132Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.548166Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T17:41:42.561282Z :MVP DEBUG: Start OIDC process 2024-11-21T17:41:42.562023Z :MVP DEBUG: Using session cookie (__Host_session_cookie_79632E6F617574682E7964622D766965776572: c2Vz****aWU= (CE0CB168)) 2024-11-21T17:41:42.562032Z :MVP DEBUG: Exchange session token 2024-11-21T17:41:42.562072Z :MVP DEBUG: Getting access token: 200 2024-11-21T17:41:42.562088Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.562109Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T17:41:42.580740Z :MVP DEBUG: SessionService.Check(): 401 2024-11-21T17:41:42.612254Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T17:41:42.612471Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T17:41:42.620254Z :MVP DEBUG: SessionService.Create(): OK 2024-11-21T17:41:42.623201Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.623223Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.623265Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T17:41:42.638528Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T17:41:42.638738Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T17:41:42.640245Z :MVP DEBUG: SessionService.Create(): OK 2024-11-21T17:41:42.641724Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T17:41:42.641739Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.641771Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T17:41:42.647702Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2024-11-21T17:41:42.663631Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2024-11-21T17:41:42.668270Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T17:41:42.672279Z :MVP DEBUG: SessionService.Create(): 401 2024-11-21T17:41:42.677159Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T17:41:42.679037Z :MVP DEBUG: SessionService.Create(): 400 2024-11-21T17:41:42.684037Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T17:41:42.686011Z :MVP DEBUG: SessionService.Create(): 400 2024-11-21T17:41:42.692261Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T17:41:42.697423Z :MVP DEBUG: SessionService.Create(): 412 2024-11-21T17:41:42.717790Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T17:41:42.725426Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T17:41:42.727947Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T17:41:42.734746Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T17:41:42.734789Z :MVP DEBUG: Can not process request to protected resource: GET /counters 
HTTP/1.1 Host: ydb.viewer.page Accept: */* Accept-Encoding: deflate Authorization: 2024-11-21T17:41:42.737815Z :MVP DEBUG: Restore oidc context failed: Cannot find cookie ydb_oidc_cookie 2024-11-21T17:41:42.742918Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest |41.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/import_test >> ydb-tests-fq-common::import_test [GOOD] |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.a |41.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |41.6%| [DL] $(B)/canondata_storage/1936947/a5f83e5d38179c14126d53519dc062cef98113ec/resource.tar.gz{, .log} |41.6%| [DL] $(B)/canondata_storage/1942173/d348e22e98330a9e377a94d0f46bb524828381c2/resource.tar.gz{, .log} |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |41.6%| [DL] $(B)/canondata_storage/1031349/11dbfe14d756754bc2571a76f87cd1781da1b241/resource.tar.gz{, .log} |41.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |41.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/ut/ydb-core-base-ut |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |41.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |41.7%| [DL] $(B)/canondata_storage/1923547/cc66e2ee27834e5a88500b4e8ce11e850cd2de16/resource.tar.gz{, .log} |41.6%| [DL] $(B)/canondata_storage/1936273/9f3848d6f996b16a08afdfa5e23ec58b8aab3b97/resource.tar.gz{, .log} |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |41.7%| [DL] $(B)/canondata_storage/1889210/485a27a6615ffb9228e2aa44a9e60526928bedc1/resource.tar.gz{, .log} |41.7%| [DL] $(B)/canondata_storage/1784117/b9574b141cedb261a8563a97d9a372d9277da2ce/resource.tar.gz{, .log} |41.7%| [DL] $(B)/canondata_storage/1942100/dea54d4f81130d9797cdfcdac410831269e46559/resource.tar.gz{, .log} >> ydb_serializable::import_test [GOOD] |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |41.7%| [DL] $(B)/canondata_storage/1924537/c187e1509ff19817db0786b643e77f46ea364ba0/resource.tar.gz{, .log} |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/terminate_policy/libudf-service-terminate_policy.global.a |41.6%| [DL] $(B)/canondata_storage/1689644/d939c79f1c25569f7b8f4e5b740e070ad72d7ad7/resource.tar.gz{, .log} |41.6%| [DL] $(B)/canondata_storage/1946324/a73667b195068cad6a1c7af344e8899b2a9f8586/resource.tar.gz{, .log} |41.6%| [DL] $(B)/canondata_storage/1937429/b87108417827dee5e78de8f3f8c67e6b30765fd1/resource.tar.gz{, .log} >> ydb-tests-library-ut::import_test [GOOD] >> nemesis::import_test [GOOD] >> ydb-tests-functional-large_serializable::import_test [GOOD] |41.7%| [DL] $(B)/canondata_storage/1600758/7c4439739defd21f99d1592092fc7cbb0e2c282d/resource.tar.gz{, .log} |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |41.7%| [DL] 
$(B)/canondata_storage/1871182/f81e0439a02cfc84ec46562f3fada5312be3e21d/resource.tar.gz{, .log} |41.8%| [DL] $(B)/canondata_storage/1847551/cea98224a0242fa122932bfd335599c5107ce35b/resource.tar.gz{, .log} |41.8%| [DL] $(B)/canondata_storage/1936842/34f8feb0276ff4ef51cbeb94b6e56bd74f191048/resource.tar.gz{, .log} |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |41.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] |41.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/import_test >> ydb_serializable::import_test [GOOD] |41.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/import_test >> nemesis::import_test [GOOD] |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |41.8%| [DL] $(B)/canondata_storage/1942173/50b4ae48e906d86b27ee0b68ed5a08b5ad6bf50e/resource.tar.gz{, .log} |41.8%| [DL] $(B)/canondata_storage/1931696/cc756dc950b218e9f3589a791267d21773207f44/resource.tar.gz{, .log} |41.8%| [DL] $(B)/canondata_storage/1937001/6a20500553ba8b1dbf218cdb9db234c852b93f8e/resource.tar.gz{, .log} |41.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/import_test >> ydb-tests-functional-large_serializable::import_test [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |41.9%| [DL] $(B)/canondata_storage/1936997/4d6442f2c45d3e77a2ced29c096325d3ced197d8/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1871102/c16b260c9474b6209b41c05f68145ad16f292a86/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1880306/234eadcde1cd54bffae64f4516628981e02b093d/resource.tar.gz{, .log} |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a >> ydb-tests-functional-wardens::import_test [GOOD] |41.9%| [DL] $(B)/canondata_storage/1775319/f708187e40ffeec8e975cd1bd21f8ca26d85dbd7/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1871002/e0ca733858945e7ec95821f93c3af63825d4d919/resource.tar.gz{, .log} |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |41.9%| [DL] $(B)/canondata_storage/1924537/23db2f3171675edbfb7d81888413e9e5893c1dcb/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1031349/cff576920ea1bdea444026e6597e9d9719a47154/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1599023/84d6e9c08e0a2fb91653fb36e754c1950d7f4a7e/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1903885/79c2973401eb1c01d914beb88eb4f2fdf68caaee/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1773845/1e4cdc9374a98062b8e39a6ad511b5fc378113ba/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1923547/673d4b5ed96219bc5abbb4d4204d1361da772ae8/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1937492/e9b42bd48624d6b2ad306186fefd6a9293482be2/resource.tar.gz{, .log} |41.9%| [DL] $(B)/canondata_storage/1814674/8ecd58672b8e77093dcc9d63519f6e20b8155e91/resource.tar.gz{, .log} |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |42.0%| [DL] 
$(B)/canondata_storage/1031349/596c297595e75709124ce2ef96947a7ecc9a2056/resource.tar.gz{, .log} |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |42.0%| [DL] $(B)/canondata_storage/212715/01c0c681be6c56b02f31f87454cd0dd3cd0e4ade/resource.tar.gz{, .log} |42.0%| [DL] $(B)/canondata_storage/1942278/37f44c727aef72a7e55462a303dd42938366b6c2/resource.tar.gz{, .log} |42.0%| [DL] $(B)/canondata_storage/1937001/79c03f49d007d946fea55dca11f80af0a81dd047/resource.tar.gz{, .log} |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |42.0%| [DL] $(B)/canondata_storage/1777230/9ab8710baf20ae69a72eb29447a9d2bf1039585c/resource.tar.gz{, .log} |42.0%| [DL] $(B)/canondata_storage/1784826/cbf6ad4c227ab017bb5ebc2f4ab5719247fa9785/resource.tar.gz{, .log} |42.0%| [DL] $(B)/canondata_storage/1847551/e0a8e24122315ced755797fc5a2fd65992e28ce2/resource.tar.gz{, .log} |42.0%| [DL] $(B)/canondata_storage/1937424/6348070f1b3d5f51aed3ecef47d584233aafa986/resource.tar.gz{, .log} |42.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] |42.0%| [DL] $(B)/canondata_storage/1942671/586e1cad89b59a85e10d70e6019aeefccc0f0382/resource.tar.gz{, .log} |42.0%| [DL] $(B)/canondata_storage/1937367/a0981807726fa8e5aad90985bda23ee6596b1473/resource.tar.gz{, .log} |42.0%| [DL] $(B)/canondata_storage/1847551/2505f7fa026ee9e2d5013e7854c2b1b29ddac476/resource.tar.gz{, .log} >> ydb-tests-functional-sqs-cloud::import_test [GOOD] >> ydbd_slice::import_test [GOOD] >> gen-report.py::flake8 [GOOD] |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |42.1%| [DL] $(B)/canondata_storage/1777230/915011f8f5c826c23aaf0fd2e67aa8d2fb1f93cd/resource.tar.gz{, .log} |42.1%| [DL] $(B)/canondata_storage/1942100/9017d73811f974261c96a9a333f3aaff5abeafff/resource.tar.gz{, .log} |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |42.1%| [DL] $(B)/canondata_storage/1937150/8a03b22cb41a5d45a74b6bace2f08e86727532d3/resource.tar.gz{, .log} |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |42.1%| [DL] $(B)/canondata_storage/1130705/02b49b8f7de27e2bd653274fbb3967717e0d56f2/resource.tar.gz{, .log} |42.1%| [DL] $(B)/canondata_storage/1871102/62570278011b2c51fb3ba23cde15a7bb184e27c4/resource.tar.gz{, .log} >> ydb-tests-functional-blobstorage::import_test [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |42.1%| [DL] $(B)/canondata_storage/1931696/b4c3ab5b0044d3419bb02eb27807ba3b9627f831/resource.tar.gz{, .log} |42.1%| [DL] $(B)/canondata_storage/1031349/12c4584507630678ad646e234c8e75078785de4d/resource.tar.gz{, .log} |42.1%| [DL] $(B)/canondata_storage/1924537/86b5be80156a54ea70bd90adf255e133305e5fa4/resource.tar.gz{, .log} |42.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |42.1%| [DL] $(B)/canondata_storage/1031349/e4ef3d587c2530bf47672a5783f4b0d0a4b560fa/resource.tar.gz{, .log} |42.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/ydbd_slice/bin/import_test >> ydbd_slice::import_test [GOOD] |42.1%| [DL] $(B)/canondata_storage/1942100/f94ab3eb2009e356ba2cba2e6a416914ebfc9469/resource.tar.gz{, .log} |42.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |42.2%| [DL] $(B)/canondata_storage/1809005/e8a59d866b0d94fc2277cc98140dae6c5e6c1510/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1937492/eae233b11f0c715bdde5a31914dc1e293f0a9fbe/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1917492/cae80a2ca59a3c25b589fb2f7fdc5fafe6beed6a/resource.tar.gz{, .log} |42.1%| [DL] $(B)/canondata_storage/1942671/2b244e8340f9afab99b1136c03a3466d54265dea/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1871182/ddbad7d2ae0c078ae93ebccd9d41a0f24a8479bc/resource.tar.gz{, .log} |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |42.2%| [DL] $(B)/canondata_storage/1871002/243e3192fd5358940dd64b70c1fba15f8aaa24b7/resource.tar.gz{, .log} |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |42.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] |42.2%| [DL] $(B)/canondata_storage/1942415/b6b41eb77627490bfce387dccb1eea7766e2bb71/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1775319/26ad9e02e51a11466b2f499d792316509ca07d30/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1775059/79f40817d9be6347f8a0a937bdd3c46c326ab7d3/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1809005/91364d63a5af40ff53018c04d9aede4888eea14e/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1889210/86d0f0a9f5fd231dca8140f5809c568e15366735/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1889210/1220fbc43e6c9913dd69b912a91a021b32a209aa/resource.tar.gz{, .log} |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |42.2%| [DL] $(B)/canondata_storage/1777230/f087d19aefc64f43c561b1716c8824e128ac8093/resource.tar.gz{, .log} |42.2%| [DL] $(B)/canondata_storage/1925842/3bbdcdb1d64d89357b8a4a5a80903c46df42d63e/resource.tar.gz{, .log} |42.3%| [DL] $(B)/canondata_storage/1881367/db84cf65a0fe23688d717b9be3cef15f9249c865/resource.tar.gz{, .log} |42.3%| [DL] $(B)/canondata_storage/1924537/4c8ab803df15749c76bd45c30e057cec19cf79d5/resource.tar.gz{, .log} |42.3%| [DL] $(B)/canondata_storage/1773845/6676eb441f225906913d6af3ed308493a06ab168/resource.tar.gz{, .log} |42.3%| [DL] $(B)/canondata_storage/1900335/94df111ee1e3cf59de6d62f855e8e85690405b51/resource.tar.gz{, .log} |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |42.3%| [DL] $(B)/canondata_storage/1937027/3adc3df76c101683c5032deee2bcc54230c6a1b0/resource.tar.gz{, .log} |42.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] |42.3%| [DL] $(B)/canondata_storage/1942525/253d2e760c7825837b0d53337823dd5ebf4d6d89/resource.tar.gz{, .log} |42.3%| [DL] $(B)/canondata_storage/1903885/6dbb180a1fb0bec551465ca4925bd6f374b599c7/resource.tar.gz{, .log} |42.3%| [DL] $(B)/canondata_storage/1936273/313b77ac54cb289ac0c886126fe9dfbb6b4d0cf6/resource.tar.gz{, .log} |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |42.3%| [DL] $(B)/canondata_storage/1936842/636e5b20baf2cf59d9f38821f96eeb0a152b1897/resource.tar.gz{, .log} |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/file/libyt-gateway-file.a >> 
ydb-tests-functional-ydb_cli::import_test [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |42.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |42.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp >> ydb-tests-functional-kv_workload::import_test [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |42.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.a |42.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 >> ydb-tests-functional-scheme_tests::import_test [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |42.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kv_workload/import_test >> ydb-tests-functional-kv_workload::import_test [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |42.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |42.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part12/ydb-library-yql-tests-sql-dq_file-part12 |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.a |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |42.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.a |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestMixedStreamCypher |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> TStateStorageConfig::TestReplicaSelection >> TBlobStorageCryptoRope::TestMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestOffsetStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestInplaceStreamCypher >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_OneHost [GOOD] >> Path::CanonizedStringIsSame1 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.a |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp >> TableIndex::CompatibleSecondaryIndex [GOOD] >> TMemoryStatsAggregator::Aggregate_Single [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_DifferentHosts [GOOD] >> Path::CanonizedStringIsSame2 [GOOD] >> Path::Name_ExtraSymbols [GOOD] >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] >> TLogoBlobTest::LogoBlobSort [GOOD] >> TMemoryStatsAggregator::Aggregate_Empty [GOOD] >> Path::Name_AllSymbols [GOOD] >> TableIndex::NotCompatibleSecondaryIndex [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] >> Path::Name_EnglishAlphabet [GOOD] >> Path::CanonizeFast [GOOD] >> TBlobStorageGroupTypeTest::TestCorrectLayout [GOOD] >> TBlobStorageCryptoRope::TestInplaceStreamCypher [GOOD] >> TGuardianImpl::FollowerTracker [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentTabletId [GOOD] >> ydb-tests-tools-nemesis-ut::import_test [GOOD] >> TableIndex::CompatibleVectorIndex [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C [GOOD] >> TGuardianImpl::FollowerTrackerDuplicates [GOOD] |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] >> Path::CanonizeOld [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp >> TBlobStorageCryptoRope::PerfTestStreamCypher [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentSteps [GOOD] >> TableIndex::NotCompatibleVectorIndex [GOOD] >> Path::Name_RussianAlphabet [GOOD] >> TBlobStorageCryptoRope::UnalignedTestStreamCypher [GOOD] >> TLogoBlobTest::LogoBlobParse [GOOD] >> TBlobStorageGroupTypeTest::OutputInfoAboutErasureSpecies [GOOD] >> TLogoBlobIdHashTest::SimpleTest [GOOD] >> TChaCha::KeystreamTest1 [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C_UTF8 [GOOD] 
>> TLogoBlobTest::LogoBlobCompare [GOOD] >> TLogoBlobIdHashTest::SimpleTestPartIdDoesNotMatter [GOOD] >> Path::Name_WeirdLocale_RegularName [GOOD] >> TChaCha::KeystreamTest2 [GOOD] >> Path::Name_WeirdLocale_WeirdName [GOOD] >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.a |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a >> TLogoBlobIdHashTest::SimpleTestBlobSizeDoesNotMatter [GOOD] >> TChaCha::KeystreamTest3 [GOOD] >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] >> TChaCha::KeystreamTest4 [GOOD] >> TChaCha::KeystreamTest5 [GOOD] >> ydb-tests-tools-kqprun-tests::import_test [GOOD] >> TChaCha::KeystreamTest6 [GOOD] >> TChaCha::KeystreamTest7 [GOOD] >> TChaCha::KeystreamTest8 [GOOD] >> TChaCha::MultiEncipherOneDecipher [GOOD] >> TChaCha::SecondBlock [GOOD] >> TChaCha512::KeystreamTest1 [GOOD] >> TChaCha512::KeystreamTest2 [GOOD] >> TChaCha512::KeystreamTest3 [GOOD] >> TChaCha512::KeystreamTest4 [GOOD] |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.a |42.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TableIndex::NotCompatibleVectorIndex [GOOD] |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |42.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] |42.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/import_test >> ydb-tests-tools-nemesis-ut::import_test [GOOD] |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/utils/actors/http_sender_actor_ut.cpp >> TChaCha512::KeystreamTest5 [GOOD] >> TChaCha512::KeystreamTest6 [GOOD] >> TChaCha512::KeystreamTest7 [GOOD] >> TChaCha512::KeystreamTest8 [GOOD] >> TChaCha512::MultiEncipherOneDecipher [GOOD] >> TChaCha512::SecondBlock [GOOD] >> TChaCha512::CompatibilityTest >> TChaCha512::CompatibilityTest [GOOD] >> TChaChaVec::KeystreamTest1 [GOOD] >> TChaChaVec::KeystreamTest2 [GOOD] >> TChaChaVec::KeystreamTest3 [GOOD] >> TChaChaVec::KeystreamTest4 [GOOD] >> TChaChaVec::KeystreamTest5 [GOOD] >> TChaChaVec::KeystreamTest6 [GOOD] >> TChaChaVec::KeystreamTest7 [GOOD] >> TChaChaVec::KeystreamTest8 [GOOD] >> TChaChaVec::MultiEncipherOneDecipher [GOOD] >> TChaChaVec::SecondBlock [GOOD] >> TChaChaVec::CompatibilityTest |42.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> 
TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 ExternalConsumption: 306 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 
MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 80 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |42.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TLogoBlobTest::LogoBlobCompare [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_Empty [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 
143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 65 MemAvailable: 85 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 145 SoftLimit: 165 TargetUtilization: 185 ExternalConsumption: 194 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |42.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> Path::Name_RussianAlphabet [GOOD] |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |42.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> Path::CanonizeOld [GOOD] |42.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] |42.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> Path::Name_WeirdLocale_WeirdName [GOOD] |42.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] >> TLogPriorityMuteTests::MuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicMuteUntilTest [GOOD] >> TLogPriorityMuteTests::UnmuteTest [GOOD] >> TLogPriorityMuteTests::AtomicUnmuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteDurationTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] >> TOneOneQueueTests::TestSimpleEnqueueDequeue [GOOD] >> TOneOneQueueTests::CleanInDestructor [GOOD] >> TOneOneQueueTests::ReadIterator [GOOD] >> TPageMapTest::TestResize [GOOD] >> TPageMapTest::TestRandom >> TChaChaVec::CompatibilityTest [GOOD] >> TPoly1305::TestVector1 [GOOD] >> TPoly1305::TestVector2 [GOOD] >> TPoly1305::TestVector3 [GOOD] >> TPoly1305::TestVector4 [GOOD] >> TPoly1305Vec::TestVector1 [GOOD] >> TPoly1305Vec::TestVector2 [GOOD] >> TPoly1305Vec::TestVector3 [GOOD] >> TPoly1305Vec::TestVector4 [GOOD] >> TTest_t1ha::TestZeroInputHashIsNotZero [GOOD] >> TTest_t1ha::PerfTest [GOOD] >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] >> TStateStorageConfig::TestReplicaSelection [GOOD] >> TStateStorageConfig::TestMultiReplicaFailDomains |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libpy3connector-api-common.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |42.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/import_test >> ydb-tests-postgres_integrations-go-libpq::import_test [GOOD] |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libpy3connector-api-common.global.a |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |42.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.a |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} 
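Reading note on the TMemoryStatsAggregator dumps above: in the DifferentHosts scenarios every field of the aggregate record is the per-field sum of the three host records (AnonRss 11+12+13 = 36, ..., QueryExecutionLimit 161+162+163 = 486), while the OneHost scenarios visibly leave host-wide fields such as MemTotal, HardLimit and ExternalConsumption un-summed. The sketch below is illustrative only — it is not the YDB aggregator code, just a check of the summation arithmetic printed in the log, using a subset of the field names that appear there.

# Minimal sketch of the per-field summation visible in the dumps above;
# not the YDB TMemoryStatsAggregator implementation.
hosts = [
    {"AnonRss": 11, "CGroupLimit": 21, "MemTotal": 31, "ExternalConsumption": 101},
    {"AnonRss": 12, "CGroupLimit": 22, "MemTotal": 32, "ExternalConsumption": 102},
    {"AnonRss": 13, "CGroupLimit": 23, "MemTotal": 33, "ExternalConsumption": 103},
]

aggregate = {field: sum(h[field] for h in hosts) for field in hosts[0]}

# Matches the "DifferentHosts" aggregate record printed in the log.
assert aggregate == {"AnonRss": 36, "CGroupLimit": 66, "MemTotal": 96,
                     "ExternalConsumption": 306}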
$(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |42.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |42.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |42.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |42.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |42.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/crypto/ut/unittest >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |42.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> ydb-tests-functional-restarts::import_test [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp >> TStateStorageConfig::TestMultiReplicaFailDomains [GOOD] >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> ydb-tests-functional-audit::import_test [GOOD] >> ydb-tests-functional-compatibility::import_test [GOOD] >> TBlobStorageCompStrat::Test1 |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |42.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |42.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |42.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/import_test >> ydb-tests-functional-restarts::import_test [GOOD] >> TBlobStorageCompStrat::Test1 [GOOD] |42.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |42.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp >> ydb-tests-fq-mem_alloc::import_test [GOOD] >> ydb-tests-functional-hive::import_test [GOOD] >> TPageMapTest::TestRandom [GOOD] >> TPageMapTest::TestIntrusive [GOOD] >> TPageMapTest::TestSimplePointer [GOOD] >> TPageMapTest::TestSharedPointer [GOOD] >> TPageMapTest::TestSimplePointerFull [GOOD] >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> ydb-tests-functional-serverless::import_test [GOOD] |42.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/import_test >> ydb-tests-functional-compatibility::import_test [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |42.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/import_test >> ydb-tests-functional-audit::import_test [GOOD] |42.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |43.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |43.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp >> 
ErasureBrandNew::Block42_restore [GOOD] >> ErasureBrandNew::Block42_restore_benchmark >> TErasurePerfTest::Restore [GOOD] >> TErasureSmallBlobSizePerfTest::StringErasureMode [GOOD] >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |43.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] |43.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |43.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] >> ydb-tests-fq-http_api::import_test [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |43.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |43.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/util/ut/unittest >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |43.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut_perf/unittest >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] |43.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |43.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |43.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp >> kqprun_recipe::import_test [GOOD] >> test.py::py2_flake8 [GOOD] >> 
ydb-tests-functional-api::import_test [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |43.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] |43.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |43.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD] |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a >> local_ydb::import_test [GOOD] >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |43.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |43.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.a >> __main__.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.a >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] |43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |43.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/import_test >> local_ydb::import_test [GOOD] |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a >> TTimeGridTest::TimeGrid [GOOD] >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> ydb-tests-functional-rename::import_test [GOOD] |43.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/flake8 >> __main__.py::flake8 [GOOD] |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.a |43.3%| [TA] $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |43.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/flake8 >> __main__.py::flake8 [GOOD] |43.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |43.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/flake8 >> __main__.py::flake8 [GOOD] |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.a >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> ydb-tests-fq-yds::import_test [GOOD] >> test.py::py2_flake8 [GOOD] |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.a >> ClosedIntervalSet::Union |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.a |43.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/import_test >> ydb-tests-functional-rename::import_test [GOOD] |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations [GOOD] >> TStateStorageConfig::UniformityTest |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.a |43.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.a |43.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/metering/ut/unittest >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> replay::import_test [GOOD] >> ydb-tests-fq-restarts::import_test [GOOD] >> test_serializable.py::flake8 [GOOD] |43.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/tools/simple_json_diff/flake8 >> __main__.py::flake8 [GOOD] |43.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.a |43.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a >> ClosedIntervalSet::Union [GOOD] >> ClosedIntervalSet::Difference >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] |43.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/import_test >> replay::import_test [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |43.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |43.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a >> test_kqprun_recipe.py::flake8 [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |43.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/flake8 >> test_serializable.py::flake8 [GOOD] |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |43.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |43.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/_dc9abab7075b555a3ef54c0d31.yasm >> test.py::py2_flake8 [GOOD] >> ydb-tests-functional-serializable::import_test [GOOD] >> test_transform.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |43.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |43.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/dc9abab7075b555a3ef54c0d31.auxcpp >> runner.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] 
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |43.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp >> ErasureBrandNew::Block42_restore_benchmark [GOOD] >> ydb-tests-functional-sqs-common::import_test [GOOD] >> __main__.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |43.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD] |43.6%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |43.6%| [DL] $(B)/canondata_storage/1784826/01a49f49738931cb8fe58f887acf08cd22b8816f/resource.tar.gz{, .log} |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp >> statistics_workload::import_test [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> BufferWithGaps::Basic [GOOD] |43.6%| [DL] $(B)/canondata_storage/1936273/ad3717b195ca16459f341d5ab440a7fc0685a6bb/resource.tar.gz{, .log} |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp >> TBatchedVecTest::TestToStringInt [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp >> BufferWithGaps::IsReadable [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp >> PtrTest::Test1 [GOOD] |43.7%| [DL] $(B)/canondata_storage/1937367/24a3ed09a524cab36402a50f39546eeec677142d/resource.tar.gz{, .log} >> TBatchedVecTest::TestOutputTOutputType [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_restore_benchmark [GOOD] Test command err: totalSize# 498671692 period1# 1.191507s period2# 0.601519s MB/s1# 418.5218316 MB/s2# 829.0206826 factor# 1.980830198 |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |43.7%| [DL] $(B)/canondata_storage/1871002/e9746e2cfbb706bb72321a7abf9a224f0ef61b45/resource.tar.gz{, .log} |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/flake8 >> __main__.py::flake8 [GOOD] |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |43.7%| [DL] $(B)/canondata_storage/1775319/2fe33d3feab8838bd96c496a2503fc8b7760e1af/resource.tar.gz{, .log} |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/statistics_workload/import_test >> statistics_workload::import_test [GOOD] |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |43.7%| [DL] $(B)/canondata_storage/995452/8d06d1c638f81a03e22880d706b0ed36b13787b3/resource.tar.gz{, .log} |43.7%| [DL] $(B)/canondata_storage/1599023/fd394c006ab90839bd43e8a0999dbcda754af8eb/resource.tar.gz{, .log} |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
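The ErasureBrandNew::Block42_restore_benchmark output above prints only raw figures; its derived numbers are consistent with throughput = totalSize / period, with MB taken as 10^6 bytes (an assumption — the log does not state the unit), and factor = MB/s2 / MB/s1. A quick recomputation, illustrative only:

# Recomputes the benchmark line above (assumes MB == 10**6 bytes).
total_size = 498_671_692               # totalSize# from the log, bytes
period1, period2 = 1.191507, 0.601519  # seconds

mbs1 = total_size / period1 / 1e6      # ~418.52 -> MB/s1 in the log
mbs2 = total_size / period2 / 1e6      # ~829.02 -> MB/s2 in the log
factor = mbs2 / mbs1                   # ~1.9808 -> factor# in the log

print(f"{mbs1:.2f} {mbs2:.2f} {factor:.4f}")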
$(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |43.7%| [DL] $(B)/canondata_storage/1784826/5acf470cb57912e11b4cd6083ac398f4eb2ce3b5/resource.tar.gz{, .log} >> kikimr_config.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> ydb-dstool::import_test [GOOD] |43.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |43.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/bsconfig/bsconfig_ut.cpp |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |43.7%| [DL] $(B)/canondata_storage/1925821/e5366a80242cb3e6ad8a288604782493b360be86/resource.tar.gz{, .log} |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |43.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |43.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |43.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |43.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/apps/dstool/import_test >> ydb-dstool::import_test [GOOD] |43.8%| [DL] $(B)/canondata_storage/1942671/2f792d15a60e5018e659b61b46adbe398e438ffb/resource.tar.gz{, .log} |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |43.8%| [DL] $(B)/canondata_storage/1942173/f8a5d34ee2135f3e8e692d721f4410199915185e/resource.tar.gz{, .log} |43.8%| [DL] $(B)/canondata_storage/1031349/5fede2b676e5759a71fc5ee84a5366ea2398c3eb/resource.tar.gz{, .log} |43.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/base/ut/gtest >> TBatchedVecTest::TestOutputTOutputType [GOOD] |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |43.7%| [DL] $(B)/canondata_storage/1936947/900ed4b07b3e497bdca6ea0063b227dc2b03c52d/resource.tar.gz{, .log} |43.8%| [DL] $(B)/canondata_storage/1775319/1b7c1e5298ad827e3c0e08d1d3f96ba4f42d8217/resource.tar.gz{, .log} |43.8%| [DL] $(B)/canondata_storage/937458/4bf72cbe06e1a5a68a1245f072f61a840c65f346/resource.tar.gz{, .log} |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |43.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |43.8%| [DL] $(B)/canondata_storage/1130705/0b2e5d7ac211de720dc0c4641c32a1cc0d2b67b0/resource.tar.gz{, .log} |43.8%| [DL] $(B)/canondata_storage/1916746/23de079a06c649cbc7ea9c207ee17f83d4a16a8d/resource.tar.gz{, .log} |43.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.8%| [DL] $(B)/canondata_storage/1937367/f0fc4d0046eeecd5aedc367d24e7c146f804556a/resource.tar.gz{, .log} |43.8%| [DL] $(B)/canondata_storage/1942100/00ac27cb3793ebb6e30aaa3a242eb80980877725/resource.tar.gz{, .log} >> test.py::py2_flake8 [GOOD] |43.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |43.8%| [DL] 
$(B)/canondata_storage/1600758/8cab8973f2ea39497a139c994f146f17f194bc88/resource.tar.gz{, .log} |43.8%| [DL] $(B)/canondata_storage/1847551/8a02f6b80ca1ec66d793b87dd2cd04bc727861e5/resource.tar.gz{, .log} |43.8%| [DL] $(B)/canondata_storage/1775059/dfb29f074743c819be85b8e41c2f4f4f5970e492/resource.tar.gz{, .log} |43.9%| [DL] $(B)/canondata_storage/1871182/6bfabc62940fdb174a55acbc4fb75e975f32df0b/resource.tar.gz{, .log} |43.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.9%| [DL] $(B)/canondata_storage/1900335/be004cdbc67866ce1cb15f7c85503d8962b948a4/resource.tar.gz{, .log} |43.9%| [DL] $(B)/canondata_storage/1937001/2391ee3b82c774fbfdf3b5fe09aeba01826624f7/resource.tar.gz{, .log} |43.9%| [DL] $(B)/canondata_storage/1809005/381bdd936adada83f5b48f5d53fe44e3adc4ea7d/resource.tar.gz{, .log} >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |43.9%| [DL] $(B)/canondata_storage/995452/78b37afc56674ddcf0dda847c991261f95c763f2/resource.tar.gz{, .log} |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/mkql_proto/mkql_proto_ut.cpp |43.9%| [DL] $(B)/canondata_storage/1784826/6c4e23b08b618ad38a21babd86e439d03aa22777/resource.tar.gz{, .log} |43.9%| [DL] $(B)/canondata_storage/1936947/77a3a4e86d91d5fe22d4a25bee6ca3f56b15653e/resource.tar.gz{, .log} |43.9%| [DL] $(B)/canondata_storage/1936947/f00fcb6d97bcb608640b679ee786567fc190891c/resource.tar.gz{, .log} |43.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |43.9%| [DL] $(B)/canondata_storage/1936997/7795d91e16dc8934afb9cac9de729a7e77d64422/resource.tar.gz{, .log} |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp >> test.py::py2_flake8 [GOOD] |43.9%| [DL] $(B)/canondata_storage/1130705/75c67aa1b52e1003c4244d8776963fa4e2ddd3be/resource.tar.gz{, .log} |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |43.9%| [DL] $(B)/canondata_storage/1881367/ee39bf74f87a4d157fac936390f8e3e30882b7ef/resource.tar.gz{, .log} |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |43.9%| [DL] $(B)/canondata_storage/1937027/670f34f9f911e780c84d06ea0c4a9f3d52e759cc/resource.tar.gz{, .log} |44.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |44.0%| [DL] $(B)/canondata_storage/1931696/83451cbd5c05baf359743802ffeb9d68445da80c/resource.tar.gz{, .log} |44.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |43.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |43.9%| [DL] $(B)/canondata_storage/1936997/9b38bc90047b0c4770ffeeb948e381476a3c0703/resource.tar.gz{, .log} |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |44.0%| [DL] $(B)/canondata_storage/1899731/ccde90ce5670ebf87e5c6a2c04f40658049b2aee/resource.tar.gz{, .log} |44.0%| [DL] 
$(B)/canondata_storage/1900335/c9d0ca605faf2698cf00f33d61c4609eb54f9408/resource.tar.gz{, .log} |44.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |44.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} >> ClosedIntervalSet::Difference [GOOD] >> ClosedIntervalSet::Contains [GOOD] >> ClosedIntervalSet::EnumInRange >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] >> test.py::py2_flake8 [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] |44.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |44.0%| [DL] $(B)/canondata_storage/1936842/0add181b74256050def99faca33d757d5b5d9213/resource.tar.gz{, .log} |44.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |44.0%| [DL] $(B)/canondata_storage/1923547/d0656f40d6f986668efaffc1efccda90baa2359a/resource.tar.gz{, .log} >> TStateStorageConfig::UniformityTest [GOOD] >> http_client.py::flake8 [GOOD] >> query_results.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> ClosedIntervalSet::EnumInRange [GOOD] >> ClosedIntervalSet::EnumInRangeReverse |43.9%| [DL] $(B)/canondata_storage/1809005/c97035bd7617563b46a0820134ee4ac1af15af52/resource.tar.gz{, .log} |43.9%| [DL] $(B)/canondata_storage/1784826/10e350a94dce68db6577b228f7e65934ac1fc85e/resource.tar.gz{, .log} |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |44.0%| [DL] $(B)/canondata_storage/1900335/45fa7b62bfc436d95c883178870ebc86b564d87c/resource.tar.gz{, .log} |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |44.0%| [DL] $(B)/canondata_storage/1871002/7fafa598e7f20625a7c57887ea10ebeee83ea3a5/resource.tar.gz{, .log} |44.0%| [DL] $(B)/canondata_storage/1937001/891378d9aa3b40f9bb17b962cd367cec5ae2d3e9/resource.tar.gz{, .log} |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> test.py::py2_flake8 [GOOD] |44.0%| [DL] $(B)/canondata_storage/1942525/425976ea7290bcb7451d335640e8599ffb55fa7a/resource.tar.gz{, .log} |44.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |44.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.0%| [DL] $(B)/canondata_storage/1031349/6bb24df9da4b98bf8c413d1c1c448000ac90a40d/resource.tar.gz{, .log} |44.0%| [DL] $(B)/canondata_storage/1775059/cd7ea330e6f31e023b144d9e8cd414d6b5bfa5da/resource.tar.gz{, .log} |44.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/http_api_client/flake8 >> query_results.py::flake8 [GOOD] |44.0%| [DL] $(B)/canondata_storage/1599023/3620bf59870617da29d0c99266d709935d1c2b9b/resource.tar.gz{, .log} |44.0%| [DL] 
$(B)/canondata_storage/1600758/743fec0dc57746c777c8f1b3b8fd0c82a29a2914/resource.tar.gz{, .log} |44.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> ClosedIntervalSet::EnumInRangeReverse [GOOD] >> GivenIdRange::IssueNewRange [GOOD] >> GivenIdRange::Trim [GOOD] >> GivenIdRange::Subtract |44.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |44.0%| [DL] $(B)/canondata_storage/1937001/009017e496ccb73a3d6e033003c2ef47734562eb/resource.tar.gz{, .log} |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |44.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |44.0%| [DL] $(B)/canondata_storage/212715/8c116101a7f8683fa1e963a6b0079bf9c213f4e3/resource.tar.gz{, .log} |44.0%| [DL] $(B)/canondata_storage/1775319/3a09dd529e9fa310fb6dffe9fe276926ac1befbd/resource.tar.gz{, .log} |44.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |44.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |44.1%| [DL] $(B)/canondata_storage/937458/dab17048102e83ee373249c500ad7a1a9718d95c/resource.tar.gz{, .log} >> main.py::flake8 [GOOD] >> ydb_configure::import_test [GOOD] >> GivenIdRange::Subtract [GOOD] >> GivenIdRange::Points >> ydb-tests-fq-s3::import_test [GOOD] |44.1%| [DL] $(B)/canondata_storage/1903280/1302f1777838aa638bf5151db4710571d26da566/resource.tar.gz{, .log} |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |44.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.1%| [DL] $(B)/canondata_storage/937458/c396c80e8362440af36c99b3904127e9b520ebb6/resource.tar.gz{, .log} |44.1%| [DL] $(B)/canondata_storage/1937001/7bff8f98ab448f07ac3e80a4af0d2aed91a791f3/resource.tar.gz{, .log} |44.1%| [DL] $(B)/canondata_storage/1889210/99053ce259e5acf0cd21c3100078510bb975c3cf/resource.tar.gz{, .log} |44.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut/unittest >> TStateStorageConfig::UniformityTest [GOOD] |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |44.1%| [DL] $(B)/canondata_storage/1942173/9e555000605607f149a299077d52b9ef3a944215/resource.tar.gz{, .log} |44.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |44.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a >> GivenIdRange::Points [GOOD] >> GivenIdRange::Runs [GOOD] >> GivenIdRange::Allocate |44.1%| [DL] $(B)/canondata_storage/1880306/c238261ddd77e921e2cea2a78db88e0431b4cb3e/resource.tar.gz{, .log} |44.1%| [DL] $(B)/canondata_storage/1597364/74246415d71e5dc9c8dae1626e184fae0faa778c/resource.tar.gz{, .log} |44.1%| [DL] $(B)/canondata_storage/1781765/79e3b478a7ff22ae5ad7d4f8b04d16d423583c5d/resource.tar.gz{, .log} |44.1%| [DL] $(B)/canondata_storage/1871002/7b1fef4883196ccbe709169b603793723ad3e469/resource.tar.gz{, .log} |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |44.2%| [DL] 
$(B)/canondata_storage/1903885/e0147df24000ea90170041cff25000baa7559abf/resource.tar.gz{, .log} |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |44.2%| [DL] $(B)/canondata_storage/1784117/bb10ae9ea87fb7aac538ebffcd58fdc507d9f394/resource.tar.gz{, .log} |44.2%| [DL] $(B)/canondata_storage/1900335/773a1202d72424e925be03bb8ba15e6cc71fa3f4/resource.tar.gz{, .log} |44.2%| [DL] $(B)/canondata_storage/1931696/221cedac6157fdff4d16e16ac8e9133139de7efd/resource.tar.gz{, .log} |44.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.2%| [DL] $(B)/canondata_storage/1600758/6a579ce429b85915b6ff135574bc65e433d5b02a/resource.tar.gz{, .log} >> tstool::import_test [GOOD] >> GivenIdRange::Allocate [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.2%| [DL] $(B)/canondata_storage/1937001/589144012e0eb6b64ff634e7a287dfd36b22cdfd/resource.tar.gz{, .log} |44.2%| [DL] $(B)/canondata_storage/1931696/0a5f01ad7bf7c863b92eab0e8aff7f87ecb60e51/resource.tar.gz{, .log} |44.2%| [DL] $(B)/canondata_storage/1936997/7e9b92fe2e738b03ed247110d482d009fa289ed5/resource.tar.gz{, .log} |44.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |44.2%| [DL] $(B)/canondata_storage/1925842/bc33ee76e726ade051594823272684ec4117339d/resource.tar.gz{, .log} |44.2%| [DL] $(B)/canondata_storage/1880306/2c99349d3c5fcb053d4b6ce0a8557550c848af18/resource.tar.gz{, .log} |44.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |44.2%| [DL] $(B)/canondata_storage/1925842/1c73675b2ef22d3db833d7bb81e6d092b9398bca/resource.tar.gz{, .log} |44.3%| [DL] $(B)/canondata_storage/1775319/57a50169b57016de03af26313596e9e552bfb0b3/resource.tar.gz{, .log} |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/apps/dstool/flake8 >> main.py::flake8 [GOOD] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a >> ydb-tests-functional-clickbench::import_test [GOOD] |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/import_test >> ydb_configure::import_test [GOOD] |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD] |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/tstool/import_test >> tstool::import_test [GOOD] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |44.3%| [DL] 
$(B)/canondata_storage/1880306/01f3c1debba8d61b9fa1fa512eb76ffb3cb3838e/resource.tar.gz{, .log} |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |44.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blob_depot/ut/unittest >> GivenIdRange::Allocate [GOOD] |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.a >> test_liveness_wardens.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |44.3%| [DL] $(B)/canondata_storage/1597364/674fcbbcaed1c9cfba2b74e3352aab82299cbfca/resource.tar.gz{, .log} |44.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |44.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} >> test_actorsystem.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |44.4%| [DL] $(B)/canondata_storage/1871002/e8ac0c0b88a39f7e5d48369b8a24b2e049e5bf3d/resource.tar.gz{, .log} |44.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/flake8 >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |44.4%| [DL] $(B)/canondata_storage/1942525/80cf40971a29683f0f53d3784e5ea988eae1a473/resource.tar.gz{, .log} |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |44.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |44.4%| [DL] $(B)/canondata_storage/1597364/bb2a478f5e9c5bfeb5e6ba7fde27b7879d5d2f67/resource.tar.gz{, .log} |44.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |44.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |44.4%| [DL] $(B)/canondata_storage/1900335/6da4e798745eb2a68f5231cd7d5c7f35ec91c905/resource.tar.gz{, .log} |44.4%| [DL] $(B)/canondata_storage/1924537/8552d3077ff10f95a9d6ae208c13d115e0b5bdf8/resource.tar.gz{, .log} |44.4%| [DL] $(B)/canondata_storage/995452/e78675a82a4300d32887a13f4b9e86cb1608f590/resource.tar.gz{, .log} |44.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |44.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |44.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd >> DoubleIndexedTests::TestErase [GOOD] >> DoubleIndexedTests::TestFind [GOOD] >> DoubleIndexedTests::TestUpsertBySingleKey [GOOD] >> DoubleIndexedTests::TestUpsertByBothKeys [GOOD] >> DoubleIndexedTests::TestMerge [GOOD] >> YdbVersion::StoredReadableBy [GOOD] >> VersionParser::Basic [GOOD] >> YdbVersion::NewNbsIncompatibleCurrent [GOOD] >> YdbVersion::CompatibleWithSelf [GOOD] >> YdbVersion::DefaultPrevYear [GOOD] >> YdbVersion::CurrentCanLoadFrom [GOOD] >> YdbVersion::CurrentCanLoadFromAllOlder [GOOD] >> YdbVersion::TrunkAndStable [GOOD] >> OldFormat::SameVersion [GOOD] >> YdbVersion::SomeRulesAndOtherForbidden [GOOD] >> YdbVersion::DefaultDifferentBuildIncompatible [GOOD] >> OldFormat::DefaultRules [GOOD] >> YdbVersion::OneAcceptedVersion [GOOD] >> YdbVersion::LimitNew [GOOD] >> YdbVersion::DefaultPrevMajor [GOOD] >> YdbVersion::ForbiddenMinor [GOOD] >> YdbVersion::DefaultRulesWithExtraForbidden [GOOD] >> YdbVersion::OtherComponent [GOOD] |44.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... 
grpc.pb.h} >> YdbVersion::StoredWithRules [GOOD] >> ydb-tests-stability-ydb::import_test [GOOD] >> OldFormat::PrevYear [GOOD] >> tpc_tests.py::flake8 [GOOD] >> ConfigProto::ForbidNewRequired [GOOD] >> YdbVersion::OldNbsIncompatibleStored [GOOD] >> YdbVersion::NewNbsCurrent [GOOD] >> YdbVersion::LimitOld [GOOD] >> YdbVersion::CurrentCanLoadFromIncompatible [GOOD] >> OldFormat::Trunk [GOOD] >> YdbVersion::DefaultSameVersion [GOOD] >> YdbVersion::DefaultNextYear [GOOD] >> YdbVersion::YDBAndNbs [GOOD] >> YdbVersion::CurrentStoresReadableBy [GOOD] >> YdbVersion::PrintCurrentVersionProto [GOOD] >> YdbVersion::DefaultOldMajor [GOOD] >> OldFormat::UnexpectedTrunk [GOOD] >> YdbVersion::StoredReadableByIncompatible [GOOD] >> YdbVersion::DifferentYdbVersionsWithNBSRules [GOOD] >> common.cpp::clang_format [GOOD] >> OldFormat::OldNbs [GOOD] >> common.h::clang_format [GOOD] >> YdbVersion::DefaultCompatible [GOOD] >> YdbVersion::DefaultHotfix [GOOD] >> YdbVersion::ExtraAndForbidden [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> YdbVersion::DefaultNextMajor [GOOD] |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |44.4%| [DL] $(B)/canondata_storage/1881367/55ee657a8e2fea05538badc8317b24fcb3a4115c/resource.tar.gz{, .log} |44.4%| [DL] $(B)/canondata_storage/1903885/e5bf03e50274ceb0e0a7794568205a1dbe547554/resource.tar.gz{, .log} |44.4%| [TA] $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |44.5%| [DL] $(B)/canondata_storage/1784826/ca2edc7f1a2a5c0080870544a7f83cdc18543a05/resource.tar.gz{, .log} |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a >> YdbVersion::StoredWithRulesIncompatible [GOOD] >> YdbVersion::Component [GOOD] >> YdbVersion::TrunkYDBAndNbs [GOOD] >> YdbVersion::DefaultDifferentBuild [GOOD] >> YdbVersion::OldNbsStored [GOOD] >> YdbVersion::DefaultNewMajor [GOOD] >> OldFormat::TooOld [GOOD] |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |44.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |44.5%| [DL] $(B)/canondata_storage/1937492/63930c59f78bd833253a0a3dd62479c8ad6cb321/resource.tar.gz{, .log} |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> Checks::OpaqueMaps [GOOD] >> Checks::MapValidation [GOOD] >> Checks::IntArrayValidation [GOOD] >> Checks::ErrorInCheck [GOOD] >> Checks::BasicStringChecks [GOOD] >> Checks::BasicIntChecks [GOOD] >> ConfigValidation::StaticGroupSizesGrow [GOOD] |44.5%| [DL] $(B)/canondata_storage/1942173/0831c8429f2eb96fec38ae943a6ac1e22d739948/resource.tar.gz{, .log} |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |44.5%| [DL] $(B)/canondata_storage/1936273/b293975a7642b91c5614f8db12d1bd08a0069400/resource.tar.gz{, .log} |44.5%| [DL] $(B)/canondata_storage/1847551/682469d9195325562bb93194e1d96345f651ca93/resource.tar.gz{, .log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/version/ut/unittest >> OldFormat::TooOld [GOOD] Test command 
err: Application: "ydb" |44.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/import_test >> ydb-tests-stability-ydb::import_test [GOOD] >> ConfigValidation::SameStaticGroup [GOOD] >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> ConfigValidation::StaticGroupSizesShrink [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] |44.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> DoubleIndexedTests::TestMerge [GOOD] |44.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |44.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/common/clang_format >> common.h::clang_format [GOOD] |44.5%| [DL] $(B)/canondata_storage/937458/623d3bd7a832446925fc7d56bc3639f7411705b4/resource.tar.gz{, .log} |44.5%| [DL] $(B)/canondata_storage/1923547/4a11bf336fd7fb8da5f5162c16271b830cef13e4/resource.tar.gz{, .log} |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |44.5%| [DL] $(B)/canondata_storage/1773845/06fab929582e640fdce3e7cdf48ad02f2a7fe75f/resource.tar.gz{, .log} |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |44.6%| [DL] $(B)/canondata_storage/1937001/48471e6d9c7324ace71b9be0fd74072f683de033/resource.tar.gz{, .log} |44.6%| [DL] $(B)/canondata_storage/1889210/25bb12516fb50fd6341f375d4bd251cc1316e0aa/resource.tar.gz{, .log} |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp >> test.py::py2_flake8 [GOOD] >> ydb-tests-functional-postgresql::import_test [GOOD] |44.6%| [DL] $(B)/canondata_storage/212715/b05f0bb536c8488f970e72a13051cd475b3d6a41/resource.tar.gz{, .log} |44.6%| [DL] $(B)/canondata_storage/1931696/7e23ac78acb0bf6a43a98f8d07393450402130e6/resource.tar.gz{, .log} |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |44.6%| [DL] $(B)/canondata_storage/1773845/0823c71a646fdb00c7b391638ef2b27d82ee864d/resource.tar.gz{, .log} |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |44.6%| [DL] $(B)/canondata_storage/1900335/706a47a293cd09905f9fb37c502faaa4d8e51b5e/resource.tar.gz{, .log} |44.6%| [DL] $(B)/canondata_storage/1946324/cf38a9e18bcb2d145a9ceedb60a30cd36c433437/resource.tar.gz{, .log} |44.6%| [DL] $(B)/canondata_storage/1925821/6aa17395400a3fafbb1eacfdd2dd26ec26994660/resource.tar.gz{, .log} |44.6%| [DL] $(B)/canondata_storage/1937367/9e6103f3844abd305fb3ecba5a38bd2939f032ea/resource.tar.gz{, .log} |44.6%| [DL] $(B)/canondata_storage/1880306/dcc32ce026896abde9b111c8c4cae06611f4a004/resource.tar.gz{, .log} >> test.py::py2_flake8 [GOOD] >> ydb-tests-functional-limits::import_test [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> run_tests::import_test [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] |44.6%| [DL] $(B)/canondata_storage/212715/84f834803ff8b4e4bab2716894be9d659edfc198/resource.tar.gz{, .log} |44.6%| [DL] 
$(B)/canondata_storage/1937492/caaa9a42499278fe8d2abe06fe2b17bceeb09e18/resource.tar.gz{, .log} |44.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |44.7%| [DL] $(B)/canondata_storage/1931696/a36bf900a22fac0635ebe830de6bcae1c73133c0/resource.tar.gz{, .log} |44.7%| [DL] $(B)/canondata_storage/1942671/4927147e0e1a576d9194a32faf141dd18c8830e8/resource.tar.gz{, .log} |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |44.7%| [DL] $(B)/canondata_storage/1773845/0758bece23c981ada2b0dd6767d862a51f17b041/resource.tar.gz{, .log} |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_checks/unittest >> Checks::BasicIntChecks [GOOD] |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |44.7%| [DL] $(B)/canondata_storage/1809005/6c0b793ded39fed6215e26ef8284b30340b9dfac/resource.tar.gz{, .log} |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |44.7%| [DL] $(B)/canondata_storage/1871102/680e072d487740a733846c6fb8acae02496a7035/resource.tar.gz{, .log} |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/ut/unittest >> ConfigValidation::StaticGroupSizesShrink [GOOD] |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD] |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |44.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/import_test >> run_tests::import_test [GOOD] |44.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/libydb_topic-ut-ut_utils.a |44.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |44.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |44.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] 
|44.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 |44.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp >> integrations_test.py::flake8 [GOOD] >> ydb-tests-functional-dynumber::import_test [GOOD] |44.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |44.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |44.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp >> result_compare::import_test [GOOD] >> test.py::py2_flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |44.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |44.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/flake8 >> test_tenants.py::flake8 [GOOD] |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/flake8 >> integrations_test.py::flake8 [GOOD] |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |44.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/dynumber/import_test >> ydb-tests-functional-dynumber::import_test [GOOD] |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] >> ydb-library-yaml_config-ut_transform::import_test [GOOD] >> test_kv_workload.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/import_test >> result_compare::import_test [GOOD] |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} >> Json::BasicRendering [GOOD] >> StaticConfigExamples::SingleNodeWithFile [GOOD] >> StaticConfigExamples::BLOCK42 [GOOD] >> StaticConfigExamples::MIRROR_3_DC_NODES [GOOD] >> StaticConfigExamples::SINGLE_NODE_IN_MEMORY [GOOD] >> StaticConfigExamples::MIRROR_3_DC_NODES_IN_MEMORY [GOOD] >> StaticConfigExamples::MIRROR_3_DC_9_NODES [GOOD] |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kv_workload/flake8 >> test_kv_workload.py::flake8 [GOOD] |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> JsonEnvelopeTest::Simple [GOOD] >> JsonEnvelopeTest::BinaryData [GOOD] >> JsonEnvelopeTest::NoReplace [GOOD] >> JsonEnvelopeTest::Escape [GOOD] >> JsonEnvelopeTest::ArrayItem [GOOD] >> MetaCache::BasicForwarding [GOOD] >> MetaCache::TimeoutFallback [GOOD] |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... 
grpc.pb.h} |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |45.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} >> test.py::flake8 [GOOD] |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |45.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |45.0%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp >> ydb-tests-functional-canonical::import_test [GOOD] |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/json/ut/unittest >> Json::BasicRendering [GOOD] |45.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |45.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |45.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/log_backend/ut/unittest >> JsonEnvelopeTest::ArrayItem [GOOD] |45.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |45.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/example_configs/unittest >> StaticConfigExamples::MIRROR_3_DC_9_NODES [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/meta/ut/unittest >> MetaCache::TimeoutFallback [GOOD] Test command err: 2024-11-21T17:41:55.076437Z :HTTP INFO: Listening on http://[::]:4438 2024-11-21T17:41:55.076573Z :HTTP INFO: Listening on http://[::]:27770 2024-11-21T17:41:55.076698Z :HTTP DEBUG: resolving 127.0.0.1:4438 2024-11-21T17:41:55.076713Z :HTTP DEBUG: connecting 2024-11-21T17:41:55.076890Z :HTTP DEBUG: (#17,127.0.0.1:4438) outgoing connection opened 2024-11-21T17:41:55.076897Z :HTTP DEBUG: (#17,127.0.0.1:4438) <- (GET /server) 2024-11-21T17:41:55.076969Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:47094) incoming connection opened 2024-11-21T17:41:55.077017Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:47094) -> (GET /server) 2024-11-21T17:41:55.077036Z :HTTP DEBUG: Updating ownership http://127.0.0.1:27770 with deadline 2024-11-21T17:42:55.077030Z 2024-11-21T17:41:55.077041Z :HTTP DEBUG: SetRefreshTime "/server" to 2024-11-21T17:42:55.077030Z (+1732210975.077030s) 2024-11-21T17:41:55.077051Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:27770 timeout 30.000000s 2024-11-21T17:41:55.077072Z :HTTP DEBUG: resolving 127.0.0.1:27770 2024-11-21T17:41:55.077081Z :HTTP DEBUG: connecting 2024-11-21T17:41:55.077145Z :HTTP DEBUG: (#19,127.0.0.1:27770) outgoing connection opened 2024-11-21T17:41:55.077150Z :HTTP DEBUG: (#19,127.0.0.1:27770) <- (GET 
/server) 2024-11-21T17:41:55.077187Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:50446) incoming connection opened 2024-11-21T17:41:55.077210Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:50446) -> (GET /server) 2024-11-21T17:41:55.077248Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:50446) <- (200 Found) 2024-11-21T17:41:55.077273Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:50446) connection closed 2024-11-21T17:41:55.077357Z :HTTP DEBUG: (#19,127.0.0.1:27770) -> (200 Found) 2024-11-21T17:41:55.077364Z :HTTP DEBUG: (#19,127.0.0.1:27770) connection closed 2024-11-21T17:41:55.077452Z :HTTP DEBUG: Cache received successfull (200) response for /server 2024-11-21T17:41:55.077731Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:47094) <- (200 Found) 2024-11-21T17:41:55.077774Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:47094) connection closed 2024-11-21T17:41:55.077860Z :HTTP DEBUG: (#17,127.0.0.1:4438) -> (200 Found) 2024-11-21T17:41:55.077867Z :HTTP DEBUG: (#17,127.0.0.1:4438) connection closed 2024-11-21T17:41:55.080738Z :HTTP INFO: Listening on http://[::]:11480 2024-11-21T17:41:55.080845Z :HTTP INFO: Listening on http://[::]:16005 2024-11-21T17:41:55.080921Z :HTTP DEBUG: resolving 127.0.0.1:11480 2024-11-21T17:41:55.080933Z :HTTP DEBUG: connecting 2024-11-21T17:41:55.081041Z :HTTP DEBUG: (#17,127.0.0.1:11480) outgoing connection opened 2024-11-21T17:41:55.081048Z :HTTP DEBUG: (#17,127.0.0.1:11480) <- (GET /server) 2024-11-21T17:41:55.081113Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:55722) incoming connection opened 2024-11-21T17:41:55.081130Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:55722) -> (GET /server) 2024-11-21T17:41:55.081148Z :HTTP DEBUG: Updating ownership http://127.0.0.1:16005 with deadline 2024-11-21T17:51:55.081141Z 2024-11-21T17:41:55.081153Z :HTTP DEBUG: SetRefreshTime "/server" to 2024-11-21T17:51:55.081141Z (+1732211515.081141s) 2024-11-21T17:41:55.081159Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:16005 timeout 30.000000s 2024-11-21T17:41:55.081176Z :HTTP DEBUG: resolving 127.0.0.1:16005 2024-11-21T17:41:55.081185Z :HTTP DEBUG: connecting 2024-11-21T17:41:55.081281Z :HTTP DEBUG: (#19,127.0.0.1:16005) outgoing connection opened 2024-11-21T17:41:55.081287Z :HTTP DEBUG: (#19,127.0.0.1:16005) <- (GET /server) 2024-11-21T17:41:55.081308Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:48042) incoming connection opened 2024-11-21T17:41:55.081351Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:48042) -> (GET /server) 2024-11-21T17:41:55.091496Z :HTTP ERROR: (#19,127.0.0.1:16005) connection closed with error: Connection timed out 2024-11-21T17:41:55.091633Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:48042) connection closed 2024-11-21T17:41:55.091721Z :HTTP WARN: Cache received failed response with error "Connection timed out" for /server - retrying locally 2024-11-21T17:41:55.101924Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:55722) <- (200 Found) 2024-11-21T17:41:55.102016Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:55722) connection closed 2024-11-21T17:41:55.102125Z :HTTP DEBUG: (#17,127.0.0.1:11480) -> (200 Found) 2024-11-21T17:41:55.102132Z :HTTP DEBUG: (#17,127.0.0.1:11480) connection closed >> test.py::py2_flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |45.0%| [TS] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> ydb-tests-fq-plans::import_test [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |45.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |45.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] >> test.py::py2_flake8 [GOOD] |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |45.1%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} >> test_compatibility.py::flake8 [GOOD] >> run_tests.py::flake8 [GOOD] |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |45.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} >> test.py::py2_flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/flake8 >> test_compatibility.py::flake8 [GOOD] |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |45.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |45.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... 
grpc.pb.h} |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp >> test.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |45.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |45.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] |45.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/statistics_workload/flake8 >> __main__.py::flake8 [GOOD] |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |45.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp >> test_ttl.py::flake8 [GOOD] |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |45.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |45.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/flake8 >> test_config_with_metadata.py::flake8 [GOOD] |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> ydb-library-benchmarks-runner::import_test [GOOD] |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |45.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/logging/log_writer_detail.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |45.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |45.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |45.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/import_test >> ydb-library-benchmarks-runner::import_test [GOOD] |45.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |45.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |45.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/compression/brotli.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp >> ydb-tests-functional-sqs-large::import_test [GOOD] |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |45.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |45.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/import_test >> ydb-tests-functional-sqs-large::import_test [GOOD] |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp >> ydb-tests-functional-config::import_test [GOOD] |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |45.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |45.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp >> ydb-tests-functional-script_execution::import_test [GOOD] |46.0%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |46.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/import_test >> ydb-tests-functional-script_execution::import_test [GOOD] |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/crash_handler.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/error_code.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg/libcpp-lfalloc-dbg.a |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |46.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... 
grpc.pb.h} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |46.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/ut/ydb-core-config-tools-protobuf_plugin-ut |46.1%| [AR] {default-linux-x86_64, relwithdebinfo, pic} $(B)/yt/yt/core/libyt-yt-core.a |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |46.0%| [DL] $(B)/canondata_storage/1903280/45bea04670ac04ec37c2deab5ff4a786ae244430/resource.tar.gz{, .log} |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a >> ydb-tests-functional-scheme_shard::import_test [GOOD] |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |46.0%| [DL] $(B)/canondata_storage/1784117/1ee1d854fe43f6981a4bf1da95e36e8be387e233/resource.tar.gz{, .log} |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a |46.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |46.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a |46.0%| [DL] $(B)/canondata_storage/1917492/ef839f70e5a2f493427f7f92ed00d26a993f6d4a/resource.tar.gz{, .log} |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/libclient-nc_private-iam.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a |46.0%| [DL] $(B)/canondata_storage/937458/c423db03e0e9f65962fec3378f160c7d70d32138/resource.tar.gz{, .log} >> ydb-tests-tools-pq_read-test::import_test [GOOD] |46.0%| [DL] $(B)/canondata_storage/1871102/5b57bbc366ed87ccb54f2fce62c4a3214ac10518/resource.tar.gz{, .log} |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/bin/mvp_meta |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a |46.0%| [DL] $(B)/canondata_storage/1937429/d03442e328dca2de744539eee34693d8645faba4/resource.tar.gz{, .log} |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |46.1%| [DL] $(B)/canondata_storage/1937367/81cc07436d22bfe9ec8505998487b9ec6016cc03/resource.tar.gz{, .log} >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout |46.0%| [DL] 
$(B)/canondata_storage/1881367/3848e32ce807b5f10bb012e51d0ebe5ff6708554/resource.tar.gz{, .log} |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |46.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/import_test >> ydb-tests-tools-pq_read-test::import_test [GOOD] |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |46.0%| [DL] $(B)/canondata_storage/1775059/17f0d56cbd3b1817a494481bb24fbafc1bd7be1b/resource.tar.gz{, .log} |46.1%| [DL] $(B)/canondata_storage/1871182/1e53ee2b92848bee51fb8b73b6906845db1d0bd7/resource.tar.gz{, .log} |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |46.1%| [DL] $(B)/canondata_storage/1814674/522ed289227f8ca49d5b5d2d75ab25980e8e24b7/resource.tar.gz{, .log} |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |46.1%| [DL] $(B)/canondata_storage/1937150/a36e1dbd38eb3982d2ac3bfe25ca0672dc9d7f6d/resource.tar.gz{, .log} |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |46.1%| [DL] $(B)/canondata_storage/1936273/a60f9999d0698e9bb3fb56c37d5b3b1e5d6d9c95/resource.tar.gz{, .log} |46.1%| [DL] $(B)/canondata_storage/1900335/d4e82c318baf1ed34d6266f6481c0a7670c611fe/resource.tar.gz{, .log} |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |46.1%| [DL] $(B)/canondata_storage/1942100/0c1b1bb025932861fb70abad9310240dbe73a50c/resource.tar.gz{, .log} |46.1%| [DL] $(B)/canondata_storage/1871102/093ef1237a5eb90e2e1f6670f45824dd7aa652e1/resource.tar.gz{, .log} |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |46.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |46.1%| [DL] $(B)/canondata_storage/1946324/da6d46e38db4b05c0745cd0fc3b082c37b3cdbab/resource.tar.gz{, .log} |46.1%| [DL] $(B)/canondata_storage/1775059/f321b7af9d96556e34658539453b2887f1c38930/resource.tar.gz{, .log} |46.1%| [DL] $(B)/canondata_storage/1903280/7cc9f1de74341758d8f5a97318a2f3f942a54b15/resource.tar.gz{, .log} |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |46.2%| [DL] $(B)/canondata_storage/1937424/0a7fa81182305af7b414a8e11e361266a61bc724/resource.tar.gz{, .log} |46.1%| [DL] $(B)/canondata_storage/1809005/1e02e07ca10c72b9e1c4f8d753caa384f345ca23/resource.tar.gz{, .log} |46.1%| [DL] $(B)/canondata_storage/1937027/65db208ba11cd5e90b41bbb5f58baaa54793e4a9/resource.tar.gz{, .log} |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |46.2%| [DL] $(B)/canondata_storage/1817427/34bb8688e3affd3b54214743d3582c8d1694106f/resource.tar.gz{, .log} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |46.2%| [DL] $(B)/canondata_storage/1942525/5034185140ffa064b6ff5f40aec4f177acd3c5a1/resource.tar.gz{, .log} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |46.2%| [DL] 
$(B)/canondata_storage/1942671/e5a8e9ea3cc95035ab65c78d12cce22189e05430/resource.tar.gz{, .log} >> ydb-tests-functional-ttl::import_test [GOOD] |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |46.2%| [DL] $(B)/canondata_storage/1936947/d814cd457b003ee9da1e09ee0877a39078a61012/resource.tar.gz{, .log} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |46.2%| [DL] $(B)/canondata_storage/1871182/90cd88ea2a475c617af2c5f379760c2ad7b2e034/resource.tar.gz{, .log} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |46.2%| [DL] $(B)/canondata_storage/1130705/c87efbee3d7d51d89c25e9454a95a9e836e53b27/resource.tar.gz{, .log} |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |46.2%| [DL] $(B)/canondata_storage/1880306/9e9848effe0d45eb3d4372fca57bf6962d09aeed/resource.tar.gz{, .log} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |46.2%| [DL] $(B)/canondata_storage/1937367/3b134d615cd12a4e7acd4044dc106653bd43b397/resource.tar.gz{, .log} |46.2%| [DL] $(B)/canondata_storage/1871102/be2c02bcbeb4e2d0d7243f891f9cd59b613c9586/resource.tar.gz{, .log} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |46.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/gateway_spec.cpp |46.2%| [DL] $(B)/canondata_storage/1936842/9a35ead5b9e859efd416e87982485153dd2ce155/resource.tar.gz{, .log} |46.2%| [DL] $(B)/canondata_storage/1599023/9fb10775fd57dc9adafaafe2a658f6533a20dc46/resource.tar.gz{, .log} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |46.3%| [DL] $(B)/canondata_storage/1130705/0028f368df473b7ebff2145046017939f0673670/resource.tar.gz{, .log} |46.3%| [DL] $(B)/canondata_storage/1809005/f38e3f8d804c3c736510cb1eca690761b713a4b0/resource.tar.gz{, .log} |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/yqlrun/http/libtools-yqlrun-http.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/yqlrun.cpp |46.2%| [DL] $(B)/canondata_storage/1871102/a14717a3c8c558cf8fefe6d46cd5b04ed47ccc80/resource.tar.gz{, .log} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |46.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD] |46.3%| [DL] $(B)/canondata_storage/1923547/eaec090e24c98f865902b418cc92d70420b05c0b/resource.tar.gz{, .log} |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |46.3%| [DL] $(B)/canondata_storage/1937027/93c656fa9af655b08186ebb07d44445ef304fbc9/resource.tar.gz{, .log} |46.3%| [DL] $(B)/canondata_storage/1871102/bc396b8b31a3dc31af3e0918ca66137d03d31eff/resource.tar.gz{, .log} |46.3%| [DL] 
$(B)/canondata_storage/1937150/5e5d899491feb90365a9064ef20b0b01f17c7419/resource.tar.gz{, .log} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |46.3%| [DL] $(B)/canondata_storage/1942525/bdd139810f50778f152db9396ade27a31f707314/resource.tar.gz{, .log} |46.3%| [DL] $(B)/canondata_storage/1784117/bdd3c0ddc1670802f060fcdb1711e78ede383acf/resource.tar.gz{, .log} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |46.3%| [DL] $(B)/canondata_storage/1775059/b19c8660bf25c5ff6689bee92a8ca8837c638c17/resource.tar.gz{, .log} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_large/ut_btree_index_large.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |46.3%| [DL] $(B)/canondata_storage/1936273/e76bbb45fb3007593a65735b5c0016b79db0c798/resource.tar.gz{, .log} |46.4%| [DL] $(B)/canondata_storage/1600758/7854653343bd5226d6b3f3f5fa085f0193656cc7/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |46.4%| [DL] $(B)/canondata_storage/1937027/07d1655ee1666f7cc6e979f137fa5a6d3f866455/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |46.3%| [DL] $(B)/canondata_storage/1936273/b21d0cac033210604f1cd49a5c524081567ed4e3/resource.tar.gz{, .log} |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |46.3%| [DL] $(B)/canondata_storage/1775059/3015fdb690d45c556ed1066a415637cc49d6ec88/resource.tar.gz{, .log} |46.4%| [DL] $(B)/canondata_storage/1880306/e4ccac619cc79d4b07e7e803e386d47da238c793/resource.tar.gz{, .log} |46.4%| [DL] $(B)/canondata_storage/1784826/66abdb7f6ea46c5c7564f94f24f2656a0e2aa349/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |46.4%| [DL] $(B)/canondata_storage/1924537/7199a3a7eba1a101a7ecd6552b3df25cb9a6ef2b/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |46.4%| [DL] $(B)/canondata_storage/1784826/61f7d79c6b081f267865b1f3c0c8b51fcae1ebaa/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |46.4%| [DL] $(B)/canondata_storage/1924537/b112d187ebf731abc2b04c974853f91beb3dd74d/resource.tar.gz{, .log} |46.4%| [DL] $(B)/canondata_storage/1937367/7890620b546312cfd9cbc4ee46166efc1a36450c/resource.tar.gz{, .log} |46.4%| [DL] $(B)/canondata_storage/1689644/57f5e520abfb96651cc218a0d82eb6ee0fe38907/resource.tar.gz{, .log} |46.4%| [DL] $(B)/canondata_storage/1936273/b34296023a3ba5080f0236257e86efdd8a89093e/resource.tar.gz{, .log} |46.4%| [DL] $(B)/canondata_storage/1924537/481c71becc4b20198444748993508fe1ded84514/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |46.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/_10c9874010308af47fbf8680a3.yasm |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |46.5%| [DL] $(B)/canondata_storage/1880306/5d2fb97b23cd70975bc5d744391981f9d5595c04/resource.tar.gz{, .log} |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |46.4%| [DL] $(B)/canondata_storage/1942671/a089042abddfd51839b75278c8f86236a82e2fbd/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |46.4%| [DL] $(B)/canondata_storage/1784826/3ecd87c8ccd1a18f9d4f216f75472f1a834938f2/resource.tar.gz{, .log} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/bin/main.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |46.5%| [DL] $(B)/canondata_storage/1937027/00f4d6f3eee88b3871a02f7dd00f16d78030cdd2/resource.tar.gz{, .log} |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/10c9874010308af47fbf8680a3.auxcpp |46.5%| [DL] $(B)/canondata_storage/1923547/fd6a07dc80ba28f96de9cc3ede62013c2ff4f35e/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1784826/14d74cf07b9bc3ef5bc3a0c5040b886c7cc0007e/resource.tar.gz{, .log} |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |46.5%| [DL] $(B)/canondata_storage/1942415/5f0ec6d9c04156bf00348913fc51614b979e220d/resource.tar.gz{, .log} |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |46.5%| [DL] $(B)/canondata_storage/1923547/61c7053456cbb3e809e03779a3f7621039603dac/resource.tar.gz{, .log} |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |46.5%| [DL] $(B)/canondata_storage/1916746/50372851d94a84250091c501f470aca37411751a/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/937458/818e067fe83fe9b2daba4296b6b1e552d869fd55/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1880306/6d87e35267d9e4ac0736e1c4d17b92f0831eaba6/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1942173/5b2cc71cf1cf6cdf932029ae85f8889f18d81d77/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1781765/fd9ac83e51987bc944359ff67a2d8ec5051d37a8/resource.tar.gz{, .log} |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |46.5%| [DL] $(B)/canondata_storage/1889210/2fbf7f68942208b15ab6eb23b14b78640f078541/resource.tar.gz{, .log} |46.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... 
grpc.pb.h} |46.6%| [DL] $(B)/canondata_storage/1942525/968864b3be3864b00b0f0fb54ee97438202376c6/resource.tar.gz{, .log} |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a |46.5%| [DL] $(B)/canondata_storage/1936997/9befa0c3335c1ce55cbe46da2b9eb6fd58679c56/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1903280/d42e99dc4fef588809a37cacbb5855333c1c2edb/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1936947/581aa6d896ffe57e25bdb8006459e912860e61fa/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1942671/136488dd722e833e1c1e1c8bc98c69cae0134648/resource.tar.gz{, .log} |46.6%| [DL] $(B)/canondata_storage/1936947/2bc1f88e24977d85753d38b3cac45a372d34ec2f/resource.tar.gz{, .log} |46.6%| [DL] $(B)/canondata_storage/1597364/1370803313e5d0237dc90749ce72827c7dd82536/resource.tar.gz{, .log} |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |46.6%| [DL] $(B)/canondata_storage/1773845/e19fed515bf1f2a7f0b738a3e17a516cd55cdf4a/resource.tar.gz{, .log} |46.6%| [DL] $(B)/canondata_storage/1814674/9cc588658d645e8972899f036025c6c4884f598d/resource.tar.gz{, .log} >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_empty.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |46.5%| [DL] $(B)/canondata_storage/1936997/fbfc46046cfa3a913150834618e28cd82c05d5b0/resource.tar.gz{, .log} |46.5%| [DL] $(B)/canondata_storage/1784826/0c338d21c57ec0e55d25f0c5a7d66d262578559c/resource.tar.gz{, .log} |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |46.6%| [DL] $(B)/canondata_storage/1031349/2509d6f19c7d08d2f97888cb86cd1f893cd619db/resource.tar.gz{, .log} |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |46.6%| [DL] $(B)/canondata_storage/1937429/8d4678be89a5cdafec6099b5b174e1923a6b1b84/resource.tar.gz{, .log} |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |46.6%| [DL] $(B)/canondata_storage/1903280/e408d73e432cdcbd076f8502cb4502ad1d54ab5a/resource.tar.gz{, .log} |46.6%| [DL] $(B)/canondata_storage/1937027/555a559b5825a201986d8c31f3e51fe1196d9726/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1773845/3f7cf07086245864aa149e34543610596d0da304/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1937429/82c91013a516db34237d53cdad4ae5a77a3c568b/resource.tar.gz{, .log} |46.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |46.6%| [DL] $(B)/canondata_storage/1937492/a3be8907a794dd8afc1b0615834f797b64dd9927/resource.tar.gz{, .log} |46.6%| [DL] 
$(B)/canondata_storage/1942278/c7e94a55443ba1bfb954699e3753bab75896bf89/resource.tar.gz{, .log} |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |46.7%| [DL] $(B)/canondata_storage/1916746/6ecba111842446dcf0346e93a4db199bbedbdb10/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |46.7%| [DL] $(B)/canondata_storage/1899731/355eddae33a3318d608f8973d7978cafbe97d4cd/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1784117/a5ac79faa57763376eaf89f447411b1d96378091/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1937429/af1c6e6e04642438d43d596ae49e1f47c2f9a8bf/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |46.6%| [DL] $(B)/canondata_storage/1937367/6753adf109c979219bfffa5389a252ae034aa308/resource.tar.gz{, .log} |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 |46.6%| [DL] $(B)/canondata_storage/1600758/72bf03e23e03cc6eea365311492e82f69d27547f/resource.tar.gz{, .log} |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |46.6%| [DL] $(B)/canondata_storage/1784826/885e3ecf7da6faaa6a93df31c27314e793907f63/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1916746/5a3af070be0e3803da460e9b8077af974f329983/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1903885/f00a3197fa44aa3d49bf7fe1bbf0fed52ce265b9/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |46.7%| [DL] $(B)/canondata_storage/1937492/d66e77714299db72286b6a554eb40992ddbc4b9b/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1031349/00d772b75437904a4810c1baf5e317bde8a6e2b5/resource.tar.gz{, .log} |46.7%| [DL] $(B)/canondata_storage/1847551/37b6d30f0db871c667c3895752450e72de2125d9/resource.tar.gz{, .log} |46.8%| [DL] $(B)/canondata_storage/1880306/9f21ecea8b7e24ea53f2ae352301fe1250fdba57/resource.tar.gz{, .log} |46.8%| [DL] $(B)/canondata_storage/1781765/628d82040b97a4c0c6e6723ca4453171e6143b19/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |46.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... 
grpc.pb.h} |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |46.7%| [DL] $(B)/canondata_storage/1937367/c43db192f475421f2559d93dbe396ac1a811fd89/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |46.7%| [DL] $(B)/canondata_storage/1937027/3d8ae7405c87d4dd8f5110c9cafc9e3ec447e435/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |46.8%| [DL] $(B)/canondata_storage/1937027/31125da8bc31fe5a5232f3a169fa8a2431a89df9/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/f876c7e5551ebce27aee411303.auxcpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |46.8%| [DL] $(B)/canondata_storage/1130705/223d79eda7e49588c54267c8b7c488154ed801c9/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |46.8%| [DL] $(B)/canondata_storage/1942671/d6da076374b1124e492566e9f81d7f26078203f0/resource.tar.gz{, .log} |46.8%| [DL] $(B)/canondata_storage/1942173/faa0388e8ff65e27dc14e716b65cbd83441fd698/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |46.8%| [DL] $(B)/canondata_storage/1942173/eee032b3354d69a6e319d8f41f249f7e53178373/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |46.8%| [DL] $(B)/canondata_storage/1924537/1ee261cbcafb6071f58372428e9ece77d59ec4fd/resource.tar.gz{, .log} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |46.8%| [DL] $(B)/canondata_storage/937458/ea9bb2a5f9f6868f4e251937f810b7466fb20b69/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |46.8%| [DL] $(B)/canondata_storage/1781765/a90279cc9a67c5059fc23ab2db51011dafb37555/resource.tar.gz{, .log} |46.8%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/http_proxy/ut/inside_ydb_ut/_f876c7e5551ebce27aee411303.yasm |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |46.8%| [DL] $(B)/canondata_storage/1889210/25929c9307ecf4e0bdf1647e711c682147acf305/resource.tar.gz{, .log} |46.8%| [DL] $(B)/canondata_storage/937458/b4627e6d6be4f5c698896c8236ab5f6f65070d11/resource.tar.gz{, .log} >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] >> TBlobStorageGroupInfoIterTest::Domains [GOOD] |46.8%| [DL] $(B)/canondata_storage/212715/b10a3a963ab6644683db33c830058d65ff99d14f/resource.tar.gz{, .log} |46.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |46.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |46.8%| [DL] $(B)/canondata_storage/1597364/a4a11f25f9a25c3aeb4b614333c373013ce0dbe0/resource.tar.gz{, .log} |46.8%| [DL] $(B)/canondata_storage/1937367/e20c4de8f0db337c5ef869a8ce171ea4e06d80ca/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] >> ValidationTests::CanCopyTo [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |46.8%| [DL] $(B)/canondata_storage/212715/b9f267b2022a251b638e7a1f1ebeb788c308ed2f/resource.tar.gz{, .log} |46.8%| [DL] $(B)/canondata_storage/1871102/a105d0f5f9856af79134cb48c8f21a1b942134a2/resource.tar.gz{, .log} |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |46.8%| [DL] $(B)/canondata_storage/1924537/3ef7ee54911365a79534947d32d1e7c271e9edf3/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |46.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |46.7%| [DL] $(B)/canondata_storage/1784826/fff6c7690453c14e8a0ab17cc1dbdc617c2e4169/resource.tar.gz{, .log} |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |46.8%| [DL] $(B)/canondata_storage/1937429/114f8ad7d2fefa7b1548a3d84a3909986ebb4e65/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |46.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... 
grpc.pb.h} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |46.8%| [DL] $(B)/canondata_storage/1871182/a8f0dda19ece2eb39da3b275b4504de52525ed97/resource.tar.gz{, .log} |46.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::Domains [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |46.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/_c43757827e03b03f81c937ad5a.yasm |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/c43757827e03b03f81c937ad5a.auxcpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |46.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanCopyTo [GOOD] |46.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |46.8%| [DL] $(B)/canondata_storage/1814674/6222a4327ec3a132645a3145eb274ab71016ac00/resource.tar.gz{, .log} |46.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |46.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |46.9%| [DL] $(B)/canondata_storage/1923547/67f6df540c55c53542953e1bf74b7234a7231c48/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1871002/cee2cad2ea8ef95806a2aa2a8821680484bbe3b2/resource.tar.gz{, .log} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |46.8%| [DL] $(B)/canondata_storage/1871182/4f7621e3e8578e759c3947c8f07cc08181387ffd/resource.tar.gz{, .log} |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |46.9%| [DL] $(B)/canondata_storage/1903280/bdce64a3cd13bc28600baabc9b05d8e5728e311e/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1597364/d207979f510aaee5943f848bff1aebe434b82dfa/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/212715/1628fc53f3f62f8d00cdc3a2832cfcfea9a015c3/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1777230/92358f07848628e912a541ea35cf562f3ca2e131/resource.tar.gz{, .log} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/public_http/http_router_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |46.9%| [DL] $(B)/canondata_storage/1936273/ad5bb5518d18e6806e72772975bede630b68c916/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1903885/c8ed7244a5448efc4a28b5df8fbd77bc4288e041/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/937458/77127fceb419592ea59ebf8fd420f0f67121e538/resource.tar.gz{, .log} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |46.9%| [DL] $(B)/canondata_storage/1903885/e15ecedc064b62e14bd146feca52d1d6acd0bb64/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1917492/bb4a6167e361b901902dd3149427c5029802cf99/resource.tar.gz{, .log} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |46.9%| [DL] $(B)/canondata_storage/1931696/4ad92bebbe0a55859a86bf8023661b81c90b894e/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1599023/0e73c4fa67e9c960ff5312b7132f6c7465a2e8d9/resource.tar.gz{, .log} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |46.9%| [DL] $(B)/canondata_storage/1942525/e82383a0853340bf9ac348d8e641aeedc7c24e1b/resource.tar.gz{, .log} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |46.9%| [DL] $(B)/canondata_storage/1775059/b57c9040709a7b012953cf170d04a292adc8d3d3/resource.tar.gz{, .log} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |46.9%| [DL] $(B)/canondata_storage/1917492/b1f6880fce0f4bdb598f464d2cc793c782661bc5/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1942278/a082f831494e5032b97d462b87753ef9c3f0a5df/resource.tar.gz{, .log} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |46.9%| [DL] $(B)/canondata_storage/1917492/1ed6d08398686e90568735860251083949d84e4e/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1809005/ad7c074711ee8d1675aebabbf8025a2c8bd317d8/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1937150/8facf8f2f4f1dbe0881f83a275f035467ce8f3bd/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1903280/ec3176815eddcf643d7c668ba3e55b8b28ad99b8/resource.tar.gz{, .log} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |47.0%| [DL] $(B)/canondata_storage/1903885/7dffac89ce1ad5b85a289c1c8f6a474e7e3a9362/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1903280/cfc00695f60d304a5b897d2cf0fdcda9f6f0bc03/resource.tar.gz{, .log} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... 
grpc.pb.h} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |46.9%| [DL] $(B)/canondata_storage/1936842/c642aa0e8ba8df646b4ff92018c848b92dcb289d/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1936842/5461a7f0f4d722c81cba2eff5dd1d41bf3a77f80/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1925821/4e746178682b9df3b5c4d499b609fbaf2ac5b376/resource.tar.gz{, .log} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |47.0%| [DL] $(B)/canondata_storage/1924537/081e3ea5ef34a4fe33a8e971e47d53ea3a5151a4/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1889210/74544419c972160350a9c20f583a6dcc6f5d9b40/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1889210/24a25999f164b13ed263c37581db046794ca3fa6/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1031349/b34d6646a07c5cb3362856012fec19ee3306256d/resource.tar.gz{, .log} |46.9%| [DL] $(B)/canondata_storage/1597364/13b42f67efe0808381a2e9549fe4850dbad7a463/resource.tar.gz{, .log} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |47.0%| [DL] $(B)/canondata_storage/1937027/d467c683292423d8a40039f4803bb61721a8cd03/resource.tar.gz{, .log} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |47.0%| [DL] $(B)/canondata_storage/1777230/c670638311f49020b53d30b7f38b56ace838101e/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1777230/00c02c2221ad7773f9cfecb5ec1bd067dbaacbc5/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1880306/468b163936a9bc33f15e62d2d0026dcdb00b8520/resource.tar.gz{, .log} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |47.0%| [DL] $(B)/canondata_storage/1936947/25efb9f6eb4d1e76047ae7c2aef5ff59896f5b3c/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1880306/0c5ef34fdd8425c29c71bb31e0e955648c9186ba/resource.tar.gz{, .log} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |47.0%| [DL] $(B)/canondata_storage/212715/819f960bc7971689bf0f1a064927d4dbbb8d14ae/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1936997/82e8e136c5cb6c83d4a44aa387dbf64338ed57ae/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1031349/7d5eff370031e75ffe32ec32582d27203420e6b7/resource.tar.gz{, .log} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |47.0%| [DL] $(B)/canondata_storage/1871102/272188d52656e4b0f1c180e9407fd7cd898e5045/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1130705/da7974592864104e97d4cfb7947d82f2379f0266/resource.tar.gz{, .log} |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |47.0%| [DL] $(B)/canondata_storage/1942278/a5b73649957467a15d6799bcb6222e175bde02e1/resource.tar.gz{, .log} |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |47.0%| [DL] $(B)/canondata_storage/1936947/1186d579cc7e3197ae0471c3293b328d56f77001/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1031349/0b9bcb16a38e69c55142d62ab5b476d514cf83bf/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1931696/0cf0f02388f28b9c85e51af557c6c0adc7e3c2e1/resource.tar.gz{, .log} |47.0%| [DL] 
$(B)/canondata_storage/1936842/51593b2a750dbb036388d012a30fa937edaab5f0/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1925842/8a20a06ffe6b76e06a0c1b84ed9c57f5101194a7/resource.tar.gz{, .log} |47.0%| [DL] $(B)/canondata_storage/1600758/d550f6a68107f128a0baaebaff84ef0c6691b095/resource.tar.gz{, .log} |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |47.0%| [DL] $(B)/canondata_storage/1937424/4bf3629a378a97c2134d5c9ef82b431269f7812a/resource.tar.gz{, .log} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |47.1%| [DL] $(B)/canondata_storage/1937429/0252e3c99211f482c7e07e8f941c9d3bc7c92fd0/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1942525/4286c760bf658fe8e2079ca57866e3a8a4db409d/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1923547/e2d4955e81c4f7962197732fb8ffe5f144012239/resource.tar.gz{, .log} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |47.1%| [DL] $(B)/canondata_storage/1847551/d84e968fa8a66e33f0268ab656a20e2b84ad109c/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1931696/8b5249e741c0ac6b5882b8f37eb2fa8ea628e403/resource.tar.gz{, .log} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |47.1%| [DL] $(B)/canondata_storage/1937150/8af84cdae63c27872de09da76cddd708de02e35b/resource.tar.gz{, .log} |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |47.1%| [DL] $(B)/canondata_storage/1937424/1f3cd125c2d8eafb2ebb1dbc7c974f4f15ef1793/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1925821/46adefeb17892d8d0db52b6bd1c5cbf83fc892ff/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1880306/c2ca13a1907a4ca0d7268cb9eec4bdd9f56fcd83/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1775059/e9552b9a4d6e86a0dce623a0dcdac76be34f22bb/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1809005/9c9521692eb4e7097120f2dbbf0ff153301478e9/resource.tar.gz{, .log} |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |47.1%| [DL] $(B)/canondata_storage/1900335/b69eef2db8bbb87629e850b8626d51a7c2f99f1c/resource.tar.gz{, .log} |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |47.1%| [DL] $(B)/canondata_storage/1946324/c73c9eddeafc3ff13ff62d961df837bb6725d750/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1773845/57222273edb3e600187ff62653acf03a13f24744/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1923547/f8815f5c37d8a37467fb21ce650719d84ad34373/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1937001/a770b7e950bbaeaf08ef4bbb336b7e3683a914ce/resource.tar.gz{, .log} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |47.1%| [DL] $(B)/canondata_storage/1784826/02898379a4c0c69ac2a74e54bad0c911e0b55bc0/resource.tar.gz{, .log} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |47.1%| [DL] $(B)/canondata_storage/1925842/72490ba4ca88a4360df1e1456a0800bc1ada47f5/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp 
|47.2%| [DL] $(B)/canondata_storage/1130705/d9515e536cf880a45dedae2a41661295b91c6258/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/1031349/fbbf24f543f1b677d727bca56c1443dfe538b3b6/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |47.2%| [DL] $(B)/canondata_storage/1847551/88403ddbf01474ba2e3e37f885d908baa723db7c/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1931696/8efbe84ad728243c3e1c1cdb30d3b3f31d345567/resource.tar.gz{, .log} |47.1%| [DL] $(B)/canondata_storage/1942100/8fb0a7a6c71d8992f7b5d0fc7d2d03c809f0254b/resource.tar.gz{, .log} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |47.2%| [DL] $(B)/canondata_storage/1775319/8ac8c87858e0db34f5a3c99b3f4ca1084cccbace/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |47.2%| [DL] $(B)/canondata_storage/1942173/88bbdf23a1e54cd5c5a8e5a6a9b995056573e90a/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/1889210/9885815b3706cdb615855cdf9706b1ee426b3f80/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |47.2%| [DL] $(B)/canondata_storage/1937429/e5eaf8d78c61231eab5dfa6a18215af9f922a482/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |47.2%| [DL] $(B)/canondata_storage/1923547/b78c71ff76583fef098d28babd2f10e4e7cbccc9/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/1917492/11f230eb792116e595ab03312b67142ea47d20e0/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |47.2%| [DL] $(B)/canondata_storage/1937150/c1acae706dd71ce088fc48a032c252e2fac078b9/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |47.2%| [DL] $(B)/canondata_storage/1599023/53262e114e5fb21cb58c259e812c31e2f63afae0/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/1773845/6e61cbdfae8bc6d693a1ad4cc304d9d45edb7242/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/1880306/d9c0e7be0cd0986dec7319115d94c6ed554b6ac1/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/1689644/2bd6d3fb78f1d7cb3b8de730f65e151f606e2b42/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1936842/e15468da5c6a430935df259a2106604daa68ad66/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/995452/716ad95d1f07400d4492e3dea5246cf9ad9ac9e5/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |47.2%| [DL] $(B)/canondata_storage/1925821/6ac3fd5e5dd20ee6d3841e1231c1129dee1f6a05/resource.tar.gz{, .log} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |47.3%| [DL] 
$(B)/canondata_storage/1689644/76bd2942df187ba04bb9771a46cdadf0d1dbe01c/resource.tar.gz{, .log} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |47.3%| [DL] $(B)/canondata_storage/212715/536da9610d37d18c947fc7368e3720d62d90036f/resource.tar.gz{, .log} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |47.3%| [DL] $(B)/canondata_storage/212715/3898a96b3df9e749177ae140aa9739e06b0e4669/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1942415/ecf45b8d311b13ba55e2de94295cabed9b642863/resource.tar.gz{, .log} |47.2%| [DL] $(B)/canondata_storage/1903885/c99336662dd85cc4dbf2e30aa3726a822664376a/resource.tar.gz{, .log} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |47.3%| [DL] $(B)/canondata_storage/1920236/25167f14d72879f8881f693851d4290f3941dcbf/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1937001/601e94a23ec26980c16840b1ec99d6084037513f/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1942173/a88e613f98b9308632d7651072259231cab1e791/resource.tar.gz{, .log} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |47.3%| [DL] $(B)/canondata_storage/1937150/6ed1231d0735e7ff4ac5f603831c10709457ac3b/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1889210/9134d9e30423bbc1dffa9f6443fbc36d9fb3203d/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1880306/b2c00ff823e390f0263acf2dbb68c876e0b31abd/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1936842/45561f6cfd09b2c9c24d1d0eb74eb99fd3c0f61b/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1814674/a71168cd07531a101c5ea9b29bae77a3ea1d4693/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1937429/73a35abfefcb4c30cf44393e3335cf2af34209e7/resource.tar.gz{, .log} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |47.3%| [DL] $(B)/canondata_storage/1775059/3cb7d014d70b84dbcb84645fa987dd9d47d7fd6c/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1903280/fbbb08f81e8431c873a84474187acbd073ef4018/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1889210/052ee36577b14824b146640268a61fb1664c0f7d/resource.tar.gz{, .log} |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |47.4%| [DL] $(B)/canondata_storage/1942525/8f1e438ab44695d5d2d07d5aa00ddcc561c38421/resource.tar.gz{, .log} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |47.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |47.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... 
defs.inl.h} |47.3%| [DL] $(B)/canondata_storage/1936842/c62861d65748dca3fd75e9393720a48de8395467/resource.tar.gz{, .log} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |47.4%| [DL] $(B)/canondata_storage/1942173/dbfab3fd6b2a084258584e8ee47fd89f14e189da/resource.tar.gz{, .log} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/common/ut/retry_events_queue_ut.cpp |47.4%| [DL] $(B)/canondata_storage/1031349/506ae7e8d4f20418c9124d112729390d56f60276/resource.tar.gz{, .log} |47.4%| [DL] $(B)/canondata_storage/1031349/5cd6d3a72668cbea9853b86e5da2f0f315f35bc5/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1871002/72263a808838fefc0abe1dc2ac9fac8909e4a5a1/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1889210/c1b0707098b27716037274ba14a9fc58ec6d54ce/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1942525/a26edc49ac3c9a8155d4006bedb7f8cc9675ee3d/resource.tar.gz{, .log} |47.3%| [DL] $(B)/canondata_storage/1871102/fa4f080643cfe96ac90e96595d2e7db77c75a3b7/resource.tar.gz{, .log} |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |47.3%| [DL] $(B)/canondata_storage/1031349/ac3fdb59ac5555bb04f369156daa910ae69f4f8f/resource.tar.gz{, .log} |47.4%| [DL] $(B)/canondata_storage/1781765/cf4791e13b24747d9e6fb3bfc11e0fdb45a964c9/resource.tar.gz{, .log} |47.4%| [DL] $(B)/canondata_storage/1942525/1159b122a0dc77fe26cec831747a249913a7783d/resource.tar.gz{, .log} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |47.4%| [DL] $(B)/canondata_storage/1925842/9d9b2ad4701e00c2bd94e410ea0b0b6ddc58fd38/resource.tar.gz{, .log} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |47.4%| [DL] $(B)/canondata_storage/1871002/4cb15fb9e597ca755ed7e9f8f31c5eafa9b5a582/resource.tar.gz{, .log} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |47.4%| [DL] $(B)/canondata_storage/1889210/3d889b385570041dbbd2165e00510547b2c1144d/resource.tar.gz{, .log} |47.4%| [DL] $(B)/canondata_storage/1937367/26ffadbe955b9e88125bb0a27831ce1640a50e2a/resource.tar.gz{, .log} |47.4%| [DL] $(B)/canondata_storage/1925821/6132b4b967a7c6d2d9c522d4a344e781b4121793/resource.tar.gz{, .log} |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |47.5%| [DL] $(B)/canondata_storage/1925842/620b4a12b99ac4a4ef526f742b6157cccada6f07/resource.tar.gz{, .log} |47.4%| [DL] $(B)/canondata_storage/1871102/69be1cf486f07d50c602df988fd0308f3c43bd08/resource.tar.gz{, .log} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |47.4%| [DL] $(B)/canondata_storage/1775059/e6328418d209e6f2afe65be714175e5a3ade006c/resource.tar.gz{, .log} |47.4%| [DL] $(B)/canondata_storage/1809005/108bbf4b6cd9ab5b73dedcf18b5aa453d5b82a70/resource.tar.gz{, .log} |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |47.5%| [DL] $(B)/canondata_storage/1130705/3deff34d248db1fb5a54ca6f66a2bd921ca5f5fe/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1942100/2d6c7e378366673856333d19c3501c45eed6b4cd/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1931696/34a23b0fbb8cda9112778eff3500850ab0b81fdb/resource.tar.gz{, .log} |47.5%| [DL] 
$(B)/canondata_storage/1871182/027431fe7e452cb0fbf3b9f53f15eb69aef793e3/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1899731/bf1552ea5a722ea8bc2de463418359c419c09386/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1031349/201452dd8c883b2adcbf46cb075c912d25efe67e/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1903885/85783ac2a357850f457cbdc94d8685f602517f63/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1903885/816633aaba150966f45785296717d160cf702a05/resource.tar.gz{, .log} |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |47.5%| [DL] $(B)/canondata_storage/1775319/38406f106ad293bc18eb9f25f8a585b0fc6a7034/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1942671/3559361cb2a96fdebafda74938d6d6c5f595f8a5/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1923547/be7e687ea36299e4a042c1495c58b793c69141af/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1946324/859eeab6372d0950962668f1ee4087cc149a1e7c/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1923547/4f4b5d3dd0bd075220c6a8df315e3056e8ca2ac8/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/937458/e291908d1acc33f8b839a28a42f73be6affed850/resource.tar.gz{, .log} |47.5%| [DL] $(B)/canondata_storage/1920236/87ffa4c1bdf96124a80c950f8ff630741d28d4cf/resource.tar.gz{, .log} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |47.6%| [DL] $(B)/canondata_storage/1889210/eecb0781dab14320b0f96bfa31a980580d1d7ba2/resource.tar.gz{, .log} |47.6%| [DL] $(B)/canondata_storage/1937429/f07b84667ac042b441b980139436c1568397aded/resource.tar.gz{, .log} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |47.6%| [DL] $(B)/canondata_storage/1775319/4f0c679fa773e90cc66a570ed4a5f0d8cc31f2c2/resource.tar.gz{, .log} |47.6%| [DL] $(B)/canondata_storage/1903885/5bfe97f5876ada641c26fbdc01de3a321c5117a2/resource.tar.gz{, .log} |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |47.6%| [DL] $(B)/canondata_storage/1777230/783a2910c4d77e5aa8c5d6ad3e840bc965864783/resource.tar.gz{, .log} |47.6%| [DL] $(B)/canondata_storage/1900335/1c84908d492197ead2c896624a2389b6dc3780ab/resource.tar.gz{, .log} |47.6%| [DL] $(B)/canondata_storage/1903280/12e22760f1793514adf21cca06ce333170f22e8f/resource.tar.gz{, .log} |47.6%| [DL] $(B)/canondata_storage/1937424/71aa4d14c8d66bba5d69f626e865a747fde28f75/resource.tar.gz{, .log} |47.6%| [DL] $(B)/canondata_storage/1942671/114b21a6d17bc5be63ccf80717cafe8c74702dff/resource.tar.gz{, .log} |47.6%| [DL] $(B)/canondata_storage/1942671/db8798ead58a329e866fa250746caab9ca42a26c/resource.tar.gz{, .log} |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... 
grpc.pb.h} |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |47.6%| [DL] $(B)/canondata_storage/1936947/0ad6ee1282daf17d09361563e96adcbb4dd32437/resource.tar.gz{, .log} |47.7%| [DL] $(B)/canondata_storage/1597364/dba0843848a9b6c75e97f9c07beb339bd899e83a/resource.tar.gz{, .log} |47.7%| [DL] $(B)/canondata_storage/1937027/591a1ceca790d81eaf524a7a3e730722b0d7bdb7/resource.tar.gz{, .log} |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |47.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/benchmark |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... 
grpc.pb.h} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp >> TopicNameConverterForCPTest::BadLegacyTopics [GOOD] >> TopicNameConverterForCPTest::BadModernTopics [GOOD] |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... 
grpc.pb.h} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne >> TopicNameConverterTest::LegacyStyle [GOOD] >> TopicNameConverterTest::FirstClass [GOOD] |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |47.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> TopicNameConverterForCPTest::CorrectLegacyTopics [GOOD] >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> DiscoveryConverterTest::FullLegacyPath [GOOD] >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] >> DiscoveryConverterTest::DiscoveryConverter [GOOD] >> DiscoveryConverterTest::EmptyModern [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> DiscoveryConverterTest::FullLegacyNames [GOOD] >> DiscoveryConverterTest::FirstClass [GOOD] >> TQueryResultSizeTrackerTest::CheckAll [GOOD] >> DiscoveryConverterTest::MinimalName [GOOD] >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] >> TopicNameConverterTest::LegacyStyleDoubleName [GOOD] >> TopicNameConverterTest::NoTopicName [GOOD] |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |47.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_ut.cpp |47.7%| [DL] $(B)/canondata_storage/1916746/2673649875bad3e7ba633862e83892bc57334832/resource.tar.gz{, .log} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |47.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::FirstClass [GOOD] |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |47.7%| [DL] $(B)/canondata_storage/1942278/fd0ca7f95f7c6343b4a13953597c61288fa071d0/resource.tar.gz{, .log} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |47.8%| [DL] $(B)/canondata_storage/1775059/60aa9c77d2376aa1beb6e616fcbdc82d0b2724be/resource.tar.gz{, .log} >> TopicNameConverterTest::Paths [GOOD] >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] >> TQueueBackpressureTest::IncorrectMessageId [GOOD] >> DiscoveryConverterTest::AccountDatabase [GOOD] >> DiscoveryConverterTest::CmWay [GOOD] |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |47.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::BadModernTopics [GOOD] |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |47.8%| [DL] $(B)/canondata_storage/1889210/6219b71e060e8fdf87b91b39a594295ea521db49/resource.tar.gz{, .log} |47.8%| [DL] $(B)/canondata_storage/1936273/c3891df321db69a340c035f6ee3e3b82d4bdbf8e/resource.tar.gz{, .log} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |47.8%| [DL] $(B)/canondata_storage/1937150/bfd37f8371e4e91bdf62bcb8724b428fe27f2206/resource.tar.gz{, .log} >> TBlobStorageQueueTest::TMessageLost [GOOD] |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |47.8%| [DL] $(B)/canondata_storage/1130705/ac9859ee8d53f34b0483c0f88da3629c3f1f4324/resource.tar.gz{, .log} |47.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |47.8%| [DL] $(B)/canondata_storage/1777230/a0620ade18d5d9590309b7ceb4fe7b5f476ce7cb/resource.tar.gz{, .log} |47.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest |47.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] |47.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |47.8%| [DL] $(B)/canondata_storage/1814674/84cbfbe4040a046d449594db65102999cee0bced/resource.tar.gz{, .log} |47.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::EmptyModern [GOOD] |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |47.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::NoTopicName [GOOD] |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, 
cc} |47.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FirstClass [GOOD] |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |47.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] |47.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |47.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |47.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] >> TQueueBackpressureTest::PerfInFlight |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |47.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageQueueTest::TMessageLost [GOOD] |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |47.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |47.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::CmWay [GOOD] |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... 
grpc.pb.h} |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |47.9%| [DL] $(B)/canondata_storage/1899731/b9b361b8c242c528f8d095b6a77697b3bd10ec53/resource.tar.gz{, .log} |47.9%| [DL] $(B)/canondata_storage/1931696/5c3e75d3a93b046b25d923d257b180452f6b217a/resource.tar.gz{, .log} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |47.9%| [DL] $(B)/canondata_storage/1871182/2666f20d4e176027b281a7b851dd96e818956ef4/resource.tar.gz{, .log} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |47.9%| [DL] $(B)/canondata_storage/1784826/ded8e4bd34c1c30373c7726abce38eca90ec35ab/resource.tar.gz{, .log} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |47.9%| [TA] $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} |47.9%| [DL] $(B)/canondata_storage/1784826/27f8cd2f641de7f2ee55266beaca6dda7fb6d4cc/resource.tar.gz{, .log} |47.9%| [DL] $(B)/canondata_storage/1937150/2d475e4bf97968b8ba3ae996beb4334ab9cfac60/resource.tar.gz{, .log} |47.9%| [DL] $(B)/canondata_storage/1916746/3cb022be2018b398cd935b68f7b1091e2882ca57/resource.tar.gz{, .log} |47.9%| [DL] $(B)/canondata_storage/1814674/aff89164540cb0673786555c6e82154c2d8a3ef5/resource.tar.gz{, .log} |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |47.9%| [DL] $(B)/canondata_storage/1925842/70942689b7ce63cefca5f7da5343fab5153230a8/resource.tar.gz{, .log} |47.9%| [DL] $(B)/canondata_storage/1903280/1a83c50872a07e15c5461f201b027b8b1cf142a1/resource.tar.gz{, .log} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |48.0%| [DL] $(B)/canondata_storage/1936273/9613a7e6a06ad9123b9b3496470bc3108947b98f/resource.tar.gz{, .log} |48.0%| [DL] $(B)/canondata_storage/1937424/be6de2a45c0e092d8da0f5c04670601e603a4d75/resource.tar.gz{, .log} >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] >> TQueueBackpressureTest::PerfTrivial >> TQueueBackpressureTest::PerfInFlight [GOOD] |47.9%| [DL] $(B)/canondata_storage/1903885/517bc28c121f874aa51bbc3deb7b23a0b3fdeaf2/resource.tar.gz{, .log} |47.9%| [DL] 
$(B)/canondata_storage/1889210/0a74d27984bc3e33adaacdc7b85618c55673e8d8/resource.tar.gz{, .log} |47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |47.9%| [DL] $(B)/canondata_storage/1817427/afca08871a12f7dcb17f712cbd2347965f220a40/resource.tar.gz{, .log} |48.0%| [DL] $(B)/canondata_storage/1925821/ecd9dd14fd5c368a780124aeaab181143df1a49c/resource.tar.gz{, .log} |48.0%| [DL] $(B)/canondata_storage/1130705/20da23c279246100d1cf6675c98b016c27d78ebb/resource.tar.gz{, .log} |48.0%| [DL] $(B)/canondata_storage/1946324/e1f7c67cafa20200008de81571567844ef07755d/resource.tar.gz{, .log} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |48.0%| [DL] $(B)/canondata_storage/1871182/cf13957d635dc8c77a65ef70797b7c6b8d4646c5/resource.tar.gz{, .log} |48.0%| [DL] $(B)/canondata_storage/1942278/a9dafa0c92a2a568391db5df4c8a2a950955314c/resource.tar.gz{, .log} >> TQueueBackpressureTest::CreateDelete [GOOD] >> ValidationTests::MapType [GOOD] >> ValidationTests::AdvancedCopyTo [GOOD] |48.0%| [DL] $(B)/canondata_storage/1889210/a392a54ec359804e59b33f48a3c5e8f3c7765cda/resource.tar.gz{, .log} |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |48.0%| [DL] $(B)/canondata_storage/212715/1d5e3cd59753ff0c77fb4968cc3520790b529523/resource.tar.gz{, .log} |48.0%| [DL] $(B)/canondata_storage/1942525/bdcfc13ab61bbc8317caf8a3654b63d189e255fa/resource.tar.gz{, .log} |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... 
grpc.pb.h} |48.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |48.1%| [DL] $(B)/canondata_storage/1600758/a3c9dd835d113e6cdf30d9e35d9cc95c3a203da3/resource.tar.gz{, .log} >> ValidationTests::HasReservedPaths [GOOD] |48.0%| [DL] $(B)/canondata_storage/1936273/85968a675c17dd0728c8d7ba5fd43bd0b237dc65/resource.tar.gz{, .log} |48.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |48.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |48.1%| [DL] $(B)/canondata_storage/1931696/9b1f78d9612780e865306f09a2040d8a0d826732/resource.tar.gz{, .log} |48.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward [GOOD] >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] >> ValidationTests::CanDispatchByTag [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |48.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |48.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |48.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest |48.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark >> TBlobStorageGroupInfoBlobMapTest::CheckCorrectBehaviourWithHashOverlow [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |48.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::CreateDelete [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |48.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::HasReservedPaths [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp >> TQueueBackpressureTest::PerfTrivial [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |48.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |48.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |48.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/ydb/ydb_coordination_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |48.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::AdvancedCopyTo [GOOD] |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |48.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::MapType [GOOD] |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |48.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |48.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |48.2%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |48.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanDispatchByTag [GOOD] |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... 
grpc.pb.h} >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] |48.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfTrivial [GOOD] |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |48.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/ut/ydb-core-fq-libs-metrics-ut |50.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] Test command err: [0:1:0:3:1]# 173 184 157 167 152 185 195 192 144 [0:1:1:1:1]# 189 195 192 171 157 161 167 155 196 [0:1:3:3:1]# 184 157 182 152 185 157 192 144 189 [0:1:3:4:0]# 148 154 155 158 194 160 156 163 140 [0:1:2:3:2]# 152 177 174 176 154 146 161 170 168 [0:1:1:2:1]# 157 167 152 189 195 192 171 157 161 [0:1:1:0:2]# 158 150 131 167 177 161 177 174 173 [0:1:3:0:1]# 161 155 171 196 154 167 184 157 182 [0:1:0:3:2]# 174 173 152 146 184 176 168 157 161 [0:1:2:2:0]# 163 140 161 148 162 159 168 178 190 [0:1:0:2:0]# 161 156 163 159 196 148 190 162 168 [0:1:3:2:1]# 152 185 157 192 144 189 161 155 171 [0:1:2:3:1]# 157 182 173 185 157 167 144 189 195 [0:1:3:1:2]# 157 161 170 131 190 158 161 178 167 [0:1:2:0:1]# 155 171 157 154 167 155 157 182 173 [0:1:3:0:2]# 131 190 158 161 178 167 173 152 177 [0:1:2:0:2]# 190 158 150 178 167 177 152 177 174 [0:1:2:4:1]# 154 167 155 157 182 173 185 157 167 [0:1:2:1:2]# 161 170 168 190 158 150 178 167 177 [0:1:2:4:2]# 178 167 177 152 177 174 176 154 146 [0:1:0:2:1]# 167 152 185 195 192 144 157 161 155 [0:1:0:0:0]# 190 162 168 174 148 154 177 158 194 [0:1:3:2:0]# 156 163 140 196 148 162 162 168 178 [0:1:1:0:1]# 171 157 161 167 155 196 182 173 184 [0:1:0:2:2]# 146 184 176 168 157 161 150 131 190 [0:1:1:0:0]# 178 190 162 155 174 148 160 177 158 [0:1:2:3:0]# 194 160 177 163 140 161 148 162 159 [0:1:2:4:0]# 154 155 174 194 160 177 163 140 161 [0:1:1:3:2]# 177 174 173 154 146 184 170 168 157 [0:1:2:1:1]# 144 189 195 155 171 157 154 167 155 [0:1:1:1:0]# 162 159 196 178 190 162 155 174 148 [0:1:1:3:1]# 182 173 184 157 167 152 189 195 192 [0:1:3:4:1]# 196 154 167 184 157 182 152 185 157 [0:1:1:4:2]# 167 177 161 177 174 173 154 146 184 [0:1:0:1:0]# 159 196 148 190 162 168 174 148 154 [0:1:3:4:2]# 161 178 167 173 152 177 184 176 154 [0:1:0:0:1]# 157 161 155 155 196 154 173 184 157 [0:1:1:4:0]# 155 174 148 160 177 158 140 161 156 [0:1:2:1:0]# 148 162 159 168 178 190 154 155 174 [0:1:2:0:0]# 168 178 190 154 155 174 194 160 177 [0:1:3:3:2]# 173 152 177 184 176 154 157 161 170 [0:1:0:4:0]# 174 148 154 177 158 194 161 156 163 [0:1:1:2:0]# 140 161 156 162 159 196 178 190 162 [0:1:0:1:1]# 195 192 144 157 161 155 155 196 154 [0:1:3:0:0]# 162 168 178 148 154 155 158 194 160 [0:1:3:1:1]# 192 144 189 161 
155 171 196 154 167 [0:1:0:4:1]# 155 196 154 173 184 157 167 152 185 [0:1:2:2:1]# 185 157 167 144 189 195 155 171 157 [0:1:3:1:0]# 196 148 162 162 168 178 148 154 155 [0:1:2:2:2]# 176 154 146 161 170 168 190 158 150 [0:1:0:3:0]# 177 158 194 161 156 163 159 196 148 [0:1:3:3:0]# 158 194 160 156 163 140 196 148 162 [0:1:0:1:2]# 168 157 161 150 131 190 177 161 178 [0:1:3:2:2]# 184 176 154 157 161 170 131 190 158 [0:1:1:3:0]# 160 177 158 140 161 156 162 159 196 [0:1:1:2:2]# 154 146 184 170 168 157 158 150 131 [0:1:1:4:1]# 167 155 196 182 173 184 157 167 152 [0:1:1:1:2]# 170 168 157 158 150 131 167 177 161 [0:1:0:0:2]# 150 131 190 177 161 178 174 173 152 [0:1:0:4:2]# 177 161 178 174 173 152 146 184 176 mean# 166.6666667 dev# 15.11254078 |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |51.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] |51.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |51.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |51.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |51.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |51.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |52.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |52.3%| [DL] $(B)/canondata_storage/1903280/b3ad5a45d76b516f66899551cc71277d7e559aab/resource.tar.gz{, .log} |52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |52.6%| [DL] $(B)/canondata_storage/1775319/e518084f9a6f7560212f360435984047c6cdf17e/resource.tar.gz{, .log} |52.7%| [DL] $(B)/canondata_storage/1871102/597d6ee930787f14a7fd3507c37be2e17e206201/resource.tar.gz{, .log} |52.7%| [DL] $(B)/canondata_storage/1775059/502776df8bd4c104347b692d9cedc4d35048bfbb/resource.tar.gz{, .log} |52.7%| [DL] $(B)/canondata_storage/1871102/41439821d7c11fdff3fabc8b28b32c29a0af320c/resource.tar.gz{, .log} |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp |52.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |52.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |52.9%| [DL] $(B)/canondata_storage/1924537/99b9f14a8cf47c7ed8fe38921521b6e743c7838e/resource.tar.gz{, .log} |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |53.0%| [TA] $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |53.1%| [TA] $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} |53.4%| [DL] $(B)/canondata_storage/1920236/3d3a0652b04204362f293bde5bd1db28f8645dd7/resource.tar.gz{, .log} |54.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |54.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp >> Metrics::EmptyIssuesList [GOOD] >> Metrics::SeveralTopItems [GOOD] |55.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> Metrics::OnlyOneItem [GOOD] >> Metrics::CombineSubItems [GOOD] |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |56.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp >> Metrics::SeveralSubItems [GOOD] |57.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> Metrics::MoreThanFiveItems [GOOD] |57.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::SeveralTopItems [GOOD] |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::EmptyIssuesList [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/core/fq/libs/metrics/ut/unittest >> Metrics::OnlyOneItem [GOOD] |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |58.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::CombineSubItems [GOOD] |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::SeveralSubItems [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |59.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::MoreThanFiveItems [GOOD] |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |61.5%| [DL] $(B)/canondata_storage/1130705/757d10cb32f3f15562b523da2252a50eeaba7592/resource.tar.gz{, .log} |61.7%| [DL] $(B)/canondata_storage/1903280/afac309a2db105c8b5b6044c02f5c6755972bae8/resource.tar.gz{, .log} |62.1%| [DL] $(B)/canondata_storage/1917492/070995acd2c8a2466496cea9a294777a34e981c8/resource.tar.gz{, .log} |62.2%| [DL] $(B)/canondata_storage/1936842/baf80494bd9561ab5e7825bd062823ca1bfa64ba/resource.tar.gz{, .log} |62.2%| [DL] $(B)/canondata_storage/1871002/74160783fcdc3a479258b5a4bea49b3bd257f296/resource.tar.gz{, .log} |62.3%| [DL] $(B)/canondata_storage/1809005/2c57f3023075d58f8075081492e1ebcaa65b94b3/resource.tar.gz{, .log} |62.3%| [DL] $(B)/canondata_storage/1781765/eaf8b4f54dbd9300a96708f39f699380d90b82a9/resource.tar.gz{, .log} |62.3%| [DL] $(B)/canondata_storage/1942525/74bf94126143e9723be7f582af97f7e4178aca36/resource.tar.gz{, .log} |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |63.1%| [DL] $(B)/canondata_storage/1937027/d6800001b04dd48ddf438b36e325ad1cf97cb1c0/resource.tar.gz{, .log} |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |63.9%| [DL] $(B)/canondata_storage/1936947/79f6f05a619e566dcfd3200df680cadf79a1ceda/resource.tar.gz{, .log} |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |64.2%| [DL] $(B)/canondata_storage/1937367/6af906d8e8515951055311e09244912c4095ac7f/resource.tar.gz{, .log} |64.2%| [DL] $(B)/canondata_storage/1936842/fcb47a0e0def68bc18f22f65f762c31aed818052/resource.tar.gz{, .log} |64.2%| [DL] $(B)/canondata_storage/1871182/a5143e37b158b01c1abee6d3aa96332d56e08679/resource.tar.gz{, .log} |64.4%| [DL] $(B)/canondata_storage/1942671/c9c83131b391b0a13b103155f61dd4f9a78f6ce6/resource.tar.gz{, .log} |64.4%| [DL] $(B)/canondata_storage/1817427/fe4f0f55fa639e868048e3677847ba676115c30c/resource.tar.gz{, .log} |64.7%| [DL] $(B)/canondata_storage/1942173/244e633354167f84bd1643b15c8646303e7785de/resource.tar.gz{, .log} |64.7%| [DL] $(B)/canondata_storage/1773845/38a5e42f094acedb001785c46756100166f2d154/resource.tar.gz{, 
.log} |64.7%| [DL] $(B)/canondata_storage/1597364/81324d44a583c1e929d9afce72a977f6bafb04dd/resource.tar.gz{, .log} |64.8%| [DL] $(B)/canondata_storage/1031349/9307f22e787d6672ca7ce676234156a5d522b352/resource.tar.gz{, .log} |64.8%| [DL] $(B)/canondata_storage/1924537/b026db360d04d5258e7809234e0e43766eca50c6/resource.tar.gz{, .log} |64.8%| [DL] $(B)/canondata_storage/1936947/263a4aa7d0f3b612765b8f33f9c77526e5f5aa78/resource.tar.gz{, .log} |64.8%| [DL] $(B)/canondata_storage/1889210/10a2f6c7c73e83596767c28aa17294d6794df9f2/resource.tar.gz{, .log} |64.8%| [DL] $(B)/canondata_storage/1936273/675c4946b21871cc600565c50ebb14d6b242301d/resource.tar.gz{, .log} |64.9%| [DL] $(B)/canondata_storage/1936273/0d6f42ca322b97862bb9eb8744733aceed4802ba/resource.tar.gz{, .log} |64.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |65.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |65.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |64.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |65.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |65.0%| [DL] $(B)/canondata_storage/1031349/8221fc254d5fbec8ed6f97695721bdf1adc20225/resource.tar.gz{, .log} |65.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |65.1%| [DL] $(B)/canondata_storage/1599023/5ebe01f73e8e346a61b42aadb75da1a518ed1660/resource.tar.gz{, .log} |65.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |65.2%| [TA] $(B)/ydb/core/fq/libs/metrics/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |65.2%| [DL] $(B)/canondata_storage/1871182/7949078f204bbdbafbcf0efaa8e1e8cbc661f9ab/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1920236/e3a40a0f1b28089f5bcc00a85b3176919dc509ac/resource.tar.gz{, .log} |65.3%| [DL] $(B)/canondata_storage/1900335/7d71d797a341c73a27d37b4ad44eff7a6300965d/resource.tar.gz{, .log} |65.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |65.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |65.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |65.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |65.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |65.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |65.7%| [DL] $(B)/canondata_storage/1936947/2bc3e51a8b9883f1a1d8b98124fe921cba1fca45/resource.tar.gz{, .log} |65.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |65.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |65.8%| [DL] $(B)/canondata_storage/937458/0064e8ff05bc3832d392001e1067a4b8e592ee57/resource.tar.gz{, .log} |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |65.9%| [DL] $(B)/canondata_storage/1942671/174666f6b0943a6e3b50e2853a026a9e5306df50/resource.tar.gz{, .log} |66.1%| [DL] $(B)/canondata_storage/1937001/a40201d9703e1f0a566dafa26fbe3ce28327ccbb/resource.tar.gz{, .log} |66.3%| [DL] $(B)/canondata_storage/1942671/157db22ce38fb6cce530ef150bd605411e8ebf46/resource.tar.gz{, .log} |66.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |66.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |66.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |66.4%| [DL] $(B)/canondata_storage/1775059/8f09ac6b5b3163a6b48a01cc498b9df38d463d0a/resource.tar.gz{, .log} |66.5%| [DL] $(B)/canondata_storage/1923547/78a9f0a15afb28041ed1c9bf17e22144af9d87d0/resource.tar.gz{, .log} |66.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp |66.6%| [DL] $(B)/canondata_storage/1942173/cdd1c55686f268ec709b7b06494ceedf8dba76de/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1784117/562608e5eb2c9a9b9076bc8caa84f8c27bb8d804/resource.tar.gz{, .log} |66.7%| [DL] $(B)/canondata_storage/1903280/927f0adda78bba191400c48aa84923902c441ddf/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1784117/b7687d9809a69c860dddf82e25a7baf09d85ad87/resource.tar.gz{, .log} |66.9%| [DL] $(B)/canondata_storage/1936842/f15786b1a2c120b062d233a3acf6481caeba1cc1/resource.tar.gz{, .log} |67.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch |67.2%| [DL] $(B)/canondata_storage/1942671/6d2caf417069cf56304b9a87bee6e46557c6040d/resource.tar.gz{, .log} |67.3%| [DL] $(B)/canondata_storage/937458/f96a7bf5332a169a8ce992c48c7edc92ee1f8d6f/resource.tar.gz{, .log} |67.4%| [DL] $(B)/canondata_storage/1942173/a466b507289bc354fe44bf0c86d9f81646344db8/resource.tar.gz{, .log} |67.5%| [DL] $(B)/canondata_storage/1871002/5a5d63ea223b3a8a0646cdb551c6e739db3ec1b5/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1903885/86384881f884d02499a1a1c2d428dbffe5562509/resource.tar.gz{, .log} |67.6%| [DL] $(B)/canondata_storage/1937027/0d4e7463a2cca915bc28cdfa63111f875afc615b/resource.tar.gz{, .log} |67.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |67.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |67.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |67.8%| [DL] $(B)/canondata_storage/1881367/550f3b79ddf2520d4c20e67e83a71edceeb0c664/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1917492/53254226c3bcc22f2f64563cade92bbf7d103511/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1903280/ee99ccd66f40b93152bfac693040f9212fb7c86f/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1781765/f3b4483a271a53c7042af53dd89e7eaa7933954c/resource.tar.gz{, .log} |67.8%| [DL] $(B)/canondata_storage/1903885/ca691fdd45e5fd4ff7f0de337a847a572abfc30b/resource.tar.gz{, .log} |67.9%| [DL] $(B)/canondata_storage/1597364/50672d9edb4f5c65065b9ef1e197812ffea3d4ab/resource.tar.gz{, .log} |67.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |67.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |67.9%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/c0f0d29b6c33e7f05e57d209d8.auxcpp |67.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/_c0f0d29b6c33e7f05e57d209d8.yasm |67.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |68.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |68.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |67.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |68.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |68.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |68.1%| [DL] $(B)/canondata_storage/1775059/f8f056a0190a716df840d5350581b5176f1620e0/resource.tar.gz{, .log} |68.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |68.4%| [DL] $(B)/canondata_storage/1903280/24119f69ce7a44754a5937aa5bfe43a55ebc0544/resource.tar.gz{, .log} |68.4%| [DL] $(B)/canondata_storage/1916746/2ce040fc217c9c67c9501b51bd145ed612d2e4f7/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1942173/61b2e01110faecf0f5c4f213e07ca9a795ac4758/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937027/06ef7ef6dee3ee697013fd133a8e8a843e5f5de9/resource.tar.gz{, .log} |68.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |68.7%| [DL] $(B)/canondata_storage/1597364/8d657000086cecf224bc72e90af1c7594765fc47/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937150/4e9b56e1d1bc0e96e8da0e9d08a0b6ac9492ef3a/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871102/76ef40ba4c47f7efe65d7bad7234c744fd5830db/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1889210/e83d0680db32f18ae0fc05b1d22a19d24deaf2e2/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1936947/a4dfb10814524145ff4772935d09e5a668f36c7c/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1925842/e57e96bc0849393d23e4c2749e610a2eea073ec0/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1599023/cef1288a3a3f22f12b4d2a0b8bd80a0e7250701b/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1925842/3826c71d0a906529f5506f86d9d3c6a16d5aef14/resource.tar.gz{, .log} |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |69.0%| [DL] $(B)/canondata_storage/1937027/911c4fb8122dc5d7733b150e068edcf272fa4a83/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1597364/e093effc5b67d50506b993781809e66ea91b4bca/resource.tar.gz{, .log} |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |69.2%| [DL] $(B)/canondata_storage/1937001/441781d594b64769bedacb579efb911f22209130/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1881367/ad25b910d63584c57089eae59d027766b4eaa76c/resource.tar.gz{, .log} |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |69.4%| [DL] $(B)/canondata_storage/1889210/a01034cad0321e484aa98a1919f9803e0731c5c8/resource.tar.gz{, .log} |69.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |69.5%| [AR] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |69.5%| [DL] $(B)/canondata_storage/1942100/d8dcb117190dd26bf18428ea90360b8472802f4a/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1871182/5c4b689d6652e455bd5534102389cc50193dc744/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1942278/5eae7303204cecb578258cdbb0730e6f301fe9ed/resource.tar.gz{, .log} |69.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |69.9%| [DL] $(B)/canondata_storage/1809005/8d9f81d158800caf319f48dbee6f4ac4868563ac/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1781765/4a8585d94943c333a58ed548ecb65ad34b52755f/resource.tar.gz{, .log} >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] |70.2%| [DL] $(B)/canondata_storage/1031349/f5895ed6e69da88b0fc4924a0a0c6ab4bb0b1724/resource.tar.gz{, .log} |70.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |70.2%| [DL] $(B)/canondata_storage/1600758/e20b72387b5160905fc6ab9c9b1db35181c0aab8/resource.tar.gz{, .log} |70.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |70.3%| [DL] $(B)/canondata_storage/1871002/e16c2456d2585dc5260dae8153efad865802ebff/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1937424/8993c081ff6be9c32469da328d475149e2821dd0/resource.tar.gz{, .log} |70.3%| [DL] $(B)/canondata_storage/1900335/e1d65d264295642feda5f8ea8dd5531cef75bc40/resource.tar.gz{, .log} |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |70.4%| [DL] $(B)/canondata_storage/1923547/c233f36376ececec4d4b8799ba0c97c5a0a0b109/resource.tar.gz{, .log} |70.5%| [DL] $(B)/canondata_storage/1942100/0c8d8b78baeb89dd0643b1eaf6b779508990e6bb/resource.tar.gz{, .log} |70.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |70.7%| [DL] $(B)/canondata_storage/1600758/68f814e1b8c48a7c8c799660d583bea56db0cae3/resource.tar.gz{, .log} |70.7%| [DL] $(B)/canondata_storage/1931696/9060622cf11e385e8664f10b6994b4d7cd39f3ee/resource.tar.gz{, .log} |70.8%| [DL] $(B)/canondata_storage/1689644/dfcc9726c89ffc827ee88d69c2a34d897f6c9a22/resource.tar.gz{, .log} |71.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] |71.0%| [DL] $(B)/canondata_storage/1920236/b75e9728ed12152b2d9ddc60dd94c08dfc4796e2/resource.tar.gz{, .log} |71.0%| [DL] $(B)/canondata_storage/1936273/16a5945b6fd0dd622c0f5a29d4b7818e35b19526/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1777230/e4dcbc908eebc2925492abf82160d5dc404358d6/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1942100/070d287587bd5d2ed4158069a020e4772af81216/resource.tar.gz{, .log} |71.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/blobsan/blobsan |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |71.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |71.2%| [DL] $(B)/canondata_storage/1871182/a433ef127601aa9f74dedeb4efed9b9f728cdc6c/resource.tar.gz{, .log} |71.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/blobsan/main.cpp |71.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/mrrun/mrrun.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |71.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |71.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |71.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |71.9%| [DL] $(B)/canondata_storage/995452/59cb21feb51bcd4aaf002804abbfbb4a05ffe65f/resource.tar.gz{, .log} |71.9%| [DL] $(B)/canondata_storage/1946324/4e55210d05969637e5668c50c7fbdc1a61f108f5/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1924537/dd93ed409770f0ece7fecd435f3849c139b141e9/resource.tar.gz{, .log} |72.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |72.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |72.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 |72.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |72.1%| [DL] $(B)/canondata_storage/1925821/cfff423c8d7238e8abebca9535bd33e932257ec1/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1942415/9b0f428523034738a81372e143ed76e0d1ffdfad/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1784117/035863e9d5dcd47a63c8d359ae9a30ba4ceaf67b/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1781765/62947eb159726b899d4d2af791e768b5990f0b7e/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1937027/bae649e6896209dbfb01462c67cd54c0f971d262/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/995452/094991b6c0cfd5ed30c429b04d69b8af3c10eec4/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1031349/6c70521322fc43f752ef6b89f8667fefd006af8b/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1937424/d5d9e5b42a440866dd2b2f9da0c4923a86da8bea/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1689644/763d9bd4404423a24deab02585b884f08692c90b/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1931696/3b66a4b8ee9789607df97fd1b710d3ca890dd9b2/resource.tar.gz{, .log} |72.1%| [DL] $(B)/canondata_storage/1923547/320f607d9e9c19a93a835d3183938f1fba6dd52c/resource.tar.gz{, .log} |72.2%| [DL] 
$(B)/canondata_storage/1937027/260442135926ff6c9957da5c2478f83f49087cc4/resource.tar.gz{, .log} |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |72.2%| [DL] $(B)/canondata_storage/1031349/44024c82812375f0173f3b2781471a0977b071db/resource.tar.gz{, .log} |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |72.2%| [DL] $(B)/canondata_storage/1773845/27425423327af8b10415a6bbb80a5aec7c55b13f/resource.tar.gz{, .log} |72.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |72.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |72.2%| [DL] $(B)/canondata_storage/1781765/e399fc9631f4d96fcb5c717860c01dc3564d9b6c/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1599023/5228a2529d3a722c804df96464cc1cfa228876a1/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1784117/523c2e2d47e3427f1204fa18ca2d5ade41618bc0/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1942173/f28a221bc60f2450a1edbf8db6e85651a9c3bd79/resource.tar.gz{, .log} |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |72.2%| [DL] $(B)/canondata_storage/1942525/7de1fbc5f1b7918aec7094b41384bc4c27fc7953/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1925842/c1066aab7478fbe8c5b14337f793c111997cc324/resource.tar.gz{, .log} |72.2%| [DL] $(B)/canondata_storage/1946324/be96ad9cdb7bebb78c68ecc4a7b291982b0e9f1e/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1775319/2c692d91178bcc9774270c84072137fd625262ae/resource.tar.gz{, .log} |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |72.2%| [DL] $(B)/canondata_storage/1600758/32cfdeb8c6377a2e7e62c6c4adbb95f25af7669b/resource.tar.gz{, .log} |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |72.3%| [DL] $(B)/canondata_storage/1809005/4d456bdd41cc8761526df95e5b61e959b1dfc12f/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1871182/0dad491c929525221cf344b9f6f54cd14f2ce4f0/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1920236/9f586e65bb81318babf436f4458de22c9257b1d1/resource.tar.gz{, .log} >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |72.3%| [DL] $(B)/canondata_storage/1942525/68adc93267fab0086b1faf825d05122058d5f469/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1773845/c255bb2163f40b1ca08f81b23e10624ae1969605/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1923547/3423d2190488ae10821f2c300e70cd1b3ed9fc6d/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1936842/aecf4970df1ec06496312636476de0e7b19c3ebc/resource.tar.gz{, .log} |72.3%| [DL] $(B)/canondata_storage/1937001/cce9b26ef7e344a7dad40ff2fee61fd47fb80a21/resource.tar.gz{, .log} |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |72.3%| [DL] $(B)/canondata_storage/1773845/58669ef67391607d0bea8c241fd7cfc291b1e4a8/resource.tar.gz{, .log} |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |72.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/core/libyt-yt-core.a |72.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |72.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 |72.4%| [DL] $(B)/canondata_storage/1773845/ddb0deeaff9c015244a4452e588027cb4567b1a2/resource.tar.gz{, .log} >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |72.3%| [DL] $(B)/canondata_storage/1889210/c48249ef01b032757b4c9d64577e12744571e6ff/resource.tar.gz{, .log} |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |72.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |72.4%| [DL] $(B)/canondata_storage/1903280/f275d6f071715007b59c0fcf1ce9a3d4eafb9599/resource.tar.gz{, .log} |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |72.4%| [DL] $(B)/canondata_storage/937458/3ce9d3f90d17a09aa182a1ae8e08f2f065219fab/resource.tar.gz{, .log} |72.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |72.4%| [DL] $(B)/canondata_storage/1871102/cedf8264a1905131c6de15c01a397082d1677da3/resource.tar.gz{, .log} |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] Test command err: testing erasure none main# 0 main# 1 Checked 2 cases, took 0 us testing erasure block-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 149250 us testing erasure mirror-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 512 cases, took 40 us testing erasure block-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 524 us testing erasure mirror-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 64 cases, took 9 us testing erasure block-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 
main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 5382 us testing erasure stripe-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 448 us |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |72.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/mrrun/mrrun |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |72.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |72.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/79b13353271c8cfe46ea4b9f1e.auxcpp >> TBtreeIndexTPartLarge::SmallKeys1GB |72.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp >> test_generator.py::TestTpcdsGenerator::test_s1_state >> test_init.py::TestTpchInit::test_s1_s3 >> test_init.py::TestTpchInit::test_s1_column_decimal |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp >> test_generator.py::TestTpchGenerator::test_s1_parts >> test_generator.py::TestTpchGenerator::test_s1 >> test_init.py::TestTpcdsInit::test_s1_s3 >> test_init.py::TestClickbenchInit::test_s1_s3 |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |72.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |72.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/_79b13353271c8cfe46ea4b9f1e.yasm >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_state |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp >> test_init.py::TestTpchInit::test_s1_row >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb >> test_init.py::TestClickbenchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpchInit::test_s1_row [GOOD] |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |72.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |72.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tests/tpch/tpch |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column >> TYardTest::TestChunkReadRandomOffset >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] >> TPDiskTest::TestAbstractPDiskInterface [GOOD] >> TPDiskTest::TestChunkWriteRelease |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |72.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |72.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/bin/mvp_meta |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp >> test_init.py::TestClickbenchInit::test_s1_column >> test_generator.py::TestTpcdsGenerator::test_s1 >> TYardTest::TestWholeLogRead >> TBlobStoragePDiskCrypto::TestMixedStreamCypher >> test_generator.py::TestTpcdsGenerator::test_s1_parts >> TYardTest::TestLogWriteRead >> TYardTest::TestInit >> test_init.py::TestClickbenchInit::test_s1_column [GOOD] |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |72.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp >> 
test_init.py::TestClickbenchInit::test_s1_row [GOOD] >> TBlobStoragePDiskCrypto::TestMixedStreamCypher [GOOD] >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher [GOOD] >> TBlockDeviceTest::TestDeviceWithSubmitGetThread >> test_init.py::TestTpchInit::test_s1_column >> TPDiskUtil::PayloadParsingTest [GOOD] >> TPDiskUtil::SectorRestorator [GOOD] >> TPDiskUtil::SectorRestoratorOldNewHash [GOOD] >> TPDiskUtil::SectorPrint [GOOD] >> TPDiskUtil::TChunkIdFormatter [GOOD] >> TPDiskUtil::TOwnerPrintTest [GOOD] >> TPDiskUtil::TChunkStateEnumPrintTest [GOOD] >> TPDiskUtil::TIoResultEnumPrintTest [GOOD] >> TPDiskUtil::TIoTypeEnumPrintTest [GOOD] >> TPDiskUtil::TestNVMeSerial [GOOD] >> TPDiskUtil::TestDeviceList [GOOD] >> TPDiskUtil::TestBufferPool |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |72.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp >> TYardTest::TestLogWriteRead [GOOD] >> TYardTest::TestLogWriteReadMedium >> test_init.py::TestTpchInit::test_s1_column [GOOD] >> TYardTest::TestWholeLogRead [GOOD] >> TYardTest::TestSysLogReordering >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/meta/bin/mvp_meta |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp >> TBlockDeviceTest::TestDeviceWithSubmitGetThread [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes >> test_init.py::TestTpcdsInit::test_s1_row |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |72.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] |72.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp >> test_init.py::TestTpcdsInit::test_s1_column_decimal >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] >> TYardTest::TestLogWriteReadMedium [GOOD] >> TYardTest::TestLogWriteReadMediumWithHddSectorMap |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test |72.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp >> TYardTest::TestInit [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] >> TYardTest::TestInitOnIncompleteFormat >> 
test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb >> TPDiskTest::TestThatEveryValueOfEStateEnumKeepsItIntegerValue [GOOD] >> TPDiskTest::TestPDiskActorErrorState >> TLogCache::Simple [GOOD] >> TLogCache::EraseRangeSingleMinElement [GOOD] >> TLogCache::EraseRangeSingleMidElement [GOOD] >> TLogCache::EraseRangeSingleMaxElement [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunk |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |72.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp >> TPDiskTest::TestPDiskActorErrorState [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart [GOOD] |72.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg >> TPDiskTest::TestPDiskOwnerRecreation |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp >> TYardTest::TestInitOnIncompleteFormat [GOOD] >> TYardTest::TestInitOwner >> TYardTest::TestLogWriteReadMediumWithHddSectorMap [GOOD] >> TYardTest::TestLogWriteReadLarge |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/ut_helpers.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/dq_solomon_write_actor_ut.cpp |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> TYardTest::TestInitOwner [GOOD] >> TYardTest::TestIncorrectRequests |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp >> TYardTest::TestLogWriteReadLarge [GOOD] >> TYardTest::TestLogWriteCutEqual |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/trace_ut.cpp >> TPDiskUtil::TestBufferPool [GOOD] >> TPDiskUtil::SectorMap >> TYardTest::TestIncorrectRequests [GOOD] >> TYardTest::TestEmptyLogRead |72.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/libcpp-client-ydb_federated_topic.a |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/local_partition_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/describe_topic_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/basic_usage_ut.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/topic_to_table_ut.cpp >> TPDiskUtil::SectorMap [GOOD] >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector >> TYardTest::TestEmptyLogRead [GOOD] >> TYardTest::TestLogContinuityPersistence |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |72.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/impl/libclient-ydb_federated_topic-impl.a |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/basic_usage_ut.cpp |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> TBlockDeviceTest::TestWriteSectorMapAllTypes [GOOD] >> TBlockDeviceTest::WriteReadRestart |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> TYardTest::TestLogContinuityPersistence [GOOD] >> 
TYardTest::TestLogContinuityPersistenceLarge |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |72.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |72.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] >> TPDiskTest::TestChunkWriteRelease [GOOD] >> TPDiskTest::TestLogWriteReadWithRestarts |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |72.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TPDiskTest::TestPDiskOwnerRecreation [GOOD] >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |72.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |72.7%| RESOURCE $(sbr:770480022) - 0 bytes |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |72.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |72.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |72.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |72.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |72.7%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |72.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] |72.7%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> PgTest::DumpIntCells |72.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |72.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |72.7%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... library/recipes/docker_compose/bin/docker-compose} >> PgTest::DumpIntCells [GOOD] >> TYardTest::TestLogContinuityPersistenceLarge [GOOD] >> TYardTest::TestHttpInfo |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TYardTest::TestHttpInfo [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> TYardTest::TestHttpInfoFileDoesntExist [GOOD] >> TYardTest::TestFirstRecordToKeep >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner [GOOD] >> TPDiskTest::TestPDiskManyOwnersInitiation |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |72.7%| [DL] $(B)/canondata_storage/1871182/dd4b9c2ec136d95ef56e810f40fc95467b04ecae/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1880306/4d5d293606e8de46c7ff73818ff99e0fc57be13f/resource.tar.gz{, .log} |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |72.7%| [DL] $(B)/canondata_storage/1936947/bdfafed4d47cdedd18d3728ba1a3488ab05e7c41/resource.tar.gz{, .log} |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |72.6%| [DL] 
$(B)/canondata_storage/1871002/7e2607acdc67e4a5b8020e384ddfda98482a3dd6/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1936842/34312fe123c805aa28f5163a73c7f09252f8e0a4/resource.tar.gz{, .log} |72.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |72.7%| [DL] $(B)/canondata_storage/1809005/2a59475dc877549ac4197a291aacd77d92f24ab4/resource.tar.gz{, .log} |72.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |72.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |72.7%| [DL] $(B)/canondata_storage/1936947/fbd52f5ef5dc03aa0434d075e0a1299ea39b26ed/resource.tar.gz{, .log} >> TYardTest::TestFirstRecordToKeep [GOOD] >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder |72.6%| [DL] $(B)/canondata_storage/1871102/9387e6320639d466974a3bb05a9090c015e83fc2/resource.tar.gz{, .log} |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |72.6%| [DL] $(B)/canondata_storage/1599023/a09dc141000b40ce3e6095c62dea100bc7280e22/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1937001/4aaa35652a4aadbbe10797f226b61d7445c2e45e/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1923547/7035195682d1d389b130d309e647c0e6bae0996a/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1937429/e11799bc6b03b95f687825951895ae651115cd1d/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1937424/f54290c1c9e8b8c01bdab19c1d6ef1f76de15d9c/resource.tar.gz{, .log} |72.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest |72.6%| [DL] $(B)/canondata_storage/1942415/668fd150f695e80692dda505ef688dce77166cc6/resource.tar.gz{, .log} |72.7%| [DL] $(B)/canondata_storage/1031349/e7008f224fdaa1b7d924871c4b168a87665db0a9/resource.tar.gz{, .log} >> TPDiskTest::TestLogWriteReadWithRestarts [GOOD] >> TPDiskTest::TestLogSpliceNonceJump |72.6%| [DL] $(B)/canondata_storage/1900335/3723346a2da176c5ee65dcf2ea559b19068a6488/resource.tar.gz{, .log} |72.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpIntCells [GOOD] |72.6%| [DL] $(B)/canondata_storage/1600758/7b03b4e937e7ce456df18ab698865618521526f1/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1916746/d19e7aec784b7578ca293f03447ef038017b522b/resource.tar.gz{, .log} |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TPDiskTest::TestPDiskManyOwnersInitiation [GOOD] >> TPDiskTest::TestVDiskMock |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |72.6%| [DL] 
$(B)/canondata_storage/1942278/f9f1f95ac19bf87a37947afe2c4d14ccf42248f1/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1880306/c540459cb4387b8d7c2ab1f6a0dd2118f05c9809/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1130705/eac262cda27c06132baae4257bc3accb70d14812/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1889210/796baf28896eb5aaad8828a0b6000e7d17563447/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1130705/ea4630684a8ca3f006e81d74f06282c0ca946402/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1937492/50fb7b13e9ba844f85b4d3655e8e191f40eff050/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1847551/d239124b206d17cdfeda5a30fdaf3832c020b2f2/resource.tar.gz{, .log} |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TFlatDatabasePgTest::BasicTypes |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |72.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |72.6%| [DL] $(B)/canondata_storage/1942525/43a9b27bd71c75014ae789d65d577314e37262fc/resource.tar.gz{, .log} |72.6%| [DL] $(B)/canondata_storage/1900335/a93c9a2bd84a19d5ed7b813ddf3960f383b3d67c/resource.tar.gz{, .log} |72.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |72.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |72.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |72.5%| [DL] $(B)/canondata_storage/1775059/ab56aab54b522dc673269d54a69440f25fd25d9d/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1903885/d8ef1e8fb573d41016d2ce617a6eb2b955cadf60/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1871182/e02f7cf2d403eeff46ab74696026e36be4ded9a9/resource.tar.gz{, .log} |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> TPDiskTest::TestVDiskMock [GOOD] >> TPDiskTest::TestRealFile >> TFlatDatabasePgTest::BasicTypes [GOOD] |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |72.5%| [DL] $(B)/canondata_storage/1781765/a18a0c7b9f1f9ff355c6f36700ff285ecfd77f4b/resource.tar.gz{, .log} |72.5%| [DL] $(B)/canondata_storage/1936842/557f7ab03608bf231a6bd2276c94b8a7ee4523b0/resource.tar.gz{, .log} |72.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |72.1%| [DL] $(B)/canondata_storage/1809005/6cb24cd095b4f0501ec39c6352d23fb4a3ba9958/resource.tar.gz{, .log} |72.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |72.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |72.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |72.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |72.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> ArrowTest::BatchBuilder |71.7%| [DL] $(B)/canondata_storage/1031349/6832f9241abb81a3c19acf956e8e9e9ed37578ef/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1773845/c0a795fb831b832e6e40aa0f1b1a1e5b60134f81/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1777230/b8c638a79c26a4c14c582731ad5b06fe98478bb4/resource.tar.gz{, .log} |71.7%| [DL] $(B)/canondata_storage/1936947/960a823a7074bb0a4fc6829f35dc9035ea62bcf1/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1942415/8d010130e1284b2e3b1f4a934fc3768cc8409e69/resource.tar.gz{, .log} |71.8%| [DL] $(B)/canondata_storage/1871102/0805bf7d763724d22f5600786ba3f36973f091d1/resource.tar.gz{, .log} |71.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> ArrowTest::BatchBuilder [GOOD] |71.4%| [DL] $(B)/canondata_storage/1777230/13a939531efb4067a14f4e097b82c407f6c484fc/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1900335/8eba31ae2dcfd9245ad9327a1ac3ca89667336e2/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1923547/82c5214ca1ac24aea9514c0da9d0fdf3a36d3b61/resource.tar.gz{, .log} |71.4%| [DL] $(B)/canondata_storage/1937367/be35feae41cdf3f87ad7ea8d4ce18ba9629b9c4f/resource.tar.gz{, .log} |71.1%| [DL] $(B)/canondata_storage/1936273/1aa122f73bdfc2d7c7ecc6254b0a10b2df910380/resource.tar.gz{, .log} |71.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |71.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> ArrowTest::ArrowToYdbConverter [GOOD] >> ArrowTest::KeyComparison [GOOD] >> ArrowTest::SortWithCompositeKey [GOOD] >> ArrowTest::MergingSortedInputStream [GOOD] >> ArrowTest::MergingSortedInputStreamReversed [GOOD] >> ArrowTest::MergingSortedInputStreamReplace [GOOD] >> ColumnFilter::MergeFilters [GOOD] >> ColumnFilter::CombineFilters [GOOD] >> Dictionary::Simple >> ThrottlerControlTests::Overflow_1 [GOOD] |70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut >> TPDiskTest::TestLogSpliceNonceJump [GOOD] >> SamplingControlTests::EdgeCaseLower [GOOD] |70.4%| [DL] $(B)/canondata_storage/1847551/682cc73a2d58def116940ca081e758391e0f27cb/resource.tar.gz{, .log} >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step >> ThrottlerControlTests::Overflow_2 [GOOD] |70.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |70.0%| [DL] $(B)/canondata_storage/1942525/b9299e5debda55b6ec2d51671d37c03bdf672e59/resource.tar.gz{, .log} |70.0%| [DL] $(B)/canondata_storage/1871002/fb6fb37c565974a6f0c497e8b3e58f6b5bf320b2/resource.tar.gz{, .log} >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step >> SamplingControlTests::EdgeCaseUpper [GOOD] |70.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step |69.7%| [DL] $(B)/canondata_storage/1931696/8382830b676a61af36d1344910d51cd1bf39f3ef/resource.tar.gz{, .log} 
|69.7%| [DL] $(B)/canondata_storage/1777230/dd70c380673122cd500d799c70016541eabd320a/resource.tar.gz{, .log} |69.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |69.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |69.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |69.7%| [DL] $(B)/canondata_storage/1936273/3aa418c24e77eb510bbef390a883a810189ad7de/resource.tar.gz{, .log} |69.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest >> ThrottlerControlTests::LongIdle [GOOD] >> ThrottlerControlTests::Simple [GOOD] >> FormatTimes::ParseDuration [GOOD] |69.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |69.5%| [DL] $(B)/canondata_storage/1937150/6b5a12edf907ec102b80b96b177733ca6ed1ded6/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937001/0a62c6e91e29cdeb1135736130cced7fc45c219d/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1880306/c56e2bb8307f2239f8acf535a621c40b5a08a363/resource.tar.gz{, .log} >> SamplingControlTests::Simple [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector [GOOD] >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |69.4%| [DL] $(B)/canondata_storage/1942278/bbfba5da727810c5f55bb0961dd52294da024504/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1784826/6123ea7aa0267741109a77c112e89d06fa728b93/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1937367/46e259d63bb269eac924a02dbf163b759073a96a/resource.tar.gz{, .log} >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector |69.4%| [DL] $(B)/canondata_storage/1942100/0c8472aade15448e1084d8c96bfb0a5ee21afb4e/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1942525/471bed5f88a668af45ac44bbcfdc687ddde8df24/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1942100/2af4e4b4fdba37b11e481ee71a1d7bf5f7f785c0/resource.tar.gz{, .log} >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder [GOOD] >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] >> Config::ExcludeScope [GOOD] >> TYardTest::TestDestroySystem |69.5%| [DL] $(B)/canondata_storage/1130705/98bfcb23db43674b07f163a5d89bc355761ccf70/resource.tar.gz{, .log} |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseUpper [GOOD] |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_1 [GOOD] |69.5%| [DL] $(B)/canondata_storage/1899731/0bc935d3f61810d330a6462fb133ddcc4ac126c6/resource.tar.gz{, .log} |69.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_pg/unittest |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |69.5%| [DL] $(B)/canondata_storage/212715/281225c593b89b14398e3d64718321920556da62/resource.tar.gz{, .log} |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_2 [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump [GOOD] |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Simple [GOOD] |69.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseLower [GOOD] |69.5%| [LD] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |69.5%| [DL] $(B)/canondata_storage/1600758/99308b4324dde12d46c32387dd23cb39768d4365/resource.tar.gz{, .log} |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::LongIdle [GOOD] |69.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] |69.5%| [DL] $(B)/canondata_storage/1903885/a4d0122d8471ff0ca85352e617bed922d9ad8df1/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937429/089b4c182c7fc7602e8f7e7a4b535cbf50844bdb/resource.tar.gz{, .log} |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> TYardTest::TestDestroySystem [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyLogWrite |69.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |69.5%| [DL] $(B)/canondata_storage/1937001/7109df5869c8df84d1eced32a121709a7a6081d6/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1881367/03ce4da085261f32ea1c441399858f72350f0970/resource.tar.gz{, .log} |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |69.5%| [DL] $(B)/canondata_storage/1925842/bac0b87c8df73e5d30ab0b57349de6b672768da8/resource.tar.gz{, .log} >> TYardTest::TestDestructionWhileWritingChunk |69.5%| [DL] $(B)/canondata_storage/1942671/a6ef6234ecec8bdd9b5f7ec30206378c9f7268ef/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1899731/f6b32a6820fc036afae21f367915e7bc82284241/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937492/b472fd4c22edefd63722fcdafc178d25f35c8edf/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1917492/5ce972a5c2e3d600308091645f162df219851507/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1936997/93899b3de50fae3f9677baacc98094a7a629590a/resource.tar.gz{, .log} |69.6%| [AR] {RESULT} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |69.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::Simple [GOOD] |69.6%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |69.6%| [DL] $(B)/canondata_storage/1942525/16208faf1c4299915ddd3945e2017b318594867c/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937424/567d7f4e2a03fd773183d9e7015f2f468ea57566/resource.tar.gz{, .log} >> TYardTest::TestDestructionWhileWritingChunk [GOOD] |69.5%| [DL] $(B)/canondata_storage/1899731/85df5ddd78ef06f5b299b58b79881c22b39759af/resource.tar.gz{, .log} |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |69.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |69.6%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |69.6%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |69.6%| [AR] {RESULT} 
$(B)/yt/yt/core/libyt-yt-core.a >> TPDiskTest::TestRealFile [GOOD] |69.6%| [DL] $(B)/canondata_storage/1599023/b01cb10652577009aa8ecebf7aa8f50a4a8d4236/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1847551/155e040f6efb509114e481612c26cc3259caa89d/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942173/1055029c046ccc9d6feeae4f468d618044a7fa75/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1925821/2762f8f29ee80e9d69a3fae7ac21750c067803da/resource.tar.gz{, .log} >> TBlockDeviceTest::WriteReadRestart [GOOD] >> TColorLimitsTest::Colors [GOOD] >> TColorLimitsTest::OwnerFreeSpaceShare [GOOD] >> TLogCache::EraseRangeOnEmpty [GOOD] >> TLogCache::EraseRangeOutsideOfData [GOOD] >> TLogCache::EraseRangeSample [GOOD] >> TLogCache::EraseRangeAllExact [GOOD] >> TLogCache::EraseRangeAllAmple [GOOD] >> PDiskCompatibilityInfo::OldCompatible >> TYardTest::TestDestructionWhileReadingChunk >> TPDiskTest::TestSIGSEGVInTUndelivered >> TPDiskTest::TestFakeErrorPDiskManyLogWrite [GOOD] |69.5%| [DL] $(B)/canondata_storage/1937150/be16d4af021ec170de66b93263bcd36bcba25641/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1130705/173afc6d44db00f6f42767e88bce00b623a40335/resource.tar.gz{, .log} |69.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |69.5%| [DL] $(B)/canondata_storage/1889210/a4abb800446905e7d80fe38237bce315efaf5daf/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1597364/50d0ff496a786c8f009d7afa268d209155aef6ce/resource.tar.gz{, .log} |69.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |69.5%| [DL] $(B)/canondata_storage/1942671/18f32d5eb8ab2aab65012dda63f9cfd635ed3680/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1942173/5dda369a5c566435d55e882d65f0212fa3dfb906/resource.tar.gz{, .log} >> TYardTest::TestSysLogReordering [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector [GOOD] >> TYardTest::TestStartingPoints |69.6%| [DL] $(B)/canondata_storage/1900335/44d8407be2783234f3018d11eeb1589813e73bcf/resource.tar.gz{, .log} >> PDiskCompatibilityInfo::OldCompatible [GOOD] >> PDiskCompatibilityInfo::Incompatible >> TPDiskTest::TestSIGSEGVInTUndelivered [GOOD] >> TYardTest::TestDestructionWhileReadingChunk [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector >> TYardTest::TestStartingPoints [GOOD] >> TYardTest::TestWhiteboard |69.5%| [DL] $(B)/canondata_storage/1871102/fc62e492471256a62165f341a79346abd3d08986/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1903280/419b5c18140d44a17c33d80899398c8647846b33/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1880306/ee64d24fc7c0bd8fa221eca8eb309837e5c0fe9d/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937429/7495c8355df97f85fa824cc601aaf3eb891c07d7/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1781765/a9bb192df522b281951b02a8ad80c7fbaa8b1717/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937027/d0866b9a79f4fc7bab36a1975d074abe641f546b/resource.tar.gz{, .log} >> PDiskCompatibilityInfo::Incompatible [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead [GOOD] |69.5%| [DL] $(B)/canondata_storage/1920236/b91be0c508f3325775d30c05e1d48d09dbf039fa/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942525/d140db22959141111740879cdd2464012e1a4760/resource.tar.gz{, .log} |69.6%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |69.6%| [DL] $(B)/canondata_storage/1871182/a09ccd00a4b0358de052d958e9948bdc99497247/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1936273/640ea425b9d5a6140c315077f2a83bba387482d8/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1881367/164f3c6886439a33c9799bda28227bc201c09eb7/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1031349/5baef42837a5c7e8f75ff06754ea8ff7be02b259/resource.tar.gz{, .log} >> TPDiskTest::TestPDiskOnDifferentKeys [GOOD] >> TYardTest::TestDestructionWhileReadingLog >> TPDiskTest::TestFakeErrorPDiskSysLogRead >> PDiskCompatibilityInfo::NewIncompatibleWithDefault |69.6%| [DL] $(B)/canondata_storage/1031349/f9e5528e64f4bcdb4154fd10489bc2c93c9230cf/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1784117/ed4d4136ae1bf7a366a93d130e4c3e74aa7566cd/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942173/badfb45c9ff8847ac34b8c1fc73d36f02f754caa/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937367/e5d3b4a217429148a8315cf4e228d45b21f861fe/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937367/e2a772964cf46b8a14a828d48a136378216522b9/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1936947/a99026e839b7e22714c2a9a81971a3b5e3ed1eb4/resource.tar.gz{, .log} >> TYardTest::TestDestructionWhileReadingLog [GOOD] >> TPDiskTest::WrongPDiskKey >> TPDiskTest::TestFakeErrorPDiskSysLogRead [GOOD] |69.6%| [DL] $(B)/canondata_storage/1871102/0f5b062694101798bb8a310f92ed9974c1a845c0/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1889210/a25c62064c6b3aebb0148e4cc231d4df4bb7bd7c/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1773845/e1901cc86dc30911ad22db641ffb6fe66c04423b/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1946324/96d594815fbe2bdfa11f7d8491a7bb58b8738a79/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1903885/71ee43a73f62c943cd2c83ad3cb710bb8b1d9fb0/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937492/ff27734bcb37c413b13864458b4334e93e0d3308/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1031349/ce2302f84e58b9b15afe3898be33e7dcfaa01063/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942100/285e905e2474369b3f45b547f2afa647f1a81533/resource.tar.gz{, .log} >> TPDiskTest::TestFakeErrorPDiskManyChunkRead >> TYardTest::TestFormatInfo |69.6%| [DL] $(B)/canondata_storage/1936842/356668b054049036b6fc6ae585623a4cb1b29102/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1600758/e4cf89c10a0c8ed92967210019872e3f5d0ba6b9/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1936273/364381182da99c268c68cfaa8dd7a6e282153b85/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942173/93413c7f437227eab2052810218d1df60ced3a52/resource.tar.gz{, .log} >> PDiskCompatibilityInfo::NewIncompatibleWithDefault [GOOD] |69.6%| [DL] $(B)/canondata_storage/1937429/581a41f498c09b791293b88fcb066123b9531421/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1871002/31d5df73f869f6fcf8bde774aab16576da3e6aa0/resource.tar.gz{, .log} >> TPDiskTest::WrongPDiskKey [GOOD] >> TYardTest::TestFormatInfo [GOOD] >> TYardTest::TestEnormousDisk >> TYardTest::TestWhiteboard [GOOD] >> TYardTest::TestMultiYardStartingPoints >> PDiskCompatibilityInfo::Trunk >> TPDiskUtil::AtomicBlockCounterFunctional [GOOD] |69.6%| [DL] 
$(B)/canondata_storage/937458/432610f205d490984e1977d219cad674f7aad6a5/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1031349/593cf731fe784ac5bdeb5744a6013cfa7df8c284/resource.tar.gz{, .log} >> TPDiskUtil::AtomicBlockCounterSeqno [GOOD] >> PDiskCompatibilityInfo::Trunk [GOOD] >> TPDiskUtil::Light >> PDiskCompatibilityInfo::SuppressCompatibilityCheck >> PDiskCompatibilityInfo::SuppressCompatibilityCheck [GOOD] >> PDiskCompatibilityInfo::Migration >> TPDiskUtil::Light [GOOD] >> PDiskCompatibilityInfo::Migration [GOOD] >> TPDiskUtil::LightOverflow [GOOD] >> TPDiskUtil::DriveEstimator >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector [GOOD] |69.6%| [DL] $(B)/canondata_storage/1784826/c8ce54c6ece9e34ad4006150cfd33aa59537e273/resource.tar.gz{, .log} |69.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector >> TYardTest::TestMultiYardStartingPoints [GOOD] >> TYardTest::TestSysLogOverwrite |69.6%| [DL] $(B)/canondata_storage/1942100/636230304ba87d5b90b5566e93fadf7c12da6a8e/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942100/178599a2b23ce6932b343bc5f863a036b0534c1c/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937150/19f3cf1ec3946e665195d75146c9af1ad0df2747/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1889210/718540831bd2dba15f12341f6611010d7d655169/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1936947/ef3e5fbc5fb23bc80e348df0815b2958ed5e589d/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1880306/f9e06f45c25a1b68f533811cf1a5eba7085a7de4/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1031349/d86a7eaf6f5bc2cdaedba52c0890601b8cc1d981/resource.tar.gz{, .log} >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector [GOOD] >> TYardTest::TestBadDeviceInit >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector [GOOD] |69.5%| [DL] $(B)/canondata_storage/1937027/7e92a59557f254d8b58c96118ce2e626b197c0b1/resource.tar.gz{, .log} |69.5%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb >> TYardTest::TestBadDeviceInit [GOOD] >> TYardTest::TestChunkReadRandomOffset [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector |69.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut >> TYardTest::TestChunkContinuity2 |69.4%| [DL] $(B)/canondata_storage/1871182/5e06b08307574a72f79e9da297b863e3e09d864d/resource.tar.gz{, .log} >> TYardTest::TestChunkWriteRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> PDiskCompatibilityInfo::Migration [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead [GOOD] Test command err: GREEN 0.5025125628 0 CYAN 0.8623115578 0.862 LIGHT_YELLOW 0.8934673367 0.893 YELLOW 0.9145728643 0.914 LIGHT_ORANGE 0.9306532663 0.93 PRE_ORANGE 0.9467336683 0.946 ORANGE 0.9668341709 0.966 RED 0.9879396985 0.987 BLACK 0.9979899497 0.997 2024-11-21T17:42:31.140118Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:31.164325Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13364027231653170264 MagicNextLogChunkReference: 16060210582375884225 MagicLogChunk: 2574383040204915910 MagicDataChunk: 1416529930680066292 MagicSysLogChunk: 14675517046351729790 MagicFormatChunk: 
17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210950994878 (2024-11-21T17:42:30.994878Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:31.168713Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:31.172992Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:31.173175Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:31.173286Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:31.173451Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:31.173584Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:31.173637Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2007796 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:31.173910Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:31.174260Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T17:42:31.174360Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 2007796 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T17:42:31.177861Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:31.194200Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13364027231653170264 MagicNextLogChunkReference: 16060210582375884225 MagicLogChunk: 2574383040204915910 
MagicDataChunk: 1416529930680066292 MagicSysLogChunk: 14675517046351729790 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210950994878 (2024-11-21T17:42:30.994878Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:31.200433Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1549886 NonceLog# 2007796 NonceData# 1979249} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:31.206503Z node 1 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:31.206533Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2024-11-21T17:42:31.206549Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:31.208318Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:31.208491Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:31.208530Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:31.669690Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:31.682838Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 11304088511086751296 MagicNextLogChunkReference: 10811227026392906250 MagicLogChunk: 9769808435939212896 MagicDataChunk: 11963828523350225626 MagicSysLogChunk: 17773925702172389887 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210951606295 (2024-11-21T17:42:31.606295Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:31.686396Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord 
Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:31.692804Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:31.692833Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:31.693807Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:31.694047Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:31.694131Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:31.695047Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1692430 CutLogId# [0:0:0] ownerRound# 4 PDiskId# 1 2024-11-21T17:42:31.704058Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:31.708695Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T17:42:31.708855Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [1:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1692430 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T17:42:31.714658Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:31.716078Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 11304088511086751296 MagicNextLogChunkReference: 10811227026392906250 MagicLogChunk: 9769808435939212896 MagicDataChunk: 11963828523350225626 MagicSysLogChunk: 17773925702172389887 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210951606295 (2024-11-21T17:42:31.606295Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:31.717416Z node 2 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1165555 NonceLog# 1692430 NonceData# 2091807} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [1:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:31.723553Z node 2 :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl_log.cpp:431} Incompatible version ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 2 ... acing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:32.256581Z node 5 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2024-11-21T17:42:32.256643Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:32.256827Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:32.256936Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:32.256979Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:32.260540Z node 5 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [4:4294967295:0:0:0] OwnerId# 3 OwnerRound# 11 PDiskId# 1 2024-11-21T17:42:32.331323Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:32.352417Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5046044557797273756 MagicNextLogChunkReference: 4250510474202617014 MagicLogChunk: 10470593372210205740 MagicDataChunk: 16909529006504356284 MagicSysLogChunk: 13338912816442098806 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210952286825 (2024-11-21T17:42:32.286825Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:32.360106Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:32.368618Z node 6 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 
ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:32.368645Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:32.372254Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:32.376326Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:32.380290Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:32.380421Z node 6 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [5:_:0:0:0] FirstNonceToKeep# 1949321 CutLogId# [0:0:0] ownerRound# 12 PDiskId# 1 2024-11-21T17:42:32.384424Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:32.392610Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T17:42:32.392740Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [5:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1949321 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T17:42:32.398159Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:32.408299Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5046044557797273756 MagicNextLogChunkReference: 4250510474202617014 MagicLogChunk: 10470593372210205740 MagicDataChunk: 16909529006504356284 MagicSysLogChunk: 13338912816442098806 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210952286825 (2024-11-21T17:42:32.286825Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:32.416390Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1088055 NonceLog# 1949321 NonceData# 1687380} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [5:4294967295:0:0:0]} PDiskId# 1 
2024-11-21T17:42:32.428288Z node 6 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:32.428318Z node 6 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2024-11-21T17:42:32.428334Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:32.428657Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:32.429056Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:32.429186Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:32.429356Z node 6 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [5:4294967295:0:0:0] OwnerId# 3 OwnerRound# 13 PDiskId# 1 2024-11-21T17:42:32.429570Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1102} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2024-11-21T17:42:32.429693Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [5:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1949321 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T17:42:32.437848Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:32.452459Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5046044557797273756 MagicNextLogChunkReference: 4250510474202617014 MagicLogChunk: 10470593372210205740 MagicDataChunk: 16909529006504356284 MagicSysLogChunk: 13338912816442098806 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210952286825 (2024-11-21T17:42:32.286825Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:32.460477Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet 
Version# 0 NonceSysLog# 2909237 NonceLog# 3441302 NonceData# 3194627} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [5:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:32.464626Z node 6 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:32.464656Z node 6 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 4 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 16384} PDiskId# 1 2024-11-21T17:42:32.464672Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 16384} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:32.464896Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:32.468464Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:32.468533Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 |69.4%| [DL] $(B)/canondata_storage/1599023/b8ad35209f29e3ae308d0f48b10aeffda01d29c8/resource.tar.gz{, .log} >> TYardTest::TestSysLogOverwrite [GOOD] |69.4%| [DL] $(B)/canondata_storage/212715/7a6807b4e165da760d46e0887816887571bfacbc/resource.tar.gz{, .log} |69.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |69.4%| [DL] $(B)/canondata_storage/1936842/3070aa36e76249b07e9d35cb263dbb1236806278/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1937027/3bbc108de55ced7d8ea9dde3d33b539d651376a2/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1942278/2479c937ada2e75d2c6653f0cad3c43f5a4f06de/resource.tar.gz{, .log} |69.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |69.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |69.2%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut >> TYardTest::TestUpsAndDownsAtTheBoundary >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite >> TYardTest::TestChunkContinuity2 [GOOD] |69.1%| [DL] $(B)/canondata_storage/1942100/4aa9e62b86e4c5c3e9c9dadf048cc0ff6ca8bc67/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1871102/487289822b55151d5bfd88d1dcd849a7a02d10b3/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1784117/cb10fc911ed03589097ad5a3bcbcd64029d4ed63/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1903280/cd929bf1ad1ba301bcf2976b4845d75f386c6cf5/resource.tar.gz{, 
.log} |69.1%| [DL] $(B)/canondata_storage/1937027/840ae09a36bdc9a4737f612d0787fa5691189018/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1784117/171e9b753021491729fde435744a1c6a7eab11a6/resource.tar.gz{, .log} |69.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 |69.1%| [DL] $(B)/canondata_storage/1599023/2257627623f9ecc02660dc51c2da964bf24e60cf/resource.tar.gz{, .log} |69.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut >> TYardTest::TestChunkWriteRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite [GOOD] >> Dictionary::Simple [GOOD] |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |68.9%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |68.9%| [DL] $(B)/canondata_storage/1597364/5c8f443c3c4d257c5cc9ae09d46f62d2dbcbded8/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1937027/2c2635807b1c895dd72532d28ab483f4c079524f/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1889210/5799676c8b21a8a6ab9a23a98cfcb11d1e8db6cf/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1923547/63a9bf11f98a47ebc6a355858fc4c8179b67ce82/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1946324/8130a6ff70c94e35f3314a41fdd274c67d346f2d/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942100/551d394c490cdd39558aea14297d28dd74804b86/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1777230/978467ecd66b7473cbb5a78812196988c2151940/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1599023/892497444bbacbe92ad2c557c09c697b859ad48d/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942415/4eadae4d7247ca7a82f53f147f81aef7d5caa5dc/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1784117/0327cec5b34f60c96db651d3505ec161fd64bb5a/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/937458/320b06d50641c62738ef0e2f333cec71c14bfc4b/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1814674/6b45e34c475ed17d7555fbe7f3091282279c761a/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1775319/74bc7546a3c0c01c11b723666a17da4eca8c2c07/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1936842/cb5a8c69f3eb77766fab5ac551027f757f0abc05/resource.tar.gz{, .log} >> TYardTest::TestChunkWriteReadWithHddSectorMap >> TPDiskTest::PDiskRestart >> Dictionary::ComparePayloadAndFull |68.9%| [DL] $(B)/canondata_storage/1924537/8b664aa6c736022fbba07ebb5d7782851aaea5be/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1847551/9284ecf3eac8764c279b785b85bb154fba4c8f08/resource.tar.gz{, .log} |68.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/ut/ydb-core-persqueue-codecs-ut |68.9%| [DL] $(B)/canondata_storage/1942278/14e1322c424babe14d336d0e30e11ebf1c359af0/resource.tar.gz{, .log} >> TYardTest::TestChunkContinuity3000 |68.9%| [DL] $(B)/canondata_storage/1880306/3b0ac69e754a5bc45f3b426e0cb53bbecdce37e5/resource.tar.gz{, .log} >> TPDiskTest::PDiskRestart [GOOD] |68.9%| [DL] $(B)/canondata_storage/1931696/8786785a3fe08451381b624963d1fcdfd92ae069/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942100/d39849f28d468fa88039784446b53f38c2db1da1/resource.tar.gz{, .log} >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] |68.8%| [DL] $(B)/canondata_storage/1781765/6df866e48931c0e70847b2260b3eea091b1ffa6c/resource.tar.gz{, 
.log} |68.8%| [DL] $(B)/canondata_storage/1130705/85069899508bcd3b8be2b6d75961f8852e8ff128/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1881367/2d7a75f4178eeea3a2e83df99de305b10359458b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942415/f6f9af5cbb20343e1122f9a21a916296a441b2fb/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942671/17c7e87d808f783b60251714ee76f807b99866f5/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936273/271f22955fd0fcf6e1856272701535b2e45b32ef/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936997/cf1a703bf40f5aee609a5f5135a4d554031effca/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871182/b2146249716fc2f308dff54f809c6f3e5b912d8d/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1923547/c3f064ea25dafaabdc78d527cb888e8c29c155df/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1920236/b71e9d330355acb299c30c8f443a7df178347ab0/resource.tar.gz{, .log} >> TPDiskTest::PDiskRestartManyLogWrites >> TYardTest::TestChunkContinuity3000 [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites [GOOD] >> TYardTest::TestChunkContinuity9000 >> TPDiskTest::TestLogSpliceChunkReserve >> TYardTest::TestChunkContinuity9000 [GOOD] >> TYardTest::TestChunkWriteReadWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadMultiple >> TYardTest::TestChunkLock >> TYardTest::TestChunkLock [GOOD] >> TYardTest::TestCheckSpace >> TYardTest::TestCheckSpace [GOOD] >> TYardTest::TestBootingState >> TYardTest::TestBootingState [GOOD] >> TYardTest::Test3AsyncLog >> TYardTest::Test3AsyncLog [GOOD] >> TYardTest::TestChunkDelete >> Dictionary::ComparePayloadAndFull [GOOD] >> Hash::ScalarBinaryHash [GOOD] >> Hash::ScalarCTypeHash [GOOD] >> Hash::ScalarCompositeHash [GOOD] >> ProgramStep::Round0 [GOOD] >> ProgramStep::Round1 [GOOD] >> ProgramStep::Filter [GOOD] >> ProgramStep::Add [GOOD] >> ProgramStep::Substract [GOOD] >> ProgramStep::Multiply [GOOD] >> ProgramStep::Divide [GOOD] >> ProgramStep::Gcd [GOOD] >> ProgramStep::Lcm [GOOD] >> ProgramStep::Mod [GOOD] >> ProgramStep::ModOrZero [GOOD] >> ProgramStep::Abs [GOOD] >> ProgramStep::Negate [GOOD] >> ProgramStep::Compares [GOOD] >> ProgramStep::Logic0 [GOOD] >> ProgramStep::Logic1 [GOOD] >> ProgramStep::StartsWith [GOOD] >> ProgramStep::EndsWith [GOOD] >> ProgramStep::MatchSubstring [GOOD] >> ProgramStep::StartsWithIgnoreCase [GOOD] >> ProgramStep::EndsWithIgnoreCase [GOOD] >> ProgramStep::MatchSubstringIgnoreCase [GOOD] >> ProgramStep::ScalarTest [GOOD] >> ProgramStep::Projection [GOOD] >> ProgramStep::MinMax [GOOD] >> ProgramStep::Sum [GOOD] >> ProgramStep::SumGroupBy [GOOD] >> ProgramStep::SumGroupByNotNull [GOOD] >> ProgramStep::MinMaxSomeGroupBy |68.6%| [DL] $(B)/canondata_storage/1942415/0256128ac8ca0ee7db70a045de39aefe7d42898f/resource.tar.gz{, .log} >> ProgramStep::MinMaxSomeGroupBy [GOOD] >> ProgramStep::MinMaxSomeGroupByNotNull [GOOD] |68.6%| [DL] $(B)/canondata_storage/1937367/8e6ed09f9acc0e3323bc0508667d24a148793a51/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1809005/bace128d842e0e2cef93390c0800c74269352290/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942525/0c03a9c206fe0b66d4716933185c93b3bcd09103/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1031349/1ad42ee65e67699c7849e156ebaee300f84f8937/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871002/2eca5232e8e4d20f356cd7e26120cb51b472a03f/resource.tar.gz{, .log} >> TYardTest::TestUpsAndDownsAtTheBoundary [GOOD] >> TYardTest::TestUnflushedChunk 
|68.6%| [DL] $(B)/canondata_storage/1937429/0cd5c4c599538f9f8310cc0a7b67cdd6d3a2333f/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1689644/4fd6684657e6d8973170a91936689f26f2b662fc/resource.tar.gz{, .log} |68.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part14/ydb-library-yql-tests-sql-dq_file-part14 |68.6%| [DL] $(B)/canondata_storage/1936842/c608d0442aed7541d0f8b13a5b587f4b16587009/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1899731/5f48750839c300c592c921895adce61b6bdd10c7/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1880306/3dc569e22abef14294acdad1d23118654806f3a5/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1784826/ca2b5c92f6c48a734cb3bc782f744c31b81d1837/resource.tar.gz{, .log} >> TYardTest::TestChunkDelete [GOOD] >> TYardTest::TestChunkForget |68.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |68.6%| [DL] $(B)/canondata_storage/1936947/0d1daf9062d6c8b5dd4dc9b1f73c97791c044137/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1923547/331b1de1b2a9544651bd249eccea1d8975558c09/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1899731/945b5e7f9461fd64a5afc8ef7e202b25c09868e6/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937429/231d22d843eec78552d52ff0253bfa29e1a7a389/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1946324/eac6b8c7847ce1f23bab0871d4d46d46df2cadf7/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1936842/0049c952a1bcb0ee8c00f8d262e8ccbc9a964444/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1777230/7bf27f01819182c286a6e9395b8c7d8d18f3fff1/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942100/b3af8dc6178472cab79e2c61d51edf8f4af32e2f/resource.tar.gz{, .log} ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: 
symbol not defined
ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined
[the same ld.lld warning repeats here for several hundred glibc, pthread and xdr_* symbols that the version script marks 'global' but that are not defined in the output, including fscanf, fstatfs, getaddrinfo, gethostbyname, getline, ioctl, memcpy, memset, mmap, open, pipe, poll, pread, printf, pthread_create, pthread_mutex_lock, read, recv, sigaction, snprintf, socket, strcmp, strlen, vsnprintf, write and the xdr_* family; the sequence then starts over and repeats with additional symbols such as fcvt, gcvt, getenv, getgrnam, getpwuid, mallinfo, openpty, prlimit, putenv, readlink, setenv, shmat, strftime, strtod and strtol]
ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed:
ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'uname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcslen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined >> TYardTest::TestChunkForget [GOOD] >> TYardTest::Test3HugeAsyncLog |68.6%| [DL] $(B)/canondata_storage/1881367/1778652f0a952156dd3968c21e9af10b6a474237/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871102/8fb53a3a81ad5d5949727846153c9f6f58a0845e/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1031349/110747d194be1b3ec565c8629bddeb11bdda85c5/resource.tar.gz{, .log} 
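The long run of ld.lld warnings above is produced when a linker version script assigns 'global' visibility to libc/pthread symbols that the objects being linked only reference but do not define; lld then prints "symbol not defined" for each listed symbol and continues, and the warnings do not by themselves fail the link. This pattern is commonly caused by allocator shims such as tcmalloc (built earlier in this log), though the log does not show which link step's script is responsible. A minimal sketch of how one such warning arises, with hypothetical file names:

// syms.map -- hypothetical linker version script that lists a symbol the
// program only calls (it is defined in libpthread/libc, not in main.o):
//
//   { global: pthread_create; local: *; };
//
// Linking with, e.g.:
//   clang++ main.cpp -pthread -fuse-ld=lld -Wl,--version-script=syms.map
// then prints, for each such symbol:
//   ld.lld: warning: version script assignment of 'global' to symbol
//   'pthread_create' failed: symbol not defined

#include <pthread.h>

static void* Worker(void*) { return nullptr; }

int main() {
    pthread_t t;
    // pthread_create is referenced here but not defined in this link unit,
    // so a version script that tries to mark it 'global' in the output
    // binary has nothing to bind to, which triggers the warning above.
    pthread_create(&t, nullptr, Worker, nullptr);
    pthread_join(t, nullptr);
    return 0;
}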
|68.7%| [DL] $(B)/canondata_storage/1777230/2768c5271266d2bfc16d534dcba9a9afad3910bf/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937150/0ce3b8d2f4edd9e0ed3b0820a3590fd6124c375e/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/995452/798b97b59d948b81c59b61c7d9d39e722ebbfcc0/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1689644/3e68881a70015247fc9201a3e5f029c5770414d0/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942278/712088e94c8a5f29908b7a81c4185b8ab9ebca5f/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871102/0f954067db9c14aae8830105a157009ce2550f6c/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1899731/5f2ba051437dbbe71df0674617fe1a74e541bb6d/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1917492/064a3289ad6eaf99ba9f2a34e99fb15ca8194278/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1777230/a97ee3227d27cacdc966530fa6d636c72275674d/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942173/7a7dc71e67e8e32cfc358509ee0600a7789a62ce/resource.tar.gz{, .log} |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] |68.7%| [DL] $(B)/canondata_storage/1942100/4ec2a1b49d221a247c90e1d642077630614a2f1c/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942671/caec24c7829bb07b3e5d07ae4de6f86179394486/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1130705/6ffd9ee62f7f1ead96b9e0706567eed65aef89a0/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871102/3fcf32ea5c486527b20a5dea1db1e9ccf2e36a61/resource.tar.gz{, .log} |68.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |68.7%| [DL] $(B)/canondata_storage/1903885/1f5c633d9ef5c6b22274dcefd1b823de60aa2a36/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1880306/25bda7bb5f356755a0d73916af1171e59aa33ace/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1924537/36fe336db7de347902767b13c1e0d63cf42757cd/resource.tar.gz{, .log} >> TYardTest::TestUnflushedChunk [GOOD] >> TYardTest::TestRedZoneSurvivability |68.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |68.7%| [DL] $(B)/canondata_storage/1599023/af4de9c2015a8dbd6c450ba09edc50e553a0c403/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1130705/6c54f70b6ca10a02b9f318b370b9fd95ba01421a/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1924537/994204c85c8f656606cca064cdae9e3d22058188/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937001/6a56dcab007ee7dae62350ff55c93dfb66c55be9/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1597364/8c3e86bd1d9a6577c911775a64d51195a61e9b9e/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1946324/6416045a0bb9d6e8e5b0b141a708474cc016eb51/resource.tar.gz{, .log} |68.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |68.7%| [DL] $(B)/canondata_storage/995452/5cca323a1119285bbfb44f019f5dc9be6361e6b3/resource.tar.gz{, .log} |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/formats/arrow/ut/unittest >> ProgramStep::MinMaxSomeGroupByNotNull [GOOD] Test command err: Process: 100000d;/100000; 10000d;/10000; 
NO_CODEC(poolsize=1024;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=1024;keylen=10) 0.1534132783 0.2482180533 NO_CODEC(poolsize=1024;keylen=16) 0.1104676508 0.2045372848 NO_CODEC(poolsize=1024;keylen=32) 0.06592569055 0.1591802296 NO_CODEC(poolsize=1024;keylen=64) 0.03972180035 0.1324717476 NO_CODEC(poolsize=128;keylen=1) 0.2016566193 0.2164784476 NO_CODEC(poolsize=128;keylen=10) 0.07304169975 0.08752922393 NO_CODEC(poolsize=128;keylen=16) 0.05151637558 0.06514358749 NO_CODEC(poolsize=128;keylen=32) 0.02919093319 0.04189888314 NO_CODEC(poolsize=128;keylen=64) 0.01605694811 0.02821124922 NO_CODEC(poolsize=16;keylen=1) 0.2010010074 0.2099570542 NO_CODEC(poolsize=16;keylen=10) 0.0719219365 0.07635285397 NO_CODEC(poolsize=16;keylen=16) 0.05039654131 0.05396013899 NO_CODEC(poolsize=16;keylen=32) 0.02807102527 0.03070808446 NO_CODEC(poolsize=16;keylen=64) 0.01493699686 0.01701612239 NO_CODEC(poolsize=1;keylen=1) 0.2008730831 0.2086845872 NO_CODEC(poolsize=1;keylen=10) 0.07177339648 0.07487027428 NO_CODEC(poolsize=1;keylen=16) 0.0502445638 0.05244238527 NO_CODEC(poolsize=1;keylen=32) 0.02791992658 0.0291982148 NO_CODEC(poolsize=1;keylen=64) 0.01478641518 0.01551089526 NO_CODEC(poolsize=512;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=512;keylen=10) 0.1482943606 0.1971260763 NO_CODEC(poolsize=512;keylen=16) 0.1053484084 0.1534129488 NO_CODEC(poolsize=512;keylen=32) 0.0608061115 0.1080222928 NO_CODEC(poolsize=512;keylen=64) 0.03460202321 0.08129402495 NO_CODEC(poolsize=64;keylen=1) 0.2013687897 0.2136153969 NO_CODEC(poolsize=64;keylen=10) 0.07240183504 0.08114272681 NO_CODEC(poolsize=64;keylen=16) 0.05087647028 0.05875304549 NO_CODEC(poolsize=64;keylen=32) 0.02855098581 0.03550414104 NO_CODEC(poolsize=64;keylen=64) 0.01541697597 0.02181403389 lz4(poolsize=1024;keylen=1) 0.006629768257 0.05541610349 lz4(poolsize=1024;keylen=10) 0.04233951498 0.3344832994 lz4(poolsize=1024;keylen=16) 0.05657489465 0.404264214 lz4(poolsize=1024;keylen=32) 0.09037137941 0.5318074361 lz4(poolsize=1024;keylen=64) 0.01074936154 0.1063492063 lz4(poolsize=128;keylen=1) 0.003831111821 0.02881389382 lz4(poolsize=128;keylen=10) 0.00718182175 0.06087121933 lz4(poolsize=128;keylen=16) 0.008735936466 0.07523964551 lz4(poolsize=128;keylen=32) 0.01375268158 0.117441454 lz4(poolsize=128;keylen=64) 0.02262360212 0.1850289108 lz4(poolsize=16;keylen=1) 0.00273442178 0.01820340324 lz4(poolsize=16;keylen=10) 0.003078137332 0.02169239789 lz4(poolsize=16;keylen=16) 0.003266503667 0.02356577168 lz4(poolsize=16;keylen=32) 0.003742685614 0.02844311377 lz4(poolsize=16;keylen=64) 0.004937163375 0.03979647465 lz4(poolsize=1;keylen=1) 0.00251497006 0.01603325416 lz4(poolsize=1;keylen=10) 0.002531395234 0.01628089447 lz4(poolsize=1;keylen=16) 0.002515970516 0.01617933723 lz4(poolsize=1;keylen=32) 0.00251450677 0.01630226314 lz4(poolsize=1;keylen=64) 0.002511620933 0.01653353149 lz4(poolsize=512;keylen=1) 0.005362411291 0.04359726295 lz4(poolsize=512;keylen=10) 0.02347472854 0.1933066062 lz4(poolsize=512;keylen=16) 0.03056053336 0.2426853056 lz4(poolsize=512;keylen=32) 0.04856356058 0.3467897492 lz4(poolsize=512;keylen=64) 0.04102771881 0.3228658321 lz4(poolsize=64;keylen=1) 0.003312844256 0.02372010279 lz4(poolsize=64;keylen=10) 0.004839661617 0.03863241259 lz4(poolsize=64;keylen=16) 0.005715507689 0.04687204687 lz4(poolsize=64;keylen=32) 0.007821957352 0.06669044223 lz4(poolsize=64;keylen=64) 0.01258912656 0.1073551894 zstd(poolsize=1024;keylen=1) 0.007215007215 0.0754840827 zstd(poolsize=1024;keylen=10) 0.04436824057 
0.3776978417 zstd(poolsize=1024;keylen=16) 0.06417364307 0.4694540288 zstd(poolsize=1024;keylen=32) 0.1088704328 0.6098141264 zstd(poolsize=1024;keylen=64) 0.1881404128 0.7447345433 zstd(poolsize=128;keylen=1) 0.0037131439 0.04002713704 zstd(poolsize=128;keylen=10) 0.007337810029 0.07809798271 zstd(poolsize=128;keylen=16) 0.01002666048 0.1029455519 zstd(poolsize=128;keylen=32) 0.0164095737 0.1578947368 zstd(poolsize=128;keylen=64) 0.02945264987 0.2517949988 zstd(poolsize=16;keylen=1) 0.002581457579 0.02794819359 zstd(poolsize=16;keylen=10) 0.002771136709 0.03048416019 zstd(poolsize=16;keylen=16) 0.003293212485 0.03570300158 zstd(poolsize=16;keylen=32) 0.004068848428 0.0434375 zstd(poolsize=16;keylen=64) 0.005660601031 0.05875115349 zstd(poolsize=1;keylen=1) 0.002424204263 0.02626193724 zstd(poolsize=1;keylen=10) 0.002120141343 0.0234375 zstd(poolsize=1;keylen=16) 0.002304281881 0.02519132653 zstd(poolsize=1;keylen=32) 0.002374739805 0.02573879886 zstd(poolsize=1;keylen=64) 0.002526753864 0.02699269609 zstd(poolsize=512;keylen=1) 0.005499167269 0.05848930481 zstd(poolsize=512;keylen=10) 0.02331932211 0.2237078941 zstd(poolsize=512;keylen=16) 0.03368486881 0.2936507937 zstd(poolsize=512;keylen=32) 0.05794194663 0.4212765957 zstd(poolsize=512;keylen=64) 0.1039097138 0.5749553837 zstd(poolsize=64;keylen=1) 0.003147524472 0.03401360544 zstd(poolsize=64;keylen=10) 0.004774564592 0.05176470588 zstd(poolsize=64;keylen=16) 0.006192580533 0.06557881773 zstd(poolsize=64;keylen=32) 0.009437809496 0.09619952494 zstd(poolsize=64;keylen=64) 0.01593496889 0.1514644351 NO_CODEC --1000 ----1 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----16 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----64 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----128 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=20168;columns=1; - ... 
p:69;event=parsing;size=41928;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41928;columns=1; --------41928 / 42056 = 0.3043560966% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=44104;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=44104;columns=1; --------44104 / 44232 = 0.2893832519% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=45648;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=45648;columns=1; --------45648 / 45776 = 0.2796225096% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51064;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51064;columns=1; --------51064 / 51192 = 0.2500390686% ----512 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=40792;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=40792;columns=1; --------40792 / 40920 = 0.3128054741% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=45768;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=45768;columns=1; --------45768 / 45896 = 0.2788914067% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=49088;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=49088;columns=1; --------49088 / 49216 = 0.2600780234% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=57936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=57936;columns=1; --------57936 / 58064 = 0.220446404% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=112984;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=112984;columns=1; --------112984 / 113112 = 0.1131621755% ----1024 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41304;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41304;columns=1; --------41304 / 41432 = 0.3089399498% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=50888;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=50888;columns=1; --------50888 / 51016 = 0.2509016779% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=57280;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=57280;columns=1; --------57280 / 57408 = 0.2229654404% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=74320;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=74320;columns=1; --------74320 / 74448 = 0.1719320868% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=680272;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=680272;columns=1; --------680272 / 680400 = 0.01881246326% --100000 ----1 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=400672;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=400672;columns=1; --------400672 / 400800 = 0.03193612774% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=404392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=404392;columns=1; --------404392 / 404520 = 0.03164244042% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=406872;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=406872;columns=1; --------406872 / 407000 = 0.03144963145% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=413472;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=413472;columns=1; --------413472 / 413600 = 0.03094777563% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=426688;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=426688;columns=1; --------426688 / 426816 = 0.02998950367% ----16 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=400688;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=400688;columns=1; --------400688 / 400816 = 0.0319348529% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=405312;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=405312;columns=1; --------405312 / 405440 = 0.03157063931% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=408872;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=408872;columns=1; --------408872 / 409000 = 0.03129584352% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=420960;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=420960;columns=1; --------420960 / 421088 = 0.03039744661% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=427648;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=427648;columns=1; --------427648 / 427776 = 0.02992220227% ----64 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=400736;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=400736;columns=1; --------400736 / 400864 = 0.03193102898% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=406512;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=406512;columns=1; --------406512 / 406640 = 0.03147747393% ------16 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=411384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=411384;columns=1; --------411384 / 411512 = 0.03110480375% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=429432;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=429432;columns=1; --------429432 / 429560 = 0.02979793277% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=430720;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=430720;columns=1; --------430720 / 430848 = 0.02970885324% ----128 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=400800;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=400800;columns=1; --------400800 / 400928 = 0.03192593184% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=407568;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=407568;columns=1; --------407568 / 407696 = 0.03139594207% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=422952;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=422952;columns=1; --------422952 / 423080 = 0.03025432542% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=417536;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=417536;columns=1; --------417536 / 417664 = 0.03064664419% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=434816;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=434816;columns=1; --------434816 / 434944 = 0.02942907593% ----512 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=401184;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=401184;columns=1; --------401184 / 401312 = 0.03189538314% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=409504;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=409504;columns=1; --------409504 / 409632 = 0.03124755878% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=415048;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=415048;columns=1; --------415048 / 415176 = 0.03083029848% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=429824;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=429824;columns=1; --------429824 / 429952 = 0.02977076511% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=908136;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=908136;columns=1; --------908136 / 908264 = 0.01409281883% 
----1024 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=401696;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=401696;columns=1; --------401696 / 401824 = 0.03185474237% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=420472;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=420472;columns=1; --------420472 / 420600 = 0.03043271517% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=423240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=423240;columns=1; --------423240 / 423368 = 0.03023374464% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=446208;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=446208;columns=1; --------446208 / 446336 = 0.02867794666% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6800648;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6800648;columns=1; --------6800648 / 6800776 = 0.001882138156% 568411279426701291 11314927502458297152 4910891849062175032 |68.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |68.7%| [DL] $(B)/canondata_storage/1931696/baaf97fef703e42cdaefa73847e109a1450b5b99/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937367/1c67f3b284e4254e921806deed7356235d09b497/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1917492/711d3fac29661e54e8d79bda0ff96166d8ae283f/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1784826/8e073b9fd058f3f074a4656c14602ccbd76303e2/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937492/280f310029e9135c17fc7143ea31b16e51fad84f/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1031349/c3543d8f1cc01d601eeee6bc8f381c30a592c6ad/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1923547/b752d090106a138d94de874b3e8b115ee6aa3c61/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942173/1b0498e994a7de827f7d1a300010b8424167a1f2/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1599023/28c053adbf3f9551c4abe79e7a68fce4419330c3/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942100/d9edbf8667aac002ce0c8844e68538839402ad3d/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936842/8d9f23542db0c4f13723c24b10a242ee68c61ce3/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903280/2010996c42ed76fd6d1e7bedccdf6026ec5a5fdb/resource.tar.gz{, .log} |68.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |68.8%| [DL] $(B)/canondata_storage/1773845/d6fe26bbfec70ff5239a20763ffbdc27ad4a01c0/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871182/8741170d9243172a408ff5d126ef5ae65b3c3de0/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1775059/4b281f6de1ebcb83a84d5b91cdbf1d4228a88f67/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1775059/a930e411af43ce1b309d1de7c970eb2ac2eef1e1/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/tools/cfg/bin/import_test |68.8%| [DL] $(B)/canondata_storage/1942525/54e70dcd0201d7d7770d670aadf55f64af334a0c/resource.tar.gz{, .log} |68.8%| [DL] 
$(B)/canondata_storage/1925842/ddbfdd82a4dd25bf18b0261649ab89dcfd67553f/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937027/a3de41ffd24fbd15ac4a4f974e41beecda0f1147/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942525/4fe45c70cbf0c28bef09a91de22cc41d1fc13153/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936273/a62235705daacff938053a0c1726e0c527b8307a/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1775319/63afeae1357fd3b8b5336934dd3e288ccc05a9e3/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937027/74f1823cd9853da5a0b0d77e4281e13574c3c11f/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942525/81dfcdf279dd45758cc7cc418c3fe9b12f2e6066/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1600758/3e1c972b67010ef976f8710e1ee2f4efbc022be3/resource.tar.gz{, .log} >> TYardTest::TestRedZoneSurvivability [GOOD] |68.8%| [DL] $(B)/canondata_storage/1937429/b2e019e5c80a384dae2cb46b81e53ad9800ec6e1/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871182/4d40c08aee85fb231923402132a00b2ae47137cc/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1773845/77f8b008f626911d8af361315777d5aab3bab090/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937150/af1149e4ecbbaf59deead854c81e1ca2a679d76d/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1946324/e871328b5487b9b2c440f1dd14b427a10459f3e7/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1784826/19728bd1f1bdea5d0605d9a498ec2970c5f3e92a/resource.tar.gz{, .log} >> TYardTest::TestSlay >> TYardTest::Test3HugeAsyncLog [GOOD] >> TYardTest::TestSlay [GOOD] >> TYardTest::TestChunkFlushReboot >> TYardTest::TestSlayRace >> TPDiskTest::TestLogSpliceChunkReserve [GOOD] >> TPDiskTest::SpaceColor [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey |68.8%| [DL] $(B)/canondata_storage/1900335/0fda279e8dbc42dfa916afebb80172e4d6c2bfb9/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1777230/2492c292c3dcbe5dcfc10230e1b6f79478cb2008/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936842/118804db24c1cfa3c8dcaa7cee1354cbe5b3d933/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1689644/bb7a5ebb839768b3371fdb6466d95c49c7caa5bc/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |68.9%| [DL] $(B)/canondata_storage/1031349/4e362e41dd365ac933a1de3f249df5eea8bb185d/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937027/b34cd762829e7bec8c91f56a9ebd21a8720c2fb7/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1775059/34e3c4f18af78c5b08d1779bfd2babe42d60869d/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903280/76ac83783dd253263cbbfa647528ead00c7b0238/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1899731/d2bc375d62c2739c9466376d2ff97d13069fe91a/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903885/020891901aa63873d865b7d859cce18f09b6b3c3/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936997/ff8901ef8c0bb5b2132f64a3a6c568591cfc3cc7/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1031349/f83fa171747712e0fbf290ce2fe9f17cc2679c05/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871002/87af0e803663459b2fc0b931b22ed73d40f91575/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903280/8668619c47aeb76bd072ccb1766ddd8397f57a04/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/937458/c4452645e3437dc640f4297668664a507105b886/resource.tar.gz{, .log} 
|68.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |68.8%| [DL] $(B)/canondata_storage/1923547/8ad70f7c12e1ac27e62098253e8dcdce5a61fe2e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/937458/ca874ae4a90e1527826d17c1da5f3d3dad325887/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1923547/a709fdeee4741cf8fd3eee54587d1eeb75290aa3/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test >> TPDiskTest::RecreateWithInvalidPDiskKey [GOOD] >> TPDiskTest::SmallDisk10Gb |68.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/ydb-library-yql-tests-sql-hybrid_file-part10 |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |68.8%| [DL] $(B)/canondata_storage/1936273/0d86ad2b4c27fcc90610fc18283a8b444dba82f9/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1923547/45484b99c033020b648870c9707d8e325a2db399/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/212715/a907bc0539ff9e52b20ab4615eab35bda7220afb/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1031349/c13f065489973f70d7c46a11f6ca4ae035ad584f/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1936997/6a7178ff3312fb6732ca319b267bb1a987c57c73/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1925821/301359f830853bb29d8dc6bedf12ccc575fd3fd8/resource.tar.gz{, .log} >> TYardTest::TestSlayRace [GOOD] >> TYardTest::TestChunkFlushReboot [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 |68.7%| [DL] $(B)/canondata_storage/1942415/eb3b960b9379ed168e3265e38f52b5ecd2264129/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/212715/2cb1b5139d83aa48f3466b8892464b93f89797d0/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936997/0369012b4079b3fe371b0e69a32dd2ddf31664b0/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |68.8%| [DL] $(B)/canondata_storage/1942100/015c616d2e4af2cc361f357361c829cbfef60e80/resource.tar.gz{, .log} |68.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 |68.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 >> TYardTest::TestAllocateAllChunks >> TYardTest::TestSlayRecreate >> TYardTest::TestAllocateAllChunks [GOOD] >> TPDiskTest::SmallDisk10Gb [GOOD] |68.7%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |68.7%| [DL] $(B)/canondata_storage/1931696/12a17fd03ea37900d110696f266c04ad62432625/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1925842/a4b71373097359ba466e2713f3de746df8a53ab1/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1689644/293b19e389f6ac00b10f915f27569a997d91474e/resource.tar.gz{, .log} |68.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |68.6%| [DL] $(B)/canondata_storage/1809005/407ce5051ca928115d8bf49186af1531f3ab285d/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937424/15437eeaafd0fe50e7d85ae31a223a08a54e09a5/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1903885/68bd9a70978575acf2efa3516be7bb1d450b0d4f/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937492/b1c27c23ed7b20add05d293f02d7d6eb09176974/resource.tar.gz{, .log} |68.7%| [DL] 
$(B)/canondata_storage/1916746/8af1fb7747dc5b2dccf47bca5be44479c7ae6621/resource.tar.gz{, .log} |68.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 |68.7%| [DL] $(B)/canondata_storage/1942415/671b55e8616124598c7474d9969f72ec8014eef6/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1784826/05ab9e9d8749e041f30f4fb272518bbf7bc091b0/resource.tar.gz{, .log} |68.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |68.7%| [DL] $(B)/canondata_storage/1937027/de4be6db5a9d9653a2d7cf00ba5ccfe48c1b3a99/resource.tar.gz{, .log} |68.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 |68.7%| [DL] $(B)/canondata_storage/1917492/ec43eca86102041177f140bc47a05783a6966105/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1946324/6828126e82ac7c9623f8b25fa82fef255c53fba5/resource.tar.gz{, .log} >> TYardTest::TestChunkDeletionWhileWriting >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart >> TYardTest::TestChunkDeletionWhileWriting [GOOD] >> TYardTest::TestSlayRecreate [GOOD] |68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 |68.5%| [DL] $(B)/canondata_storage/1925842/aef0e0012573a9964b38282d14d79db58aac0dc6/resource.tar.gz{, .log} |68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 |68.6%| [DL] $(B)/canondata_storage/1937027/2dab2bc49e185bfd04165d91b8a45f43e85735e2/resource.tar.gz{, .log} |68.6%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |68.6%| [DL] $(B)/canondata_storage/1923547/c7c2b8305045ef487bf309f434bfa96167619151/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936997/1b75842b463219c1de23899c69a6f937818f5efa/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1923547/5fccde9e77cd24e0e9edadda2daa7eb56bde0a29/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942525/a6d79a71fe1f7b9d4dbe9fc0e8d7f38f783c44cf/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942278/0bea467ec952d32bb910b3d7bd336a8d0e758469/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937492/ac17fc910522968a2c86c54ef70ff70f08d08871/resource.tar.gz{, .log} >> TYardTest::TestSlayLogWriteRaceActor >> TYardTest::TestSlayLogWriteRaceActor [GOOD] >> TYardTest::TestStartingPointReboots |68.5%| [DL] $(B)/canondata_storage/1814674/5f3c7350d8e72ada6a702e29e5f3bfddaa73df08/resource.tar.gz{, .log} |68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |68.5%| [DL] $(B)/canondata_storage/1925842/80a317691e3de1dccaaf65a2e0ac2eda115fe088/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1773845/8dea106443923a91389548a0f46eaff49228c517/resource.tar.gz{, .log} |68.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |68.5%| [DL] $(B)/canondata_storage/1773845/f3e4c472dc37081782e19cd965bd65655fb94de9/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1942100/21245e81d28b28ef09d03385075d39472fbb3dba/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1925842/3305983375fd65f2565c804b57aadeec6345c6c4/resource.tar.gz{, .log} |68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 |68.5%| [DL] 
$(B)/canondata_storage/1937367/c8f509a79779b30b722211cfeb063fe74251b5ea/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937001/1cbaa0990b057cd081f509e8a6410c993209697e/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937367/c99cfb4b780550a1a456fcf97cd04e4ab58600f3/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1924537/24871a42dc8e7365f6316f556fa6616a3c687a13/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1946324/c4e3e08799ff2867f35fb0960060a07338ecc49d/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1781765/97c29e53add37e5e221fbc6e22055fd1d8762911/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937027/751768eb2f05a82d5c0ae53923fca610307cec52/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1809005/d82aaae062254cb5da7fc7348fa82d8f67f6ea93/resource.tar.gz{, .log} |68.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |68.6%| [DL] $(B)/canondata_storage/1925842/b328515b1ab9021b09eade67e865074624010fca/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/937458/451cb5773bbc54fc1287d09034d5251907c23f31/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1773845/67e34a6f8d63716e511a557d7164ba4684e7c32c/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1871002/f97d407256b2c37a8aa0d391cd742e47a1681d82/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1942671/612b8e5cf42e27086abb71abea474d6f2f73b914/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1599023/e9f8d240b4483477bcadcd3788795f2462724043/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1600758/46c823d9545fa3ae54937e43128b4c7eb42457e8/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942173/8e89d11f444b9bfa5ce3b1040f7d67aa7d59d029/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1784826/d2a1b732d518bb4ef49d545b2ddf9c004a5d5c75/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942415/5d0460f6a5e883e733ff9b871dd0c0a3717965d3/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936273/8ea0dd0bb7dcfcb9060145c85aba7872eea15de5/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1847551/5836720bd6edb7a20e88f4ea2ae09a4e4b561093/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1880306/3ec645abf253c80395cd29e55e1be69e2faef49e/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1600758/6536c12aedebd96c5fc71915b4cfc72252cfa630/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937367/a874490aa85e801e6e6c639be280467bb36af7f1/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/937458/e5719cd256fe3fd898e8ebe6df280521ffd29040/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1847551/bd3dbf8cccccd7565b8c57bb32bc464524c7d3d9/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1937027/486e59249c373752409ccb9df757ae063d64d546/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1600758/aad142702907f13e911494c1a7b312bad34f692a/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1889210/f053f10d689490bf5100a7fbf8cc00cf1b09e227/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1936997/a36e4ac0da388a8e1ac773455c73c5a459846a00/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1130705/278b00e80d012b1440a24faff121f276542a077b/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1880306/db71d33ed2525c218cc0dde3f5b7b9ce7f9aa317/resource.tar.gz{, .log} |68.6%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test |68.6%| [TS] 
{RESULT} ydb/tests/functional/suite_tests/import_test |68.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |68.6%| [DL] $(B)/canondata_storage/1936273/20339d8b097f50e14eb6d266a222b77abdc42846/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937492/7826fe0d6b2cfb712d11a7f0758863664d172cb9/resource.tar.gz{, .log} >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart [GOOD] >> TPDiskTest::TestChunkWriteCrossOwner |68.5%| [DL] $(B)/canondata_storage/1925821/4ae9bde3c1ecde0f833266f025b433a41c077ebf/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1773845/4743168c84575c5ee74764d6369a8a7b6f309d6e/resource.tar.gz{, .log} |68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 |68.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |68.5%| [DL] $(B)/canondata_storage/1773845/df65899dab8a6000128816aea623c99e5f1dd537/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1946324/c8058686a7decbc255d7e28ecdcab6420a760e15/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1809005/4f269cc7890e7d43a65cb3699c5c4dfb301b1577/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1775059/3040ff3725a2cd9541d62c4c0f59acbf85d6e8e9/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937027/fc46b3cc97880e1d193902512d82ecde372bc654/resource.tar.gz{, .log} >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] |68.5%| [DL] $(B)/canondata_storage/1937367/ead83488482c124a8c95469b3b45c4c638595905/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1900335/4b60bb5e71999895e5687b055a2f48946e4a072b/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1814674/8032c8c75c4a0135917efb7e8a36a553203d3792/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1903885/e665e50194f0fdfa8d7857ead4c5d407752c80c8/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1130705/1ce6c7c35a3d1f6575dcd3f9fb981d727082535c/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937027/dd79a308e7a46e9ee81ea6630da3b4a111a26336/resource.tar.gz{, .log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkDeletionWhileWriting [GOOD] Test command err: 2024-11-21T17:42:33.427988Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:33.450219Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16115072107677219661 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 1658880 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T17:42:33.642544Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:33.652428Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 14777073174041707659 MagicNextLogChunkReference: 5458154703902729215 MagicLogChunk: 2773649382289768015 MagicDataChunk: 14342419942526799205 MagicSysLogChunk: 9053859693409100599 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210953568899 (2024-11-21T17:42:33.568899Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:33.660455Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:33.661758Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:33.661980Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:33.666941Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:33.667430Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut 
LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:33.667510Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:33.744696Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1530392 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:33.798790Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:33.820705Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T17:42:33.829182Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T17:42:33.896355Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:33.896376Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T17:42:33.896623Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 5211918733399338832 MagicLogChunk: 622269256924953659 MagicDataChunk: 8753759375171698880 MagicSysLogChunk: 668486387086778647 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210953866946 (2024-11-21T17:42:33.866946Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:33.897899Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:33.899053Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:33.899074Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:33.899208Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:33.899343Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:33.899390Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:33.900373Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1516336 CutLogId# [0:0:0] 
ownerRound# 2 PDiskId# 1 2024-11-21T17:42:33.980756Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:33.981249Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 16473211211747620538 MagicNextLogChunkReference: 16838893026456891799 MagicLogChunk: 5395692282249119466 MagicDataChunk: 6371073530140333120 MagicSysLogChunk: 3015245868857699295 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210953933698 (2024-11-21T17:42:33.933698Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:33.983132Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:33.984375Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:33.984397Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:33.984614Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:33.984811Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:33.985032Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:34.077579Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1845181 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:34.171885Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:34.176684Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 9558487225221931840 MagicNextLogChunkReference: 5644449013400999747 MagicLogChunk: 12181865671665986130 MagicDataChunk: 14387954250073745331 MagicSysLogChunk: 1062223776979160141 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210954104507 (2024-11-21T17:42:34.104507Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 
2024-11-21T17:42:34.184304Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:34.188511Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:34.188539Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2 ... 123-125 firstLsnToKeep# 0},},{chunkIdx# 2 users# 1 endOfSplice# 0 {owner# 3 lsn# 125-125 firstLsnToKeep# 0},},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 125-125 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:36.126012Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:36.172183Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:42:36.175395Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 3} PDiskId# 1 2024-11-21T17:42:37.053204Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:37.065090Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 8178443036748815648 MagicNextLogChunkReference: 11873664596149896956 MagicLogChunk: 102151517006879135 MagicDataChunk: 13164627773381130262 MagicSysLogChunk: 7377627457655605764 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210957029834 (2024-11-21T17:42:37.029834Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:37.068596Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:37.072328Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:37.072357Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 
OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:37.072576Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:37.076270Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:37.076374Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:37.160424Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1983595 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:37.192827Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:37.204330Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 8178443036748815648 MagicNextLogChunkReference: 11873664596149896956 MagicLogChunk: 102151517006879135 MagicDataChunk: 13164627773381130262 MagicSysLogChunk: 7377627457655605764 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210957029834 (2024-11-21T17:42:37.029834Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:37.211776Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1098933 NonceLog# 1983595 NonceData# 2025737} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:37.216788Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:37.216817Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2024-11-21T17:42:37.216834Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:37.216950Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:37.220418Z 
:BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:37.220548Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:37.294478Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:42:37.295240Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:919} PDiskId# 1 chunk owned by the system for ownerId# 3 can't read chunkIdx# 2 PDiskId# 1 2024-11-21T17:42:37.341744Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:37.347168Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 3217989432662345549 MagicNextLogChunkReference: 17197912033263568015 MagicLogChunk: 2411090515904644013 MagicDataChunk: 16844718566261332879 MagicSysLogChunk: 16271326586332626041 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210957310727 (2024-11-21T17:42:37.310727Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:37.353129Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:37.356528Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:37.356561Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:37.356723Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:37.356836Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:37.356877Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:37.435626Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1976757 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:37.473321Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:37.473574Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# 
{TDiskFormat Version: 3 DiskSize: 16777216000 bytes (16 GB) Guid: 14458941659089847652 MagicNextLogChunkReference: 9120303205508017425 MagicLogChunk: 7903977423687105842 MagicDataChunk: 17292189747712937099 MagicSysLogChunk: 6443434532255351599 MagicFormatChunk: 17332287817462050952 ChunkSize: 18874368 bytes (18 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210957453889 (2024-11-21T17:42:37.453889Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:37.474865Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:37.475444Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:37.475479Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:37.475557Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:37.475640Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:37.475673Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:37.572768Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1441657 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 |68.6%| [DL] $(B)/canondata_storage/1946324/5a0f5c697ea00923466b0cb0991a1c2a5af1384a/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1599023/bc279b2dfef04f4a94e7c85a598af9d954bef4ff/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937429/44565291a008d35ab2663966004d6717f2618b42/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1917492/261649fbdcb7f81af5118b7c9c9f7b8353abbfd8/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936842/1230453fda0206fef63c6f7a723461640d941221/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1599023/0bd57d257eeb1652a68140e9608a6813bf473a94/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1689644/4d9701667c235827e22152c557341a6339db2761/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1130705/acc206ee45aa5b7f7e78d232059c9535f249dda0/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1936273/5c7aadbc9513617ac94bd8f103a74bc39b4edfe5/resource.tar.gz{, .log} |68.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |68.5%| [DL] 
$(B)/canondata_storage/212715/2e75ebcaab6ea23aae1ed39a602e4ad780c354ab/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942278/40ea988eaa18293a322e85a441a68b521e416660/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1130705/0ef38e114204c1ac6d0d5ad14792a285fb1413c8/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1777230/8d6bc20c3c548691ed47463aed0d508dcd185ce3/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1903885/4df104aa60634735da6e3543917f736870f3f18b/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936842/e2f5b27b418549665a04de58de4b4e487f33c292/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1599023/99c2356674b1e20f456cfa1987af5df85eb4bfa3/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937150/752b46c5b03a79553eede6ce218ca961ba7c10c2/resource.tar.gz{, .log} |68.5%| [DL] $(B)/canondata_storage/1889210/7f3c41a1a9a952dcc8b95a828e079ca0b5c57243/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1597364/8ab87482c625c5d6a6e486201ec940e41b09bc3e/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1899731/ac2fe0744925be7f8cc1556d084138437b7eeab5/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1814674/712a09ba024a489ac40fb8f6a036e48974fe809d/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942415/1e09e4342cd71819f75a6b9adf843137f6d9a325/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937424/c9a4c8efbcba2c1a1772ede4bf146f439970ae1a/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942100/e137fd991c9f3857dfbc144867f53c199fe00560/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1773845/3e79b21e1668f131709c246df5e9fced8d4bf38f/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1916746/fc9859eda7833569c636bd5c91d3cefea7eb47fa/resource.tar.gz{, .log} |68.6%| [TS] {RESULT} ydb/tools/tstool/import_test |68.6%| [DL] $(B)/canondata_storage/1880306/bae66b3e317c04615399cbe68d1b1628bb7a6b67/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1781765/cf2d7def7c41b9fae02dc0acaab8437909472138/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1777230/96896022731d921e0e3ef80f527dbcadef5d13f9/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1903885/dc53b4edac607ebf3b277ca9598c7c26218fd737/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937429/b424baaadf0728e7424d639c06b7246427532e0b/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1599023/65f04a32ef767a9d58baa3504831aac82a785d9c/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1871002/6a2014c70e9b412ca99aa36a7e7375d181155757/resource.tar.gz{, .log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] Test command err: 2024-11-21T17:42:22.267916Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.300542Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12561539472770807063 MagicNextLogChunkReference: 11604863845848727103 MagicLogChunk: 7477079138658994905 MagicDataChunk: 18156396643540079442 MagicSysLogChunk: 4536555524788943102 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942225366 
(2024-11-21T17:42:22.225366Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.308329Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.310405Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.310634Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.311097Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.311616Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.311801Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.313900Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1931726 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.323114Z node 1 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_COMMITTED_DELETE_IN_PROGRESS ownerId# 3 PDiskId# 1 2024-11-21T17:42:22.493735Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.494020Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 18195549518896415805 MagicNextLogChunkReference: 7773177047672445031 MagicLogChunk: 18128871624161332282 MagicDataChunk: 8137482255476349229 MagicSysLogChunk: 9335636970302459443 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942453679 (2024-11-21T17:42:22.453679Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.495389Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.497097Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In 
ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.497118Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.497249Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.497353Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.497398Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.497599Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1205187 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.499534Z node 2 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_COMMITTED_DELETE_IN_PROGRESS ownerId# 3 PDiskId# 1 2024-11-21T17:42:22.654815Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.655351Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 3912690402904677147 MagicNextLogChunkReference: 351428384977312846 MagicLogChunk: 8074132354568529608 MagicDataChunk: 13868079285596812399 MagicSysLogChunk: 4097467395298419785 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942584293 (2024-11-21T17:42:22.584293Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.661207Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.668853Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.668881Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.669035Z 
node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.669156Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.669394Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.673003Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1544132 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.688599Z node 3 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_RESERVED_DELETE_ON_QUARANTINE ownerId# 3 PDiskId# 1 2024-11-21T17:42:22.761231Z node 4 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.772676Z node 4 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 4509334738004964015 MagicNextLogChunkReference: 13344926745467869555 MagicLogChunk: 17272169968632295153 MagicDataChunk: 8673486865559037844 MagicSysLogChunk: 13035155619203696578 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942722806 (2024-11-21T17:42:22.722806Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.776282Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.780678Z node 4 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.780695Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.780915Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.781003Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.781043Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.782827Z node 4 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# 
[0:_:0:0:0] FirstNonceToKeep# 1952598 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.874600Z node 5 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.888526Z node 5 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 2710944392266537259 MagicNextLogChunkReference: 3982825518388025263 MagicLogChunk: 8792138195622325577 MagicDataChunk: 12587084810805431667 MagicSysLogChunk: 17734540649123511433 MagicForma ... firstLsnToKeep# 0},},{chunkIdx# 94 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 95 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 96 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 97 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 0},},{chunkIdx# 98 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 0},},{chunkIdx# 99 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 100 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 101 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 102 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 103 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 104 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 105 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 106 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 107 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 108 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 109 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 110 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 111 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 112 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 113 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 114 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 115 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 116 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 117 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 118 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 119 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 120 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 121 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 122 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 123 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 124 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 125 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 126 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 127 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 128 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 129 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 130 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-6 firstLsnToKeep# 0},},{chunkIdx# 
131 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 0},},{chunkIdx# 132 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 133 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 134 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 135 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 136 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 137 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 138 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 139 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 140 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 141 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 142 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 143 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 144 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 145 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 146 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 147 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 148 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 149 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 150 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 151 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 152 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 153 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 154 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 155 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 156 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 157 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 158 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 159 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 160 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 161 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 162 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},{chunkIdx# 163 users# 1 endOfSplice# 0 {owner# 3 lsn# 7-7 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:37.947852Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:37.948077Z node 30 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [f:4294967295:0:0:0] OwnerId# 3 OwnerRound# 101 PDiskId# 1 2024-11-21T17:42:37.949526Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# false StatusFlags# IsValid Results.size# 2} PDiskId# 1 2024-11-21T17:42:38.021220Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 1 OffsetInChunk# 12288} nextPosition# { ChunkIdx# 33 OffsetInChunk# 1069056} 
isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:38.079534Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 33 OffsetInChunk# 1069056} nextPosition# { ChunkIdx# 66 OffsetInChunk# 40960} isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:38.142903Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 66 OffsetInChunk# 40960} nextPosition# { ChunkIdx# 98 OffsetInChunk# 1097728} isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:38.201331Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 98 OffsetInChunk# 1097728} nextPosition# { ChunkIdx# 131 OffsetInChunk# 69632} isEndOfLog# false StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:38.231566Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 131 OffsetInChunk# 69632} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:38.553905Z node 31 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:38.564308Z node 31 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 3208523513439946512 MagicNextLogChunkReference: 14729034531519305255 MagicLogChunk: 8436884488946523945 MagicDataChunk: 15091649368210598083 MagicSysLogChunk: 3364916122404835808 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210958497194 (2024-11-21T17:42:38.497194Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:38.568715Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:38.573164Z node 31 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:38.573191Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:38.576352Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup 
LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:38.576562Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:38.576637Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:38.583883Z node 31 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [10:_:0:0:0] FirstNonceToKeep# 1235989 CutLogId# [0:0:0] ownerRound# 102 PDiskId# 1 2024-11-21T17:42:38.588385Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:38.596372Z node 31 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [11:_:0:0:0] FirstNonceToKeep# 1235990 CutLogId# [0:0:0] ownerRound# 103 PDiskId# 1 2024-11-21T17:42:38.601168Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 4 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:38.602119Z node 31 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 3 chunk is owner by another owner. chunk's owner# 4 request's owner# 3 PDiskId# 1 2024-11-21T17:42:38.602209Z node 31 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:900} PDiskId# 1 Can't write chunkIdx# 2 chunk is owner by another owner. 
chunk's owner# 3 request's owner# 4 PDiskId# 1 |68.6%| [DL] $(B)/canondata_storage/1925821/76f31386bb8c24a8c6deb4852e43a6b3c032e597/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1784117/27be18c4c655d803ac4ad0ec88e5308caa093c37/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1871102/afb11e7450182b29736d5d351c8e22acd046f1d9/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1773845/153cf0652cafcb3f7d3b789e3a4ceb4a9f7cfccf/resource.tar.gz{, .log} >> TYardTest::TestChunkWriteReadMultiple [GOOD] >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap |68.6%| [DL] $(B)/canondata_storage/1871002/ecb1006531e1f9b13e35feb30ba820285342f340/resource.tar.gz{, .log} |68.6%| [TM] {RESULT} ydb/core/blob_depot/ut/unittest |68.6%| [DL] $(B)/canondata_storage/1942100/34bf60ad890ba4690ed9d3377dd96472d59bed69/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936947/2fc43e3b7bf2ac6312b395248938656a7fa50fcc/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1900335/26b8736381377fc28b74c97ea9e134e6da21c6aa/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1871102/61453f3b91b2a15fb0e2d2c6c61875e360070851/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937027/4608fb8cff903881d29660feb5fbd40491ccea1b/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1889210/414a59e63d9da4dbc9c919df47879a3079faff08/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1814674/a5c2cbff45dd20fa22702fa4b2539a64145b10f1/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 |68.6%| [DL] $(B)/canondata_storage/1871102/b46931b83ca87df6a7e16b1851216c6a79ea251a/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936842/97245a35fa0fd90edbac42284ba4ffdc229ef791/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1777230/07925bc76f621b8c24d146f499334eda41f5710e/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1871002/b59ed2ad938015ca28be6d459030014e4b6ff1ea/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1777230/4b5479e2ebed213e8e8d9a64aa0b5a72bb3ea4dd/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942525/493b103fe236994f6bd102fa072d08bd1a2e5f4a/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937001/ceb29095b5e35ed4cc1eb104072815b0be7ac715/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1784117/357d3ccdef7d0372b6d86bbe259ca7f35b60e595/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937027/fb2f5e7f8384ed26549a5b12ab64879613b094df/resource.tar.gz{, .log} |68.6%| [TS] {RESULT} ydb/tests/library/ut/py3test |68.6%| [DL] $(B)/canondata_storage/937458/55f0812aa779206291ac8c4b283b7a80472fd1fb/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937001/263d267ee75572162f813e618cad51ab3a452ca0/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1809005/7478904042df2a3888a84b6a917dd7cf55a05d66/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1889210/fede666d039e0167053e2c4cfe8623cff4b33d24/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937150/33871135ec615a5b145f2dd1de63f51229e4902b/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1917492/490d2547d88a61b315b5aab0d1f524b08d4d202a/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1775059/7b52ffc33b8c7dedbb0053fd02466c7710fcea86/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/937458/e483a0c8e724beb228563eb224be67227cd805b3/resource.tar.gz{, .log} |68.7%| [DL] 
$(B)/canondata_storage/1931696/1afb7fe05c694a846953b38edeceb5eee38c90d9/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1784826/000103add1e1f60c471b7c43f6b4a44a1a2734bf/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1881367/e98bbd650c45a3f4f6bc628cf8be62baa88c6183/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/937458/2c8d5c047a2d1f115b2b21f5412518c762d2aa0d/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937429/30d3e476b0604091faf300d00ac05dc03b916b08/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871102/7f5d56703d2b94fe32fa31a38a20ab8e2a4279f2/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937150/c7ef5e7ab0593d2cfcd01b9f6de38d47362d86ae/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1931696/564c43f4aa944aa26e85d54f2d25c16b9ce359f6/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1784117/3885f0a76b64a32a48487f8866602d3fff1e416a/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937027/71af45db04c1ee6fdd37f84594c6cfe28ff65598/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871002/2fcd813e80c98be1c3c62b9a854d09ccca8851f8/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1847551/ccf33d99ba5f1b411488f6139e835241216d9532/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1936947/343756f2ef88ab4d13ae0d8e1780223f838da842/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1925821/03b39acc689a2972a275bb747421c2da6d4a7ca2/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1814674/65f4d58c5b2358f2ab558af11162f9a7adab9578/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1889210/2d9182e0bd6dcb9ba0a784a38b456240cb386874/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1917492/c506d630588c442847f7a867bd50c315238502e6/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937492/3cc712c3196f7027398ddcbbf6597f57ccd7dfeb/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871102/a3f4b73c2abee8d4bf99b0344946712ef43a9193/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1600758/5edd0ab283f8fdd1cf03c1031ca810ce8554c459/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1923547/408f17dd7de3f1f4f32904831b08b3c57e38a7c7/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1946324/b33c6fdfc40b5508ecac58ab31e2d2f4fe0617fd/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/tests/functional/ttl/import_test |68.7%| [DL] $(B)/canondata_storage/1817427/46729b354b9b15ea89f67bf14fefd2face8b402b/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1689644/34ad75626afa74e843eda917420447310062489d/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1130705/9726e618a91dba7201c15a2e45d93b6d8fd96178/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937367/43e9598c5fc4f9a8466e4891deda513069430997/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937429/8dfaad7a4316e425c9a664520399cae79a4471ab/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/import_test |68.7%| [DL] $(B)/canondata_storage/1031349/10fd2cab5eaef26b8b41b09e42e0b0458004c724/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1031349/f562047a0458cc3f13d0bd9bc809240f0048d755/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937150/89c1317ed6832f17c721211f8be3858ffaac0d95/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1773845/ab4dbe9a0023541b50ed970cdbec735405c6ed77/resource.tar.gz{, .log} |68.7%| [DL] 
$(B)/canondata_storage/1880306/bb6abca9465bc61a78aff64ccce71d1aa9416680/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1931696/e1e81addd8ea3e15863a8ba2a48dd9580611eaa7/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1924537/4ece22823ee95186ecb519415692146559b02395/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1889210/12a5d5a4658cc8076f89deb914f7f1f0f6ad647f/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1847551/14f613117c7e3a3941ccff240390414bf6219eb7/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1916746/21c597d64388f6c41b4782746fbaf7f1b842bcda/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903885/50fd147bcde0799910ee9f4cbb71ec257f43ca4f/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1817427/dd0485c09a30d742e18a0ec21502ddd405eb8be8/resource.tar.gz{, .log} |68.8%| [LD] {RESULT} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |68.7%| [DL] $(B)/canondata_storage/1871002/2a28301cd702f47961195a0e9d71a1a846884662/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1847551/652d7dbdeb88758415d87d3e7c2cc15c983c3ea8/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1889210/e26b9c7fc72b580fe82c1126f535456e73306c2c/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937027/17767c14f451959962681cf3d7fdbcfc98a6f63b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/995452/7d7eb4b4cb892b72a47c31068e42a9aeef5fa875/resource.tar.gz{, .log} |68.8%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |68.8%| [DL] $(B)/canondata_storage/1777230/af30d016cca75b9d11b6ed54e7d270e255deb404/resource.tar.gz{, .log} |68.8%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test |68.8%| [DL] $(B)/canondata_storage/1937001/504ba0e32db3717a9a770414a8a028855403e56f/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/tests/functional/hive/import_test |68.7%| [DL] $(B)/canondata_storage/1936273/921006dac2a4100d3f0822b61dc56296f0c6ef83/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937424/022b4c4aaf443124c76bb3e388177d9b3de00044/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1899731/d439cec3e297e3dd031f751fd90f3935129a1094/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937367/1aaf5198b7497ff892746fdaf479cd906eda5ef0/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1917492/f6af24e9333bc438fcad14a4e8bac6e6b0e07d87/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1817427/93e2a35960dc2868ae1d1a162c39c2808f9cf84d/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1600758/14d5560c6b6df65b25a7d0e4e072602b1a2a8743/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1130705/9a8e26c0a28d46dba8d12985fe62df3bee2a07d3/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1130705/20757ef3a83dd8dafce4369b1fa87cb0a0ae2b28/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942671/431d6f4e1a38d9a83c442de2f50cfc3e38e449d6/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1936842/15d1b251a19a947bc78bcd914d26903ce91d665f/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936947/8207550781992515886b573c884a057b16fa83e3/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936273/866bbbad6d025c44baaf3df88c5cf7edb202eed7/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1775059/0320e0a444559c89851159b0ca77b3fb930f0227/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1130705/151a45bbb65479e0367fc50d4fa7085f38b11c36/resource.tar.gz{, .log} |68.8%| [DL] 
$(B)/canondata_storage/1773845/7b0ece7963d2534b82e6da693f82ff79c8bfc07f/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871182/02c4e8ba45dea18da2d4af195dc4a2de592050d0/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1925842/54dd0ebb803d2e27e6086b3d88e35fed569d9a96/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 |68.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 |68.8%| [DL] $(B)/canondata_storage/1942100/7cc0999fbc2528b08c47f3289c99f1f628ae5fd4/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1880306/553cec1ec75c6f92ca7aa0593ca041b68c096464/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1923547/dbb09fc9d877657acef8a7d59b3a1cdfa4706056/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1924537/404644a5cd7e050e8d183aa0c8a5c70a417e4c80/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942415/7197d6f538e589afc0cd6fe1285c07fd138fb450/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1600758/2bae720d354fef176d7a7ae70957b1a227ff538a/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1817427/c76f32a844a68e74a51674b34fdd7e00e417e1e1/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1130705/4556d96ceeebd1ca514c0387ef0b269cb852e13b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871182/49dbac5c2482cb10e39b8801a9850d0528746393/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942671/d37704c7d0acd85d95c86ed2cbc5abdedc6da1fe/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/212715/89a4a534b98f0006d500be0545402722592e483c/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942415/6669484e62bb116018d49d82b2e465bc96b01a14/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1814674/8156a7ce6ad6eceb82586ac4874de57d87023039/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1773845/d2141055cd31d948959dc44b2043b73e0eedb61e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1777230/166430366a3ee35292b0a0e4ec6ba38e166ec8c5/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1900335/a5a16b7313d07b162a608c1abeab1e68e6175117/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1924537/40c66e62107c2a9e3733dec809479087bdd8f6d6/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1130705/bafe275fa937679d2b25012fec947db4686c5a93/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/995452/916a3abc0188ce4b46268a98f2f6487c53d8a14c/resource.tar.gz{, .log} |68.9%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |68.8%| [DL] $(B)/canondata_storage/1925821/97ab382df374f58dbf4509c69cb8d6f0df937287/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1600758/164a788024a2adf2945e5df7b5b1983ab8de1a1b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942100/1a3ac34e6a22249edacf19f24601f290b692c3cb/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1031349/a8c086bf83c7b097d941bd5f51b9690bf204f31f/resource.tar.gz{, .log} |68.8%| [LD] {RESULT} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |68.8%| [DL] $(B)/canondata_storage/1936997/26840f7e11cb9eef225eaf1c7e2dc7e15d3b69c3/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942525/26eb3bd3f3177ba00d382b62045c570f72937d8e/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937424/1f14a59ff195cdb4e3cda1c103baeeeee659e945/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1130705/2c54111b512b354592ae02009bd206b6b4bc7a92/resource.tar.gz{, .log} |68.9%| [TS] 
{RESULT} ydb/core/config/ut/unittest |68.9%| [DL] $(B)/canondata_storage/1031349/8ad72b2dd458e088eb333e5553dfd99dbae9e9d7/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937027/b16c09e9a13e802c8e9af73cf9508048ead5f2e5/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/937458/9a583559753b9ebbe934c023f3a211aa7e017405/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936997/f0d91ddfd51cc8a6414fa05ea0aac05c34813467/resource.tar.gz{, .log} |68.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |68.9%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |68.9%| [DL] $(B)/canondata_storage/1923547/22c2c509a5434879db9a69ad8b9605c384a0c07b/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/import_test |68.9%| [DL] $(B)/canondata_storage/1871182/18e14cc850154a330057b23fc8c6576e30e17147/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1031349/eb01bd7ff66ea4e3791b69ec15b5a2ac35547ace/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1881367/207835e4d274fcf7987814492f265b341fdcd02b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1784117/392a16b8c9d13c4b4284c3048ca355142cf1aa9e/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1871102/40bae405b3e45c05f8213a4d6de07ce04c617d22/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1889210/46413869b9a6422f358888eff087d092b7ec0356/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1871102/7b4b0482d4b48dd41de6cb20a8532e8054ae5f81/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942100/7dbb0fabca371736b54699a0459fc74089bb4c57/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937001/23c4a86c24169a35556c576ee32ff34654fb44f6/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/212715/7f481604d75f86fddead511124887e2e0fa01e78/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1809005/036501517c77edaf8dfa3239345c24cc4e73081c/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942415/bc58f4bafc7f7c25e28d8dc76fd80da6616f0b89/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1903280/e30d5fd7cc30b67d921c737fb0af1ae5c12759b0/resource.tar.gz{, .log} |68.9%| [LD] {RESULT} $(B)/ydb/mvp/meta/bin/mvp_meta |68.9%| [DL] $(B)/canondata_storage/1775059/8eed37259d411fc80649c1b2311ad3abfd9ee15e/resource.tar.gz{, .log} |68.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |68.9%| [DL] $(B)/canondata_storage/1903280/6ac862756a9225bab7885d29cf2289a6202a5ff1/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1784826/0e334a6b657f494d6225ebade2ce12411632a8e5/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942671/4db54c8ba9dedccdc8391210d1657c5ca4bd34ec/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1936947/c075b3a6b857003250f6fcdaddd6e5508fb9d58f/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1599023/c48eea35b704ce8968912971b5424c6d295839a1/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1924537/bb09f7f7f49f479d6bdbad2ad3eb185564d33ca0/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1600758/eca34ca0feab807128d7b13432dc1134d34cb65c/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1781765/8061b1bca1dbb79a1a8135dd98452c011059369f/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |68.9%| [DL] $(B)/canondata_storage/995452/b8907c48ceee21fc9f22e90025a80d21b7f812d0/resource.tar.gz{, .log} |69.0%| [DL] 
$(B)/canondata_storage/1942525/79c6b0061ac9b7af78aeb1e98efd080a410caa89/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937027/ca37dc23c4a42a42fb6cfd05c1ad5ae3f4853941/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1814674/de17576700fc11fc02ec994a616abc5adadd5f40/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1924537/907e79379e1e72f9d09545e57f65dee63f42dbfe/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1781765/028f42f897160b53900546b39900217bb2eb9fb1/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1784826/837b0487932600ba51f58ab5300b34e847536f72/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937027/ec284f0d49e0f6e26fe5e8922028ffb903db3bc7/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1871002/99771a64b313e4ed0d87c6e59cde6bafe069ce35/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1880306/51cf42e38aedb850f758a02645c1575dcd57d9e6/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1775059/0211445827e77a089557f709a929c720409a58d4/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937492/7f01a8f9ac63e111f95c1b473211464f75350133/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test |68.9%| [TA] {RESULT} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |68.9%| [DL] $(B)/canondata_storage/1889210/0ae374e78057abad24c02b5788c385884d8c559f/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |68.9%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |68.9%| [DL] $(B)/canondata_storage/1899731/d359e310c721425bf92779c6cc495a90085858b1/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test |68.9%| [DL] $(B)/canondata_storage/1923547/6bb261b87a2d0ef492e8f1a5fd897369c7118506/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/995452/bdcc1c962e2cb216859f83be46f0797cbc65b816/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1936842/fa36495c13878b6808528b4f14deedabaaaf4b52/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1924537/8b609a36ae618dd93dae76d33a498930f0df8908/resource.tar.gz{, .log} >> TYardTest::TestLogWriteCutEqual [GOOD] >> TYardTest::TestLogWriteCutEqualRandomWait |68.8%| [DL] $(B)/canondata_storage/995452/b1f2dabe2f59f069a24ebbb0bcf5d5b69f26631e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1899731/2a0d010c88d3668b64d2eceea551d15c4ea643a5/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |68.8%| [DL] $(B)/canondata_storage/1942278/423e9b8a01d3f9d43497e4cb2ee7041e47daf356/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937027/7dd93f39b29f9f9faa0d9501189c6cde9f06926b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942671/03c23723b22c7621c14256361f488cfb15ee75e3/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936273/2dcf3705881ddc62a114cf70453bfa6ad7f7d225/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/212715/3f9199021f498ba2943fb7c0535d2ebc21914487/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1899731/061987f55a4633fbb100deb15792166741b6bddd/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937492/a2da5ad850b8a2bacde60e0e3cf33053277777c6/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/937458/70ccebe5fbe5864b01d9dd1a04ed7658001b110c/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/212715/b9d67d9e85a77fd7731aa5719cd4ecc8994a16b9/resource.tar.gz{, .log} |68.8%| 
[DL] $(B)/canondata_storage/1937424/aec2375680b5e09e454587725abefb1869ba1f0e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1031349/f83156e7d6ffdceda631624d3f808fe3c1b393a8/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936947/3881735ec94cf6af5bd90b1c7efcaa7c1bad584b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1031349/2beab0bd51f525f804474df3adc530a07847479a/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942173/2f9cbf1b9614aff7e11f14fc7938938d0790e3ab/resource.tar.gz{, .log} |68.8%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |68.8%| [DL] $(B)/canondata_storage/1936842/73fe0e78069055b4c244798fc9c15ebb1173a692/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1775319/23bb6663a6c40fafcef1fd59c500bcee7e579cdd/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937001/739f5ccbe8d31f87c515ca8f825e82c32f06f5a1/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1777230/c6bb3b20e729a321dd2f32060118095ac77f2dba/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |68.7%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/import_test |68.7%| [DL] $(B)/canondata_storage/212715/61f0c59354c0aee96d5e21e3fd5f5993b2817ac3/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |68.7%| [TS] {RESULT} ydb/core/metering/ut/unittest |68.7%| [DL] $(B)/canondata_storage/212715/e9c0ac99e18ec4ae27142c9c2577dc676b157d44/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1903280/8bd8bc14ef8f83b16c090d98fff065b7f9ded199/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942415/b35d2514a5150e9f12a175bf916b9aef176e9b54/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1031349/cb7f0d6f26f3c006a7ca4ee3cb2fee451343f519/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1599023/7ca825f9742a1a057b05a268d19a99cd8f57127e/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1899731/a8e487f28e21f36eb70986f5e3381840f4f35bc0/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1784826/876422ee5a31dd410c0abb7a3417f21835990576/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1924537/c5db08849456fd743b1ee29541c5e4a60ede833f/resource.tar.gz{, .log} |68.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.7%| [DL] $(B)/canondata_storage/1937001/2a7ef44323a9583b611e77f9451ecbcf9a39cd8f/resource.tar.gz{, .log} |68.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |68.7%| [DL] $(B)/canondata_storage/1775059/e0d64b1cb8ae8bb7052270bb8eeef5ba21a3c131/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1031349/186bffdf663847fce34ef344f3142b3cf148e402/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1600758/48b7b4a69f7ce5991121870c9a2f027f9e2c7041/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1925821/6a1d049e384919d8478e0dc1a9ff789c7f500f07/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1936842/e733b8969cd6cdf87ea1ec454d4e62279f34b3ac/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871002/212f691ac7c5e7cfc43a031ca90e23988dc4e5af/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1599023/6add8cb499cc3b1dca20f22c9b17ae29fbfe727d/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test |68.8%| [DL] $(B)/canondata_storage/1917492/b09f81119d6db779ff6e194090c647867842db23/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1809005/df0d5940a3b3a38ba468a035aba7ce54440f0891/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1599023/d714848e02b570470fc7f4a8a1315869ac70b513/resource.tar.gz{, .log} |68.8%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |68.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 |68.7%| [TS] {RESULT} ydb/tests/fq/s3/import_test |68.7%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/import_test |68.7%| [DL] $(B)/canondata_storage/1942173/2b6d37b434944472410a121082ca65dee724c848/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871002/09c7103fe942f664e52ea9943175fcce1c927b80/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1784826/cbc63541f63d78da712c6e11ae70c4ee10dfb428/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903885/e571a3fbea26622636c12f349d5811739c3c6677/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871182/996a8d2d865b3f19f5c68201bf7e2cfe8f1268f4/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1903885/e4adce0662ce21f7571f319c0ebb24df1985df0e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936947/eb45622f6e742230b1613aebb0bac678cc96fb83/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1925821/b41ee142eb0ecec97fb696b52ade07057abd9b3c/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1775319/581989ddfd844cd7fb811fb9f47c5b23d36a9346/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942525/02313b653cd90bd52d23ab748eeb6f19dd31efb9/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937492/3b472eddc14507ca61231b1a308e847ec2b7b2bf/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1599023/0fde09ac1ad0a850ad1ab93edf9eee9e0688dde1/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1781765/42df89988fcd33edfdab26a81def80f0820c3235/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1130705/9004be6a07264a5bf29a5f00e7ff1eb4e47458ad/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1777230/1be81c0af4d894e438b1e6abeb6641aa0309b29e/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1600758/0b2ec7f57dbbd2c69f7894fb1ef04f94367de8d9/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936947/8c234556c048ec2212784e80981b34176fe97cf6/resource.tar.gz{, .log} |68.8%| [DL] 
$(B)/canondata_storage/1597364/75cbdd585b0656f6fb0390e1698cb16d6290c17c/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942278/f85b5ff273f15c86ee649e6dcb392b4194b897cc/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1936947/21cc5de50f594b1190d08542a7f262a2327e625b/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937492/fae2471f79672290055b05939c32d42b13b0819b/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871182/cdd8fb9ea4ddd53c6670aa1140203ceb50634749/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1847551/fc7297e0cdc0ebe075e27df94088bbf4da4a2595/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1942278/4753a3574c7d4c4cc4a6ef5262a4559e7e493c80/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1936842/50b264d4daedd2dfbc510cffe988c135e4f73a28/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/995452/f3edc5905f3fec9aade63210a7de845a74964f60/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937429/5bcfa7fa889e048eab4fac33f32363a8c63e5b0b/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1923547/3837386c5673f42d0a262b53fa145c1210e06267/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1817427/0c40572784ba0c378f9763d962c3c5e8b7787ec6/resource.tar.gz{, .log} >> TYardTest::TestStartingPointReboots [GOOD] >> TYardTest::TestRestartAtNonceJump |68.7%| [DL] $(B)/canondata_storage/1773845/ebbc0e7a6553d487ca6f9443345b87dc94e5ba64/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1777230/1db1903a6e0dabe5575aead91fb71d857f3f9a30/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937424/058ac03eb1d0747a8ff4e9834da6c7421cc76622/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1931696/0e52d5b9778b2943992171dc32150f40daf8bfa5/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1880306/0dff39a47da7c6db82403c4d9d953892f43da982/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1931696/966609c330b2f749a4acb766d57d3b5dcca3d7f5/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942278/340f722a851e4412d2c35b434f3ee6113a9f7959/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942525/e1a82d4e7077e073e6175abb7c2d712d0cd08dc4/resource.tar.gz{, .log} |68.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |68.7%| [DL] $(B)/canondata_storage/1924537/bc0aa6d2dc96c8e2d21b35c367a15ca1ca298c7c/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/tests/functional/kv_workload/flake8 |68.7%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/import_test |68.7%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/unittest |68.7%| [TA] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.7%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |68.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |68.7%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test |68.7%| [DL] $(B)/canondata_storage/1817427/1d09a6a9bd95b3d23b0ad7e5fb8ca247962a9167/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1946324/1db652d5b002ab03b5138ef5dd01126c2deab600/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1903280/55ee056094134146d6b228e0e2827a4a0b1bae59/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1936947/e9b2989833eb2cb143a6b33579463fddacfe47db/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1031349/45aada6c316544e03166fc51527848ab05146f50/resource.tar.gz{, .log} >> TPDiskUtil::DriveEstimator [GOOD] |68.6%| [DL] $(B)/canondata_storage/1925821/5c7988bca7ff7631d849ea3fc0177b71ea70a9e8/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1130705/f62fe18a04b048878dcf5b69770e9d14e5d379e4/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1937001/3df1bf80f5738c3f0205526961db8957f75fdaea/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1881367/0038fdd5944649d910caa3afaa1f132a60fb35b8/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1936273/f7ac782bb4f6fe95601764c0efdfb9f8d7bb7d49/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1946324/208a50d83749c76dc119c7025e7f828673e1f366/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1937424/67ccb4bc28f59f5eaedbfe7e4d59615be370bf27/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1924537/ed5c3cfadad0d4915690e6595935fd0ac4b575d5/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1923547/9e635ae8d87d6d91f29e1dd2b0f82d9958ebefe5/resource.tar.gz{, .log} |68.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node >> TPDiskUtil::OffsetParsingCorrectness >> TPDiskRaces::KillOwnerWhileDeletingChunk [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight >> TPDiskUtil::OffsetParsingCorrectness [GOOD] >> TPDiskUtil::FormatSectorMap |68.6%| [DL] $(B)/canondata_storage/1917492/d5eb47a4ac49b0fe0cd9d069e3197c610317bc8c/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1889210/b3ec54c8ba5425d52cf7fa3db3638fad22de7e87/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1942173/e772b9f5e7fdee47a02e467e47e5db2ae21c1ecf/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1847551/98babfb2e71230fbc636c0bf4e21403b16782b74/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/212715/5139a033a064dcf51fe12f342340ef4c205e977a/resource.tar.gz{, .log} |68.6%| [DL] $(B)/canondata_storage/1599023/1b6e8347ca7cf43e4ffb87f89e02cf72c8adfa32/resource.tar.gz{, .log} |68.6%| COMPACTING CACHE 30.1GiB |68.7%| [DL] $(B)/canondata_storage/1946324/dc6ee267af5d1b0e264188916e19d0d7a07f9201/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1917492/75449c24a279528381d8f6bec1271caa90cd7a95/resource.tar.gz{, .log} |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |68.7%| [TS] {RESULT} ydb/tests/functional/config/flake8 |68.7%| [DL] $(B)/canondata_storage/1942173/a6fc778ac459c83f8c2a8bab8c997c6c223d9eff/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1871002/41ae725b67896da2823a1ca29e32600f981785c2/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |68.7%| [DL] 
$(B)/canondata_storage/1937492/2bb2455c1ebb5ccab2ce4acc1aa8fb7defa3f4b8/resource.tar.gz{, .log} >> TPDiskUtil::FormatSectorMap [GOOD] |68.7%| [DL] $(B)/canondata_storage/1880306/f04a57e691589def2f527c532c54402c486d5974/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 |68.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |68.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |68.7%| [DL] $(B)/canondata_storage/1937429/3ec353865b88f20c966196a0ce16243c37e12190/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1600758/4facc3887d7be655ebe11f112eb8a7dc7a544811/resource.tar.gz{, .log} |68.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 |68.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |68.7%| [DL] $(B)/canondata_storage/1775319/3515b86fb929979a6751f93bd43a0291eaa01262/resource.tar.gz{, .log} |68.7%| [DL] $(B)/canondata_storage/1775319/f824086f9aede9fe69b74b082af09c546782c449/resource.tar.gz{, .log} |68.7%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |68.8%| [TS] {RESULT} ydb/core/io_formats/arrow/ut/unittest |68.8%| [DL] $(B)/canondata_storage/1936997/f69902e9df436dbd7c079c9b996bb43c65b9828c/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test |68.8%| [TS] {RESULT} ydb/tools/ydbd_slice/bin/import_test |68.8%| [TS] {RESULT} ydb/core/blobstorage/base/ut/gtest |68.8%| [DL] $(B)/canondata_storage/1689644/40519f669ee0ff4cd14681f4648f099da23d476a/resource.tar.gz{, .log} >> TYardTest::TestRestartAtNonceJump [GOOD] >> TYardTest::TestRestartAtChunkEnd |68.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |68.7%| [DL] $(B)/canondata_storage/1031349/1e1ff3377d9e6463687741aa3509395b92a00445/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1871102/76c86b8b78af73dd74b03bc83dba0b0e32bd1cd6/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1946324/bb6f9b1bdca4cf325d4b3c175cc7c05431da0bee/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |68.8%| [DL] $(B)/canondata_storage/1925842/0dddd8be953c72538d28f43c54fb364cfe8111e7/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |68.8%| [TS] {RESULT} ydb/tests/functional/kv_workload/import_test |68.8%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/import_test |68.8%| [DL] $(B)/canondata_storage/1817427/cd1ccdb594fb5c0d7ccd4116b70e61c92e494ff1/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/tests/functional/config/import_test |68.8%| [DL] $(B)/canondata_storage/1936947/ac258f02a615b46fc7a88b9fef9062f73aca53fa/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |68.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |68.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |68.8%| [DL] $(B)/canondata_storage/1689644/6a03d147fd8c24ca8f22e2c016de5b0418f13570/resource.tar.gz{, .log} |68.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |68.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 |68.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |68.8%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/flake8 |68.8%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |68.8%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |68.8%| [DL] $(B)/canondata_storage/1903885/9ebea4f8b0d9c14e629045992b2a0566b4da0814/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1942100/090fa9e99dfe7f43e6470439372ea4a84a495992/resource.tar.gz{, .log} |68.8%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/import_test |68.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 |68.8%| [TS] {RESULT} ydb/tests/functional/rename/import_test |68.8%| [DL] $(B)/canondata_storage/1925842/ec6e9d018b38ccaf9fc6296a792f1e60022c1c22/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1937424/ca0bc12088c1a293fde3df7327441001cc5f0af1/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1773845/6921a43b8331634020d1aa346b91c56ad87a3ae0/resource.tar.gz{, .log} |68.8%| [DL] $(B)/canondata_storage/1130705/851b827e92b1d2a782f09dc8f909cdc1f88c0a5d/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1924537/de922a973d80db7430ccc36b4b196b0fe3a08ff2/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |68.9%| [DL] $(B)/canondata_storage/1597364/26c9cd4ddf7d11c6a72eed900146bed3a8e037de/resource.tar.gz{, .log} |68.9%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch |68.9%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |68.9%| [TS] {RESULT} ydb/core/viewer/json/ut/unittest |68.9%| [TS] {RESULT} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest |68.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |68.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |68.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |68.9%| [TS] {RESULT} ydb/tests/functional/limits/import_test |68.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/metrics/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.9%| [DL] $(B)/canondata_storage/1889210/5f0f82e4a2bed51403d8667507a43b3b2e40bfb4/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1923547/14c0d60ad63ffaedb974b51b52039901f095b5c5/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1775319/53c99957370beeb350847e11554dd471106250fd/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1937150/69ba5a293eaaa6b3e25fe140a1e593fd4605b88a/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1777230/1e94bfc4170d2c00272e8b088c9a3c26c6d066d2/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1942100/c3d647446edbe752077b3f908285ed4a4a032d7f/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1784117/eb116fd507d59419f5df95216e2268a87630509b/resource.tar.gz{, .log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskUtil::FormatSectorMap [GOOD] Test command err: 2024-11-21T17:42:23.008312Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.021045Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 10929362251361818833 MagicNextLogChunkReference: 13153689361901200751 MagicLogChunk: 11330462577780983329 MagicDataChunk: 9898047516389006013 MagicSysLogChunk: 918198870829883125 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942945221 (2024-11-21T17:42:22.945221Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:23.021061Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:543} PDiskId# 1Can't start due to a guid error expected# 10929362251361818832 on-disk# 10929362251361818833 PDiskId# 1 2024-11-21T17:42:23.099181Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.099319Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14275527394326999555 MagicNextLogChunkReference: 8150681428867930228 MagicLogChunk: 6118511980094638931 MagicDataChunk: 16850267723749365670 MagicSysLogChunk: 12834425283038586754 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210943089459 (2024-11-21T17:42:23.089459Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:23.100564Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:23.101438Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:23.101460Z node 2 
:BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:23.101545Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.101646Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.101687Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:23.101821Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1245007 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:23.102085Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1245007 StartingPoints: {} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T17:42:23.105037Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.116853Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14275527394326999555 MagicNextLogChunkReference: 8150681428867930228 MagicLogChunk: 6118511980094638931 MagicDataChunk: 16850267723749365670 MagicSysLogChunk: 12834425283038586754 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210943089459 (2024-11-21T17:42:23.089459Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:23.123955Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1170028 NonceLog# 1245007 NonceData# 1721296} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:23.128380Z node 2 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:23.128399Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2024-11-21T17:42:23.128414Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult 
Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:23.130335Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.130819Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.131181Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:23.294531Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.314233Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 18019557171105121079 MagicNextLogChunkReference: 12724904165068151875 MagicLogChunk: 14402744083490953326 MagicDataChunk: 18033347170408076065 MagicSysLogChunk: 12253250322485310996 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210943257077 (2024-11-21T17:42:23.257077Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:23.340311Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:23.348333Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:23.348364Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:23.348569Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.355853Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.356295Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:23.390085Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1607576 CutLogId# [0:0:0] ownerRound# 
2 PDiskId# 1 2024-11-21T17:42:23.396043Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2111} removed owner from chunks Keeper OwnerId# 3 PDiskId# 1 2024-11-21T17:42:23.396059Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.396071Z node 3 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2145} KillOwner ownerId# 3 ownerRound# 2 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2024-11-21T17:42:23.413873Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1607576 CutLogId# [0:0:0] ownerRound# 3 PDiskId# 1 2024-11-21T17:42:23.416718Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2111} removed owner from chunks Keeper OwnerId# 4 PDiskId# 1 2024-11-21T17:42:23.416732Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.416741Z node 3 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2145} KillOwner ownerId# 4 ownerRound# 3 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2024-11-21T17:42:23.417478Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 5 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1607576 CutLogId# [0:0:0] ownerRound# 4 PDiskId# 1 2024-11-21T17:42:23.421950Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2111} removed owner from chunks Keeper OwnerId# 5 PDiskId# 1 2024-11-21T17:42:23.421962Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.421968Z node 3 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2145} KillOwner ownerId# 5 ownerRound# 4 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2024-11-21T17:42:23.428433Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 6 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1607576 CutLogId# [0:0:0] ownerRound# 5 PDiskId# 1 2024-11-21T17:42:23.429078Z node 3 :BS_PDISK NO ... 
ize: 1168 (current sizeof: 1168) TimestampUs: 1732210950699552 (2024-11-21T17:42:30.699552Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:30.732002Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:30.735755Z node 8 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:30.735777Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:30.735956Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.737168Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.737334Z node 8 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:30.738485Z node 8 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2011432 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:30.804194Z node 9 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:30.808739Z node 9 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 2288769169845275092 MagicNextLogChunkReference: 9103536124053650506 MagicLogChunk: 1093571645788294895 MagicDataChunk: 6640494670596769428 MagicSysLogChunk: 8390546863000863136 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210950756657 (2024-11-21T17:42:30.756657Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:30.810608Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:30.812874Z node 9 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:30.812903Z node 9 :BS_PDISK 
NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:30.813017Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.813189Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.813226Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:30.816464Z node 9 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1675322 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:30.816970Z node 9 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1675322 StartingPoints: {} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2024-11-21T17:42:30.819332Z node 9 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:30.819615Z node 9 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:998} HandlePoison, PDiskThread stopped PDiskId# 1 2024-11-21T17:42:30.820326Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2288769169845275092 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T17:42:30.905552Z node 10 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:30.915358Z node 10 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5314429255837907089 MagicNextLogChunkReference: 7351853141846974619 MagicLogChunk: 810501453858361810 MagicDataChunk: 15917689788848967047 MagicSysLogChunk: 14944219046063692889 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210950848449 (2024-11-21T17:42:30.848449Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:30.925357Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:30.942517Z node 10 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:30.942542Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:30.943278Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.943464Z node 10 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.943512Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:30.950844Z node 10 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1994339 CutLogId# [0:0:0] ownerRound# 14 PDiskId# 1 2024-11-21T17:42:30.958022Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:30.964557Z node 10 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [2:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1994339 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {2}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {3..982} PDiskId# 1 2024-11-21T17:42:30.970727Z node 10 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:30.992313Z node 10 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5314429255837907089 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 |68.9%| [TS] {RESULT} ydb/tests/functional/cms/import_test |68.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 |68.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 |68.9%| [DL] $(B)/canondata_storage/1936273/98e4b41d6221eb1e25a8689e7a8c9e8e9f83c75f/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1889210/cdea4d984d293e4c4894b43fbddd80f6768144c4/resource.tar.gz{, .log} |68.9%| [DL] $(B)/canondata_storage/1600758/bba12fda8a5a68a3753c70d51907e240b2e6a66b/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/core/tx/scheme_board/ut_double_indexed/unittest |68.9%| [DL] 
$(B)/canondata_storage/1924537/8515a8b3787c5105aaf393c0e112ddb75d305ccf/resource.tar.gz{, .log} |68.9%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |68.9%| [TS] {RESULT} ydb/core/config/init/ut/unittest |68.9%| [TS] {RESULT} ydb/tests/fq/yds/import_test |68.9%| [DL] $(B)/canondata_storage/1599023/227f2914a8aa929f2f2a755dc19dd4060d33f2c5/resource.tar.gz{, .log} |69.0%| [TS] {RESULT} ydb/core/blobstorage/crypto/ut/unittest |69.0%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |69.0%| [TS] {RESULT} ydb/apps/dstool/flake8 |69.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 |69.0%| [DL] $(B)/canondata_storage/1784117/498626a06fb2650088930c390bdc96147a0bc505/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1942100/da694607ab211ab453c7880d608269737da0e1e0/resource.tar.gz{, .log} |69.0%| [TS] {RESULT} ydb/tests/stability/ydb/import_test |69.0%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |69.0%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |69.0%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |69.0%| [TS] {RESULT} ydb/core/blobstorage/vdisk/defrag/ut/unittest |69.0%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test |69.0%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test |69.0%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/flake8 |69.0%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |69.0%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |69.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |69.0%| [TS] {RESULT} ydb/tests/functional/large_serializable/flake8 |69.0%| [DL] $(B)/canondata_storage/1600758/1260842a548b9eeb101aabd689cd26a911953004/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1814674/3660a2396e26152d8c8f050da9f28116b76739a2/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1784826/ae144ae65f45caf0cf861d9528ef4fc3e1c5e830/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1880306/975391d46ff9d241fae3efa496fefe1b49dc5396/resource.tar.gz{, .log} |69.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |69.0%| [DL] $(B)/canondata_storage/995452/cf615d0761fdf54ff78f8d33100e0f379784db10/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1775319/f5d325e9942124752494893299a6edbdfb1a1d2d/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1917492/d983c8e69867e7a5af2aad3db8b5eeebdf959284/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1600758/6d0d27fad1cf46a244c609129a6009834bc45a9a/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1923547/b6378128d274e5d1ef2e0c1c37e1cdcb2bbd21c4/resource.tar.gz{, .log} |69.0%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |69.0%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |69.0%| [DL] $(B)/canondata_storage/1923547/556a26215837428f8bd2b4b512f313930615d82b/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1784826/25cbabce687b21eded79fabb140f901221253ab9/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1917492/86ab0de654a60bf1e3145a3d8e3d7eae4a9f26b8/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1923547/fea898f087e0f27f17f93176391f1a45065a7fa5/resource.tar.gz{, .log} |69.0%| [DL] $(B)/canondata_storage/1937429/64e39c366e0b462b94fd9e04f579348331e65cd1/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1881367/943a50aaa7841517b3581cb3efc1c4693dfe6c56/resource.tar.gz{, .log} >> TYardTest::TestRestartAtChunkEnd [GOOD] >> TYardTestRestore::TestRestore15 |69.0%| [TS] {RESULT} 
ydb/library/benchmarks/runner/runner/flake8 |69.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 |69.0%| [DL] $(B)/canondata_storage/1942415/5dd4bbc3b4370798b80250a55a4da5d1863033ad/resource.tar.gz{, .log} |69.0%| [TS] {RESULT} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest |69.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |69.1%| [TS] {RESULT} ydb/tests/fq/multi_plane/import_test |69.1%| [DL] $(B)/canondata_storage/1925842/8b22a63573110228fc6a5e75beb97252b4db0e2a/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1936842/8f78d4e91e4f9982eb78e4a8b888794c6f76e3cf/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1942525/71aa87e8531eaa616ea40214f4172330acf1be1c/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1130705/a877e9a38d4cdcd3a3048f1fe39ff52ef1e78652/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1903885/f9d45bc250f07f42a2353007c7f2648896a84384/resource.tar.gz{, .log} |69.1%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test |69.1%| [DL] $(B)/canondata_storage/1900335/78b0311d619a60a4d1b9cef34b0261de23138f5f/resource.tar.gz{, .log} >> TYardTestRestore::TestRestore15 [GOOD] |69.1%| [DL] $(B)/canondata_storage/1031349/ff2d90d606cdc417d573d7d2f32329f10cf0be11/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1942525/b841c1f7e178a6bdcbcc7188f97e9d64098db934/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1880306/a64cbd36324c0aa2db14c1bae670848250f6f405/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/937458/cc57ea281d0b003d397eca8623f6324d4f1e6ded/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1847551/6ea0f0d238a8a57c98cf719da4e87036e3ffdde6/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1871002/b21941ba70054720e6cf10accab3a568d92d2d97/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1775059/c78334a5a54c55b78c6157e0006c3af42c43b3aa/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1871002/fd83b9fcca23643110586ceb3cb213cea846db7b/resource.tar.gz{, .log} |69.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |69.1%| [TS] {RESULT} ydb/core/resource_pools/ut/unittest |69.1%| [DL] $(B)/canondata_storage/1775319/16e55349c1d8a123c91f7d512b301ac22c034701/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1597364/3d3a2dffac5b64baabad6f932284c93dcb205cd6/resource.tar.gz{, .log} |69.1%| [TS] {RESULT} ydb/tests/library/ut/import_test |69.1%| [TS] {RESULT} ydb/tools/cfg/bin/flake8 |69.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |69.2%| [TS] {RESULT} ydb/tools/tstool/flake8 |69.1%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/import_test |69.1%| [DL] $(B)/canondata_storage/1600758/8967dbeed4cbcf01ab4f5cf532c0a6b1652e0625/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1936947/59872f3b1f1eddc6f2194f87604a00cc7300d8e1/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1942525/f7240bfb895abd9165a9251745a77a5737396a6b/resource.tar.gz{, .log} |69.1%| [DL] $(B)/canondata_storage/1923547/9239cf6dc7870b94856822425e617d80d75f9a89/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1899731/a42b3541f94a87ba84e6f819e45d301c084940d9/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1775059/4ce689cacc1b04e7b955e62a2269c8180fca36bb/resource.tar.gz{, .log} |69.2%| [TS] {RESULT} ydb/core/debug_tools/ut/unittest |69.2%| [DL] $(B)/canondata_storage/1925821/e20a6041e7f58f4d79973b167aed78646db5868f/resource.tar.gz{, 
.log} |69.2%| [TA] {RESULT} $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} |69.2%| [TS] {RESULT} ydb/mvp/oidc_proxy/ut/unittest |69.2%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test |69.1%| [TS] {RESULT} ydb/tests/functional/dynumber/flake8 |69.1%| [TS] {RESULT} ydb/tests/functional/restarts/import_test |69.1%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |69.2%| [DL] $(B)/canondata_storage/1925821/3149e5c24f2e47440679ea6c5e1f6d7e1b2b75ac/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1942173/e32f1de19c4f2770a6f215d1dc22bc97e318bf22/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1871182/03581f8f43b6630387f93dcffb64efda102a5104/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1784826/23b1299e7f12d5cf020984a5f7c964801c31ebc5/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1936842/de8a3d5f5dbc206e6c8aac1877a6c2c6816ea52f/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1817427/6ea5274dd6217b2229e46445dc75d3ec401bb15f/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1920236/56560fc4eb0991ee6681b0a1b288f62576ec0df7/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1777230/85281113cfa8b551c6ab2fb41421ab9120c1851f/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1871102/5190906b0c0babdbbc337b471790697bf5591d3a/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1937492/9ba008b22e29bc0b3fc3b0b722d6d7c245775122/resource.tar.gz{, .log} |69.2%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |69.2%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator/unittest |69.2%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |69.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |69.2%| [TS] {RESULT} ydb/library/benchmarks/runner/import_test |69.2%| [DL] $(B)/canondata_storage/1936842/848a979971caeba2efc272b5418157aab954923a/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1130705/2dbc543e7e2156e1086b7eff9aaab72ade9022c4/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1784826/6fb10875fc2d13209580debefd9e32c0586b2ae6/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1031349/a955c852651ea9f8124bef13bd770d8d15af6c2e/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1600758/a42cbf1b9e8a45c29dfbe8c1c1f3d9fac0eb3d7d/resource.tar.gz{, .log} |69.2%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |69.3%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |69.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 |69.3%| [DL] $(B)/canondata_storage/1773845/6c44bb7a3842ecf9adf65f1679c6e8b589fec21a/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1880306/9f93bbb7f8cdbc54330d6e0f905404d0e826ce1a/resource.tar.gz{, .log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTestRestore::TestRestore15 [GOOD] Test command err: 2024-11-21T17:42:22.332745Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.348379Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 7218917181663109770 MagicNextLogChunkReference: 8913901947521715581 MagicLogChunk: 11756609458084239332 MagicDataChunk: 8248698610269358855 MagicSysLogChunk: 10443552137556885793 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) 
SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942278476 (2024-11-21T17:42:22.278476Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.353034Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.356653Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.356861Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.357050Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.357243Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.357380Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.428218Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1661759 CutLogId# [0:0:0] ownerRound# 5 PDiskId# 1 2024-11-21T17:42:22.428596Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.473685Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.474347Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T17:42:22.475116Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T17:42:22.556912Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.556940Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T17:42:22.582840Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 16034697670878485633 MagicLogChunk: 17135853760367108797 MagicDataChunk: 326321154667423436 MagicSysLogChunk: 7250531327397547903 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 
SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942495987 (2024-11-21T17:42:22.495987Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.588403Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.591521Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.591547Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.591720Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.591935Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.592061Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.594142Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 7 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1059831 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.594364Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1059831 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.594559Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1059831 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.594716Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 5 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1059831 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.595085Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 6 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1059831 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.860469Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},{chunkIdx# 102 users# 5 endOfSplice# 0 {owner# 3 lsn# 171-340 firstLsnToKeep# 301}, {owner# 4 lsn# 171-340 firstLsnToKeep# 301}, {owner# 5 lsn# 170-339 firstLsnToKeep# 201}, {owner# 6 lsn# 170-338 firstLsnToKeep# 201}, {owner# 7 lsn# 171-340 firstLsnToKeep# 301},},{chunkIdx# 103 users# 5 endOfSplice# 0 {owner# 3 lsn# 341-509 firstLsnToKeep# 301}, {owner# 4 lsn# 341-510 firstLsnToKeep# 301}, {owner# 5 lsn# 339-508 firstLsnToKeep# 201}, {owner# 6 lsn# 339-508 firstLsnToKeep# 201}, {owner# 7 lsn# 341-510 
firstLsnToKeep# 301},},{chunkIdx# 104 users# 5 endOfSplice# 0 {owner# 3 lsn# 510-679 firstLsnToKeep# 301}, {owner# 4 lsn# 510-679 firstLsnToKeep# 301}, {owner# 5 lsn# 509-678 firstLsnToKeep# 201}, {owner# 6 lsn# 509-678 firstLsnToKeep# 201}, {owner# 7 lsn# 511-679 firstLsnToKeep# 301},},{chunkIdx# 105 users# 5 endOfSplice# 0 {owner# 3 lsn# 680-849 firstLsnToKeep# 301}, {owner# 4 lsn# 680-849 firstLsnToKeep# 301}, {owner# 5 lsn# 679-848 firstLsnToKeep# 201}, {owner# 6 lsn# 679-847 firstLsnToKeep# 201}, {owner# 7 lsn# 680-849 firstLsnToKeep# 301},},{chunkIdx# 106 users# 5 endOfSplice# 0 {owner# 3 lsn# 850-1018 firstLsnToKeep# 301}, {owner# 4 lsn# 850-1019 firstLsnToKeep# 301}, {owner# 5 lsn# 848-1017 firstLsnToKeep# 201}, {owner# 6 lsn# 848-1017 firstLsnToKeep# 201}, {owner# 7 lsn# 850-1019 firstLsnToKeep# 301},},{chunkIdx# 107 users# 5 endOfSplice# 0 {owner# 3 lsn# 1019-1188 firstLsnToKeep# 301}, {owner# 4 lsn# 1019-1188 firstLsnToKeep# 301}, {owner# 5 lsn# 1018-1187 firstLsnToKeep# 201}, {owner# 6 lsn# 1018-1187 firstLsnToKeep# 201}, {owner# 7 lsn# 1020-1188 firstLsnToKeep# 301},},{chunkIdx# 108 users# 5 endOfSplice# 0 {owner# 3 lsn# 1189-1358 firstLsnToKeep# 301}, {owner# 4 lsn# 1189-1358 firstLsnToKeep# 301}, {owner# 5 lsn# 1188-1357 firstLsnToKeep# 201}, {owner# 6 lsn# 1188-1356 firstLsnToKeep# 201}, {owner# 7 lsn# 1189-1358 firstLsnToKeep# 301},},{chunkIdx# 109 users# 5 endOfSplice# 0 {owner# 3 lsn# 1359-1527 firstLsnToKeep# 301}, {owner# 4 lsn# 1359-1528 firstLsnToKeep# 301}, {owner# 5 lsn# 1357-1526 firstLsnToKeep# 201}, {owner# 6 lsn# 1357-1526 firstLsnToKeep# 201}, {owner# 7 lsn# 1359-1528 firstLsnToKeep# 301},},{chunkIdx# 110 users# 5 endOfSplice# 0 {owner# 3 lsn# 1528-1697 firstLsnToKeep# 301}, {owner# 4 lsn# 1528-1697 firstLsnToKeep# 301}, {owner# 5 lsn# 1527-1696 firstLsnToKeep# 201}, {owner# 6 lsn# 1527-1696 firstLsnToKeep# 201}, {owner# 7 lsn# 1529-1697 firstLsnToKeep# 301},},{chunkIdx# 111 users# 5 endOfSplice# 0 {owner# 3 lsn# 1698-1867 firstLsnToKeep# 301}, {owner# 4 lsn# 1698-1867 firstLsnToKeep# 301}, {owner# 5 lsn# 1697-1866 firstLsnToKeep# 201}, {owner# 6 lsn# 1697-1865 firstLsnToKeep# 201}, {owner# 7 lsn# 1698-1867 firstLsnToKeep# 301},},{chunkIdx# 112 users# 5 endOfSplice# 0 {owner# 3 lsn# 1868-2004 firstLsnToKeep# 301}, {owner# 4 lsn# 1868-2004 firstLsnToKeep# 301}, {owner# 5 lsn# 1866-2003 firstLsnToKeep# 201}, {owner# 6 lsn# 1866-2003 firstLsnToKeep# 201}, {owner# 7 lsn# 1868-2004 firstLsnToKeep# 301},},] PDiskId# 1 2024-11-21T17:42:22.860703Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 102 users# 5 endOfSplice# 0 {owner# 3 lsn# 171-340 firstLsnToKeep# 301}, {owner# 4 lsn# 171-340 firstLsnToKeep# 301}, {owner# 5 lsn# 170-339 firstLsnToKeep# 201}, {owner# 6 lsn# 170-338 firstLsnToKeep# 201}, {owner# 7 lsn# 171-340 firstLsnToKeep# 301},},{chunkIdx# 103 users# 5 endOfSplice# 0 {owner# 3 lsn# 341-509 firstLsnToKeep# 301}, {owner# 4 lsn# 341-510 firstLsnToKeep# 301}, {owner# 5 lsn# 339-508 firstLsnToKeep# 201}, {owner# 6 lsn# 339-508 firstLsnToKeep# 201}, {owner# 7 lsn# 341-510 firstLsnToKeep# 301},},{chunkIdx# 104 users# 5 endOfSplice# 0 {owner# 3 lsn# 510-679 firstLsnToKeep# 301}, {owner# 4 lsn# 510-679 firstLsnToKeep# 301}, {owner# 5 lsn# 509-678 firstLsnToKeep# 201}, {owner# 6 lsn# 509-678 firstLsnToKeep# 201}, {owner# 7 lsn# 511-679 firstLsnToKeep# 301},},{chunkIdx# 105 users# 5 endOfSplice# 0 {owner# 3 lsn# 680-849 firstLsnToKeep# 301}, {owner# 4 lsn# 680-849 firstLsnToKeep# 301}, {owner# 5 
lsn# 679-848 firstLsnToKeep# 201}, {owner# 6 lsn# 679-847 firstLsnToKeep# 201}, {owner# 7 lsn# 680-849 firstLsnToKeep# 301},},{chunkIdx# 106 users# 5 endOfSplice# 0 {owner# 3 lsn# 850-1018 firstLsnToKeep# 301}, {owner# 4 lsn# 850-1019 firstLsnToKeep# 301}, {owner# 5 lsn# 848-1017 firstLsnToKeep# 201}, {owner# 6 lsn# 848-1017 firstLsnToKeep# 201}, {owner# 7 lsn# 850-1019 firstLsnToKeep# 301},},{chunkIdx# 107 users# 5 endOfSplice# 0 {owner# 3 lsn# 1019-1188 firstLsnToKeep# 301}, {owner# 4 lsn# 1019-1188 firstLsnToKeep# 301}, {owner# 5 lsn# 1018-1187 firstLsnToKeep# 201}, {owner# 6 lsn# 1018-1187 firstLsnToKeep# 201}, {owner# 7 lsn# 1020-1188 firstLsnToKeep# 301},},{chunkIdx# 108 users# 5 endOfSplice# 0 {owner# 3 lsn# 1189-1358 firstLsnToKeep# 301}, {owner# 4 lsn# 1189-1358 ... kId# 1 2024-11-21T17:42:43.418554Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:43.418574Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:43.418708Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:43.418880Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:43.419002Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:43.512347Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1099313 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:43.512676Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2024-11-21T17:42:43.877938Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:43.883645Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 4513995048864277829 MagicNextLogChunkReference: 9821046558444427915 MagicLogChunk: 17195002461197308489 MagicDataChunk: 4213453302993514721 MagicSysLogChunk: 2945979471931787665 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210963324844 (2024-11-21T17:42:43.324844Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:43.890174Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1316079 NonceLog# 1099313 NonceData# 2070303} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:43.891288Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:43.892894Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 2 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:43.892918Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 2 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:43.893150Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:43.893442Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:43.893584Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:43.968543Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:42:43.977119Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 508} PDiskId# 1 2024-11-21T17:42:44.060672Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:44.076845Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 4513995048864277829 MagicNextLogChunkReference: 9821046558444427915 MagicLogChunk: 17195002461197308489 MagicDataChunk: 4213453302993514721 MagicSysLogChunk: 2945979471931787665 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210963324844 (2024-11-21T17:42:43.324844Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:44.084868Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord 
Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 3388328 NonceLog# 2385541 NonceData# 3442950} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:44.092622Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:44.099253Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 1099821 " with nonceJumpLogPageHeader2->PreviousNonce# "# 1099821 PDiskId# 1 2024-11-21T17:42:44.100640Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 3 SectorIdx# 186 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 3 OffsetInChunk# 761856} PDiskId# 1 2024-11-21T17:42:44.100660Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 3 OffsetInChunk# 761856} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:44.100800Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 1 endOfSplice# 0 {owner# 3 lsn# 509-1016 firstLsnToKeep# 0},},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1017-1202 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:44.100912Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-508 firstLsnToKeep# 0},},{chunkIdx# 2 users# 1 endOfSplice# 0 {owner# 3 lsn# 509-1016 firstLsnToKeep# 0},},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1017-1202 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:44.100958Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:44.168607Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:42:44.174962Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1202} PDiskId# 1 2024-11-21T17:42:44.213313Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:44.224387Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 10071888900749938101 MagicNextLogChunkReference: 6882173949281855726 MagicLogChunk: 11137949245254213387 MagicDataChunk: 16834358440709203075 MagicSysLogChunk: 11060185822804308672 
MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210964190756 (2024-11-21T17:42:44.190756Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:44.228016Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:44.232787Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:44.232820Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:44.232967Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:44.233100Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:44.233198Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:44.315698Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1556575 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 |69.3%| [TS] {RESULT} ydb/tests/fq/plans/import_test |69.3%| [TM] {RESULT} ydb/core/driver_lib/version/ut/unittest |69.2%| [DL] $(B)/canondata_storage/1600758/85ce7147e6a553c51a5d28db0989bd29a0aa0a8b/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1936842/11d23d4a39031af80d6dc470ce99f9427771e7d4/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1871182/3ca7d9f793310690733c1f09756d621fb525e562/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1031349/3fb25bad7a135d8493b2fd4782bc9ca920c7e4e4/resource.tar.gz{, .log} |69.2%| [DL] $(B)/canondata_storage/1777230/3c117824725bda13a89aad6b07b22541746fa215/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1817427/5d304fd6b37c848dfc0dd95f9f02b44991b176c5/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1937492/0916a4c5121c755975bb98db3f6bbff60eb63132/resource.tar.gz{, .log} |69.3%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest |69.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 |69.3%| [DL] $(B)/canondata_storage/1942525/e7939b4cfb5e85a7bd57688517d44a82bd824253/resource.tar.gz{, .log} |69.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 |69.3%| [TS] {RESULT} ydb/tests/fq/http_api/import_test |69.3%| [DL] $(B)/canondata_storage/1889210/9f0ba7bc92451aa4a498112bc8c2e703011101c2/resource.tar.gz{, .log} |69.3%| [DL] 
$(B)/canondata_storage/1889210/a6f1d19efb8c2d66757fb3f23bc191e0ff7fca4e/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1942278/d3f67196e7e0096e289743f5dbfd5dc2f990f9e6/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1773845/fe357240ad41d1044e07d94e45c2e6ad7022cddd/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1600758/5d223afc08b0c616f7a151a55660aa50e5a078a3/resource.tar.gz{, .log} |69.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |69.3%| [TS] {RESULT} ydb/tests/stability/ydb/flake8 |69.3%| [TS] {RESULT} ydb/tests/functional/canonical/import_test |69.3%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |69.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |69.3%| [DL] $(B)/canondata_storage/1924537/9d702629c20241b52be3899488a0fbc1c3dc0a5e/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1600758/d71e8e715781d39882e1a1876aa775946961dc49/resource.tar.gz{, .log} |69.3%| [DL] $(B)/canondata_storage/1899731/d7118ad96c050279cfcfe95a4f9577de9c404054/resource.tar.gz{, .log} |69.3%| [TS] {RESULT} ydb/core/log_backend/ut/unittest |69.3%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |69.3%| [TS] {RESULT} ydb/core/kqp/ut/federated_query/common/clang_format |69.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 |69.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |69.3%| [DL] $(B)/canondata_storage/1937429/8922776cd7638b44d90c03cdd08ebd58640651e4/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1597364/69d3fa450d1d1d788d13776cdd4c993faeab88d0/resource.tar.gz{, .log} |69.4%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test |69.4%| [DL] $(B)/canondata_storage/1937027/b9d8bf5296438b5378e7a452d0f1d00c40561e66/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1936947/44e14ea63b2c348af47a6bfcf39d44d85b07321c/resource.tar.gz{, .log} |69.4%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/import_test >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadWhole |69.4%| [DL] $(B)/canondata_storage/1881367/fd6fe303f95983c7923be22740c4aa07b052e199/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1784826/fe2bef548a55eb11e26daaded455ba74fda33a1b/resource.tar.gz{, .log} |69.4%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |69.4%| [DL] $(B)/canondata_storage/1942671/5a994316452c786807f2de3ed136ba6ab54f260c/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1773845/8df154f4c78ff2cb24f0eca84702e3c40b845284/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1936842/7066d2b363541e6b64af6ab839e11485df1f472d/resource.tar.gz{, .log} |69.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |69.4%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |69.4%| [TS] {RESULT} ydb/tests/tools/pq_read/test/import_test |69.4%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |69.4%| [DL] $(B)/canondata_storage/1871182/1b070eaa6816a28c47f92666b5d664e443ea4c80/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1903885/804adfc45546340f178ce737d7f1d1e8feb56e81/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1809005/b9b47ee4a9e9f1d94a493c099f72559fa9f3f498/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1917492/73fd38e9ffcd658585f52c248a634ae9046b0ff5/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1920236/bb5064df2cdae357296347e4d44f50d713e3ae40/resource.tar.gz{, .log} |69.4%| [TS] {RESULT} 
ydb/tests/tools/pq_read/test/flake8 |69.4%| [DL] $(B)/canondata_storage/1936273/7a32049e7d34640d0891b0eccadb21c671bd9ed5/resource.tar.gz{, .log} |69.4%| [TS] {RESULT} ydb/core/pgproxy/ut/unittest |69.4%| [DL] $(B)/canondata_storage/1689644/21bb382fd3dd5b7958e963a35fdeff43f15acd6b/resource.tar.gz{, .log} |69.4%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test |69.4%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |69.4%| [DL] $(B)/canondata_storage/1942525/de296b35a0b1102cd2228744e8e164bffd57d12e/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1899731/35c5b505df7f2be9fcca6f830802312313cb4fc5/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1809005/7e4dc59583cad760822faf30fa4695e365329148/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1881367/a20f4e5c3e96e288ca11729296218a60f545a061/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1773845/151e1e36181dc4f51864bb618bfd0ac1b52111fc/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1937424/ef4272c0e98c55575149317381e8efaa85a26157/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1784826/1d88e578beafe01d6bba5ff3a3b2fecf2c6033c8/resource.tar.gz{, .log} >> TYardTest::TestChunkWriteReadWhole [GOOD] >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap |69.4%| [DL] $(B)/canondata_storage/1936842/08ca4a4a28db24dc522f19e01dabe6125d37fb33/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1784117/9a53e0c31670253d78108c8a3b4f81fc219a1c68/resource.tar.gz{, .log} |69.4%| [DL] $(B)/canondata_storage/1931696/6c4a36931a6a48d4590d231802e33da9ab40cb1e/resource.tar.gz{, .log} |69.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |69.5%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/import_test |69.5%| [DL] $(B)/canondata_storage/1937492/7ae37c32b42bb57d4df171a62ced7ab76867a8ea/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1946324/7382f2c221782186a6e8551e5722de6e9105c16c/resource.tar.gz{, .log} |69.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |69.5%| [TS] {RESULT} ydb/tests/library/ut/flake8 |69.5%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_checks/unittest |69.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.5%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |69.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |69.5%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test |69.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |69.5%| [DL] $(B)/canondata_storage/1923547/ab32f83f1ebe23caf0a6a3a190fc45da21e11461/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1775319/2835bc5228be8e9c43a55ecd1a258cb2da433197/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1784826/2218e5c9d19235479cfb35f6537a97d87cb1a514/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1925821/2978b18b76f4a1f7b0e4690d2015acea4775834c/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1871002/01b60ff3bfc2c8aa5cff8ebbe693bdbbfe6a1c1c/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1925842/5ebb3b5676761f341f736480110bd8ab8a78d858/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1599023/4e9b507a0cbcf5cfc31288de53bbb8560bb1a4bf/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1903280/e6bf7ee13ef64bc10434d7740c7b7cfcb072066a/resource.tar.gz{, .log} |69.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |69.5%| [DL] $(B)/canondata_storage/1599023/aac9f84343918a59a2e7eb28e31ca1d36cf30297/resource.tar.gz{, .log} |69.5%| [TM] {RESULT} ydb/core/tablet_flat/ut_util/unittest |69.5%| [DL] $(B)/canondata_storage/1942100/9da380c43b5de6ff69f9fc402c31f0485dedddb1/resource.tar.gz{, .log} |69.5%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/import_test |69.5%| [DL] $(B)/canondata_storage/1809005/867a928da4f0d2fb398d2c33bb67d6d401827633/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1899731/3bbf0846401e09b064add80d60e61e7654f87412/resource.tar.gz{, .log} |69.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |69.5%| [DL] $(B)/canondata_storage/1871182/c8ce39b7abe3399c49b5207663c8bb6922411d50/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1899731/c90b03e90440900b48d7af60d2e03d478d5e354f/resource.tar.gz{, .log} |69.5%| [DL] $(B)/canondata_storage/1942100/50ef34247500569eb5a4a2bd9f3afffdda4ff62e/resource.tar.gz{, .log} |69.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |69.5%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |69.5%| [DL] $(B)/canondata_storage/1784826/664a8fd8dece5fbba1057a5f4bdc597c2c7b2e59/resource.tar.gz{, .log} |69.5%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/flake8 |69.5%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |69.6%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |69.6%| [DL] $(B)/canondata_storage/937458/8c856e93170e875aa0a26dc4a34de068016e4377/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1600758/25298d1c532d0c67aa8f12ec2205b095eb24cdb4/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1600758/3d0654608172a6c45b1fa51e959c06528202c760/resource.tar.gz{, .log} |69.6%| [TS] {RESULT} ydb/tests/functional/wardens/import_test |69.6%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |69.6%| [TS] {RESULT} ydb/public/tools/local_ydb/flake8 |69.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |69.6%| [DL] $(B)/canondata_storage/1031349/f5278948946380da3d5514360765e6ba76347c46/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1899731/8371f5cbc66c5d22b5e30e94f49d4c3423336bd2/resource.tar.gz{, .log} |69.6%| [DL] 
$(B)/canondata_storage/1917492/cf57bdebe9d9af3fecbb7cd419893dd2ae22667e/resource.tar.gz{, .log} |69.6%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |69.6%| [DL] $(B)/canondata_storage/1937001/cf2822c292da12910b7e5a0fd062f9cafa22374e/resource.tar.gz{, .log} |69.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 |69.6%| [DL] $(B)/canondata_storage/1936273/7c78e1e45ae282daee686c006624daa21a7c6ca6/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1889210/f7b9171f10cdcd52016b9256bc91d2696630be46/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1871002/fb3dce8e5e8c0a86fa3b3841c5b4dfd00310d4f2/resource.tar.gz{, .log} |69.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |69.6%| [TS] {RESULT} ydb/tests/functional/compatibility/import_test |69.6%| [TS] {RESULT} ydb/tests/functional/tenants/import_test |69.6%| [DL] $(B)/canondata_storage/1923547/94f377eaa1d93890e1345ac4940cc6fa07bddd4f/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942100/43fa07fed3cf8aed32ae0b5fedbb00bd2bab6b27/resource.tar.gz{, .log} |69.6%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |69.6%| [DL] $(B)/canondata_storage/1936273/9193d75f8d6b5367c9ef3700c0a94d57ec6a3352/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1880306/94e1527dc445194ce3caa514976f24f846cc663f/resource.tar.gz{, .log} |69.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun |69.6%| [DL] $(B)/canondata_storage/1599023/5eeb37b7a60896a1dd87c5c5ea8dea5d33c2134c/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937424/bd676d9ef123703690e03ed87d87e5057ac9f7c7/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1031349/fade0e1ab4ddcf96add4ba75388b76b0ae6970f8/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1937150/a3ed05ae8ad4fea60a051f6171424c733487f045/resource.tar.gz{, .log} |69.6%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test |69.6%| [DL] $(B)/canondata_storage/1817427/e644870a8f51ab795f33a09b1cdec1fcd2063713/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1937367/bc1ddd52a5c80a7e52dc1d8a7570c5b38a712194/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1924537/85d9d2dc5ead7566100ca824520016d0c6b8d113/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1931696/bd23f2602b9ebbb7cd7a6085fa771927b4dc81e9/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1903280/5c2923264d785a87c86dd7095d632b6354624dc5/resource.tar.gz{, .log} |69.6%| [DL] $(B)/canondata_storage/1942415/bce8c45faf79a59c214fe2bf46e33d9cc351ed18/resource.tar.gz{, .log} |69.6%| [TS] {RESULT} ydb/core/client/metadata/ut/unittest |69.6%| [DL] $(B)/canondata_storage/1600758/fa72a23c77bab9a775b9e8e822e0be1a9841d508/resource.tar.gz{, .log} |69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |69.6%| [DL] $(B)/canondata_storage/1900335/dd59ce09b5b70054bb239659c9dedc5218a4d0cd/resource.tar.gz{, .log} |69.7%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |69.7%| [DL] $(B)/canondata_storage/1936997/9ec8b4b9f89889c3a5dbb346465333d3b7417d16/resource.tar.gz{, .log} |69.7%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test |69.7%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |69.7%| [TS] {RESULT} ydb/tests/fq/common/flake8 |69.7%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/flake8 |69.7%| [DL] $(B)/canondata_storage/1880306/5213fbc312a45950f1152a68258af55d6e4976a2/resource.tar.gz{, .log} |69.7%| [DL] 
$(B)/canondata_storage/1937424/c129022da3a346dda28c53da9edba53b3eb3f07a/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1931696/fca86c589326e9bc05817a71a47f8b9d16219dcc/resource.tar.gz{, .log} |69.7%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/example_configs/unittest |69.7%| [DL] $(B)/canondata_storage/1937027/8b932d9e8daf49fb9e777a1b6fa53c785126e3d6/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/212715/3045678bab9ba65eca350a0c5b4618902a97028e/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1784117/1f7c2e1c35d03b983fa1a69e594b4cacf4c42d7b/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1847551/c04b6845f7d6b8061d0f3bb18348cc2396fe3c4b/resource.tar.gz{, .log} >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap [GOOD] >> TYardTest::TestChunkWrite20Read02 |69.7%| [DL] $(B)/canondata_storage/1942525/6b25db4f7d87b2343d69fa758c7b0e0d2c385243/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1920236/ea9b79a4af23814e47242a86125bfc9db48e103e/resource.tar.gz{, .log} |69.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |69.7%| [TS] {RESULT} ydb/core/erasure/ut_perf/unittest |69.7%| [DL] $(B)/canondata_storage/1925842/698bbd06dcb399988109e3543f9819966d5e9daa/resource.tar.gz{, .log} |69.7%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |69.7%| [TS] {RESULT} ydb/tests/functional/api/flake8 |69.7%| [DL] $(B)/canondata_storage/1781765/75774e90f574004e23fc9aacf32e1f561a8c66ec/resource.tar.gz{, .log} |69.7%| [TS] {RESULT} ydb/core/tx/long_tx_service/public/ut/unittest |69.7%| [DL] $(B)/canondata_storage/1937367/ffc26952304424e6a4538295e7d27e30362a4e89/resource.tar.gz{, .log} |69.7%| [TS] {RESULT} ydb/mvp/meta/ut/unittest |69.7%| [DL] $(B)/canondata_storage/1775059/b354e53d4914595d2de6dddba4f1af4ec0b36621/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1775319/864d0177d3988207c37d5c5eda7be9164a718f0d/resource.tar.gz{, .log} >> TYardTest::TestChunkWrite20Read02 [GOOD] >> TYardTest::TestChunkUnlock |69.8%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test |69.8%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |69.8%| [DL] $(B)/canondata_storage/1925821/9c3baaef9cf7cf541749b011b756ac9d83887457/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1946324/f0844b7187f1db0315c7ba22b24ff34c0bddf188/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1889210/431569691fa60b20bf9ef4cc94610d8f1b1518e2/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1773845/4aaca50c52fbfe0fc1a237a3c226e5e498d0a750/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1871182/0b81d4e80c80fb4df981caded22cf7246cf56ec0/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1871182/54497cb67187ae0d3ca73f41cfdfc13334cad2d3/resource.tar.gz{, .log} |69.7%| [DL] $(B)/canondata_storage/1817427/6b9d6900149bd623684788c18b56b70ca178d680/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1597364/bf005de2d34f6496206315fd4a9226fb6f90b88a/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1925842/ed80ec5985c654eed9c46bc1e3ce1c860557e572/resource.tar.gz{, .log} |69.8%| [TS] {RESULT} ydb/tests/functional/audit/import_test |69.8%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test |69.8%| [DL] $(B)/canondata_storage/1880306/b7c0983a1c6c9c608654f7a228532df5441ad227/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1781765/e6ccfc9e44a62c32a107d9b796d30e78c8539094/resource.tar.gz{, .log} |69.8%| [DL] 
$(B)/canondata_storage/1942415/0be95092588fe1a5379e1336687f83ad5f8d20f7/resource.tar.gz{, .log} |69.8%| [TS] {RESULT} ydb/tests/fq/common/import_test |69.8%| [DL] $(B)/canondata_storage/1931696/c6aa257a7050331fd824bbdb0d587a5a0f000ab3/resource.tar.gz{, .log} |69.8%| [TS] {RESULT} ydb/library/yaml_config/ut/unittest |69.8%| [DL] $(B)/canondata_storage/1937027/9074da5ec3159ab717d6f0fee0639313448b4579/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1937367/dbee9962f462acf3732a651327b1b87b5361f327/resource.tar.gz{, .log} >> TYardTest::TestChunkUnlock [GOOD] >> TYardTest::TestChunkUnlockHarakiri |69.8%| [DL] $(B)/canondata_storage/1903280/e8f3ad772a90a9a3975d3f5f482904d0052c1f16/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1925842/c5c946201c75187c543428fea19a69e208afda3e/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1942415/4efc96736f3d5e3406745ae6daac7330e100c4f4/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1942415/69832751508a31a66677889fced4735a42f62092/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1937429/5c4bfbf1589eb61d7300d31dac8b0581c1292c14/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1936842/2efaf15f30b906d1247f2ec0553f1f18fd6acee8/resource.tar.gz{, .log} |69.8%| [TS] {RESULT} ydb/tools/statistics_workload/import_test |69.8%| [DL] $(B)/canondata_storage/1773845/461e7989a09a65be78c660f6a49d876212096306/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1899731/e198b7ef79a77ed789fe5760772f063e72267539/resource.tar.gz{, .log} |69.8%| [TS] {RESULT} ydb/tests/functional/sqs/large/import_test |69.8%| [DL] $(B)/canondata_storage/1923547/e4e818b787fc28bd7492f949b98701f356713fca/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1916746/d3717a9cac09b32a4d5ddfdae32677177e3620f2/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1936273/0cce21b284076a33a7d8bf253f8daebd8c196efa/resource.tar.gz{, .log} |69.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 >> TYardTest::TestChunkUnlockHarakiri [GOOD] >> TYardTest::TestChunkUnlockRestart |69.8%| [DL] $(B)/canondata_storage/937458/cb7ada421497d2e974c2fde615e498ee3c1fe8cf/resource.tar.gz{, .log} |69.8%| [TS] {RESULT} ydb/core/config/validation/ut/unittest |69.8%| [DL] $(B)/canondata_storage/1880306/b8a146dff266e2b5388e4e9ae22aa20c1b4fbc64/resource.tar.gz{, .log} |69.8%| [DL] $(B)/canondata_storage/1936273/8ae056ade64c358b3bb34eda82c0969989c5ee85/resource.tar.gz{, .log} |69.9%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |69.9%| [DL] $(B)/canondata_storage/1871182/035cffc2aba7adbed541caae65e59e0ddf49b527/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1942100/ea5d6f04ae9c974212a40f8582f093e394dd64d6/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1942671/580c7dcd26532517044d16107182929c3788d099/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1871102/8763764d391af8a7276b9788d89479d09d42e9ed/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1899731/b0129b6b11699859a431a34fdada149de2dbc7ef/resource.tar.gz{, .log} >> TYardTest::TestChunkUnlockRestart [GOOD] |69.8%| [DL] $(B)/canondata_storage/1942278/5982c0f62a0472a9822a5612ad84d4aae9998491/resource.tar.gz{, .log} |69.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |69.9%| [DL] $(B)/canondata_storage/937458/301394d7110b98554eb68f02df41115069509de8/resource.tar.gz{, .log} |69.9%| [TS] {RESULT} ydb/tests/functional/script_execution/import_test |69.9%| [DL] 
$(B)/canondata_storage/1925842/3b19aeedb10a29b1dbd9b746d8269c577ec91ea2/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1871182/221936951ba4de2ea4362b03723f3995ef8d3fe7/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1903885/4a384ef3fd6e8cf628d678d9322eef7d381022a7/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1600758/945d8bd5a89c655f23736d13a6248011860cd506/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1931696/59c974a5d18c41e65f27bd82416d6f8307cc1616/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1936997/ad7538cf8edf8e81865f7eee42c2de851daf1211/resource.tar.gz{, .log} |69.9%| [DL] $(B)/canondata_storage/1936842/ef250a2d1fa4278f450bd1ca39ae94b0e4ccec23/resource.tar.gz{, .log} |69.9%| [TS] {RESULT} ydb/tests/functional/large_serializable/import_test |69.9%| [TS] {RESULT} ydb/tools/statistics_workload/flake8 |69.9%| [DL] $(B)/canondata_storage/1773845/9450e6a9e418f128c33ccd34fc163a655ef7efb6/resource.tar.gz{, .log} >> TYardTest::TestChunkReserve >> TYardTest::TestChunkReserve [GOOD] |69.9%| [TS] {RESULT} ydb/tests/functional/dynumber/import_test |69.9%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test |69.9%| [DL] $(B)/canondata_storage/1900335/8db5941a4ed2bc94d6ae42d0eae7b6c741fa5a59/resource.tar.gz{, .log} >> TYardTest::TestChunkRecommit |69.9%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/import_test |69.9%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test |69.9%| [TS] {RESULT} ydb/core/fq/libs/http_api_client/flake8 |69.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 |70.0%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 >> TYardTest::TestChunkRecommit [GOOD] >> TYardTest::TestChunkRestartRecommit |70.0%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |70.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |70.0%| [TS] {RESULT} ydb/public/tools/ydb_recipe/import_test |70.0%| [TS] {RESULT} ydb/tests/functional/api/import_test |70.0%| [TS] {RESULT} ydb/core/fq/libs/hmac/ut/unittest |70.0%| [TS] {RESULT} ydb/tests/functional/tpc/import_test ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/mrrun/mrrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'longjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_broadcast' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_detach' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'pthread_mutex_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_timedlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_once' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_rdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedwrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_tryrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_trywrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_wrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'puts' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined 
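
The ld.lld diagnostics running through this part of the log are produced when the link is driven with a GNU-style symbol version script whose "global:" list names symbols that nothing in the link actually defines; lld prints one "version script assignment of 'global' to symbol '<name>' failed: symbol not defined" line per missing name. These are warnings rather than errors, so they do not by themselves fail the link, and the symbol set here (mostly libc and pthread entry points) is typical of an interceptor/allocator export list. A minimal sketch that reproduces the same class of warning follows; the file names and the symbol "interposed_but_missing" are illustrative assumptions, not taken from this build.

# hypothetical repro, assuming clang++ with lld available on PATH
cat > main.cpp <<'EOF'
int main() { return 0; }   // defines main() and nothing else
EOF
cat > exports.map <<'EOF'
{
  global:
    main;
    interposed_but_missing;   /* listed as exported, but never defined */
  local:
    *;
};
EOF
clang++ -fuse-ld=lld -Wl,--version-script=exports.map main.cpp -o demo
# expected diagnostic, same shape as the ones in this log:
#   ld.lld: warning: version script assignment of 'global' to symbol
#   'interposed_but_missing' failed: symbol not defined
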
ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigemptyset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigfillset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'siglongjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signalfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigpending' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigprocmask' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsuspend' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigtimedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwaitinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincos' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socket' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasestr' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchrnul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'unlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'usleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dladdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'fcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'forkpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat64' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'gcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrusage' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment 
of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mallinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'malloc_stats' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mallopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbrtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memccpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'openpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_key_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'putenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigemptyset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigfillset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigpending' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigprocmask' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigtimedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwaitinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincos' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'stpcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasestr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchr' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchrnul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol 'strtold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'uname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcslen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol '__wcstof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/mrrun/mrrun |70.0%| [TS] {RESULT} 
ydb/tests/functional/serializable/import_test |70.0%| [LD] {RESULT} $(B)/ydb/library/yql/tools/mrrun/mrrun |70.0%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/import_test |70.0%| [TS] {RESULT} ydb/apps/dstool/import_test |70.0%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 >> TYardTest::TestChunkRestartRecommit [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep |70.0%| [TA] $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.0%| [TS] {RESULT} ydb/core/base/generated/ut/unittest |70.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 |70.0%| [TS] {RESULT} ydb/tests/functional/serverless/import_test |70.0%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |70.0%| [TS] {RESULT} ydb/public/tools/ydb_recipe/flake8 |70.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |70.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 |70.0%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/flake8 |70.0%| [TA] $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |70.0%| [TA] $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |70.0%| [TS] {RESULT} ydb/core/fq/libs/signer/ut/unittest |70.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 |70.0%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 |70.1%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/flake8 |70.1%| [TS] {RESULT} ydb/library/yql/providers/s3/common/ut/unittest |70.1%| [TS] {RESULT} ydb/public/tools/local_ydb/import_test |70.1%| [TS] {RESULT} ydb/tests/fq/restarts/import_test |70.1%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |70.1%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |70.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.1%| [TA] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.1%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |70.1%| [TA] {RESULT} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |70.1%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_builder/unittest |70.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |70.2%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |70.2%| [TS] {RESULT} ydb/tests/functional/tpc/flake8 |70.2%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |70.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |70.2%| [TS] {RESULT} ydb/mvp/core/ut/unittest |70.5%| [TS] {RESULT} ydb/core/formats/arrow/ut/unittest >> BootstrapTabletsValidatorTests::TestUnknownNodeForTablet [GOOD] >> NameserviceConfigValidatorTests::TestAddNewNode [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingHostPort [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] >> StatsFormat::AggregateStat [GOOD] |71.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.4%| [TA] {RESULT} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TYardTest::TestDamagedFirstRecordToKeep [GOOD] >> TYardTest::TestDamageAtTheBoundary >> TActorTest::TestSendEvent [GOOD] >> TActorTest::TestSendAfterDelay |71.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |71.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |71.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] >> TActorTest::TestHandleEvent [GOOD] >> TActorTest::TestGetCtxTime >> TActorTest::TestGetCtxTime [GOOD] |71.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |71.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |71.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |72.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] |72.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] |72.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestGetCtxTime [GOOD] >> TYardTest::TestDamageAtTheBoundary [GOOD] >> TYardTest::TestCutMultipleLogChunks >> TYardTest::TestCutMultipleLogChunks [GOOD] >> TYardTest::TestChunkPriorityBlock >> TYardTest::TestChunkPriorityBlock [GOOD] |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |73.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkPriorityBlock [GOOD] Test command err: 2024-11-21T17:42:22.125231Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.136542Z :BS_PDISK 
NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 13833470512285320998 MagicNextLogChunkReference: 14656146801109094490 MagicLogChunk: 3367272734721985027 MagicDataChunk: 1633809735995749896 MagicSysLogChunk: 534278137697604279 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942073920 (2024-11-21T17:42:22.073920Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.148816Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.156324Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.156540Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.160077Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.160303Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.160434Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.220606Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1817026 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.359013Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.372072Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 110042305933631310 MagicNextLogChunkReference: 9511546497542289538 MagicLogChunk: 9715112393642764093 MagicDataChunk: 10806088101866144844 MagicSysLogChunk: 2068939199964283732 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942266889 (2024-11-21T17:42:22.266889Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.374079Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 
NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.375257Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.375282Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.375410Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.375525Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.375573Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.448627Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1141336 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:28.238667Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:28.248972Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 5474071918888376153 MagicNextLogChunkReference: 12692589271813476710 MagicLogChunk: 17173117095538223350 MagicDataChunk: 1440526852159582727 MagicSysLogChunk: 14902797336831601834 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210948158424 (2024-11-21T17:42:28.158424Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:28.260898Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:28.268074Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:28.268109Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:28.268286Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:28.268398Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:28.268443Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:28.332603Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1843965 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:30.052930Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:30.064637Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 3609008317725584671 MagicNextLogChunkReference: 17998455274132931881 MagicLogChunk: 8663465160118385247 MagicDataChunk: 10620818305077175860 MagicSysLogChunk: 18004121621495049579 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210949999028 (2024-11-21T17:42:29.999028Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:30.072838Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:30.080600Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:30.080633Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:30.080848Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.081169Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:30.081361Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:30.152315Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2007296 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:33.488413Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:33.500460Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat 
Version: 3 DiskSize: 5242880000 bytes (5 GB) Guid: 3801639521636233112 MagicNextLogChunkReference: 1773036482910905224 MagicLogChunk: 6847566816001823258 MagicDataChunk: 2968673186464225143 MagicSysLogChunk: 16865672772782418704 MagicFormatChunk: 17332287817462050952 ChunkSize: 6291456 bytes (6 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210953464017 (2024-11-21T17:42:33.464017Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:33.501777Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:33.502758Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024 ... cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 17326457029182971858 MagicNextLogChunkReference: 14636358428594840650 MagicLogChunk: 5856638672768569999 MagicDataChunk: 7639742029950637459 MagicSysLogChunk: 10911161843632962116 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210970836790 (2024-11-21T17:42:50.836790Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:51.576381Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 5607234 NonceLog# 4618188 NonceData# 5156326} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:51.584537Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:51.618687Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1776 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 7274496} PDiskId# 1 2024-11-21T17:42:51.618714Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 7274496} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:51.620433Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 13-15 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:51.620803Z 
:BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 13-15 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:42:51.620872Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:51.632539Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:42:51.721886Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:51.752273Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T17:42:51.756447Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T17:42:51.900416Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:51.900440Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T17:42:51.913173Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 6566564799738064240 MagicLogChunk: 11675867853687234048 MagicDataChunk: 6725792956568046156 MagicSysLogChunk: 16548805107368515454 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210971795836 (2024-11-21T17:42:51.795836Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:51.933185Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:51.944169Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:51.944203Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:51.945473Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:51.945691Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:51.945760Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:51.950328Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} 
New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2064914 CutLogId# [1:7439789473587826927:2050] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:52.031292Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},{chunkIdx# 2 users# 0 endOfSplice# 0},{chunkIdx# 3 users# 0 endOfSplice# 0},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T17:42:52.031308Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T17:42:52.073731Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:52.096493Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 6566564799738064240 MagicLogChunk: 11675867853687234048 MagicDataChunk: 6725792956568046156 MagicSysLogChunk: 16548805107368515454 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210971795836 (2024-11-21T17:42:51.795836Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:52.104414Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1729300 NonceLog# 2066715 NonceData# 1929068} LogHeadChunkIdx# 4 LogHeadChunkPreviousNonce# 2066442 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:52.133063Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 4 SectorIdx# 273 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 4 OffsetInChunk# 1118208} PDiskId# 1 2024-11-21T17:42:52.133090Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4 OffsetInChunk# 1118208} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:52.133276Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-4 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T17:42:52.133538Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-4 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T17:42:52.133645Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:52.168608Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:42:52.461717Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 
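The TDiskFormat records repeated throughout this PDisk startup output expose DiskSize, Guid, ChunkSize and SectorSize in plain text. A minimal parsing sketch in Python, assuming only the field layout visible in the lines above (the helper below is illustrative and not part of YDB or ya):

import re

# Illustrative helper, not part of YDB or ya: extract a few numeric fields from a
# "Successfully read format record" line like the ones printed by BS_PDISK above.
def parse_format_record(line: str) -> dict:
    fields = {
        "disk_size": r"DiskSize: (\d+) bytes",
        "guid": r"Guid: (\d+)",
        "chunk_size": r"ChunkSize: (\d+) bytes",
        "sector_size": r"SectorSize: (\d+)",
    }
    out = {}
    for name, pattern in fields.items():
        m = re.search(pattern, line)
        if m is None:
            raise ValueError(f"field {name} not found in TDiskFormat record")
        out[name] = int(m.group(1))
    return out

# Values copied from the 1 GB record above:
sample = ("Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 "
          "ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64}")
print(parse_format_record(sample))
# -> {'disk_size': 1658880000, 'guid': 0, 'chunk_size': 2097152, 'sector_size': 4096}

Applied to this run, the records range from 2 MB chunks on a 1 GB test disk up to 136 MB chunks on a 134 GB disk in the record that follows.
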
2024-11-21T17:42:52.479518Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16303567871023932635 MagicNextLogChunkReference: 16661432962055267536 MagicLogChunk: 3223878761131414900 MagicDataChunk: 14886623300341813922 MagicSysLogChunk: 13519303830977403833 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210972326568 (2024-11-21T17:42:52.326568Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:52.491271Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:52.493246Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:52.493269Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:52.504839Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:52.506289Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:52.506469Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:52.552117Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1492434 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |73.5%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |73.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/blobsan/blobsan |73.5%| [LD] {RESULT} $(B)/ydb/tools/blobsan/blobsan >> test.py::test[pg_catalog-pg_auth_members-default.txt-Debug] |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |73.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit >> test.py::test[action-dep_world_action_quote-default.txt-Analyze] >> test.py::test[insert-append_sorted-to_sorted-Debug] |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |73.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Results] >> test.py::test[select-corr_name_in_select_seq-default.txt-Plan] >> test.py::test[join-two_aggrs-default.txt-Analyze] >> test.py::test[action-eval_column--Debug] >> test.py::test[pg-select_join_right_one-default.txt-Debug] >> test.py::test[pg-pg_types_literal_with_length-default.txt-Results] >> test.py::test[lineage-if_struct-default.txt-Debug] >> test.py::test[insert-multiappend_sorted-default.txt-Plan] >> test.py::test[lineage-if_struct-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-if_struct-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-isolated-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-isolated-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-isolated-default.txt-Results] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Results] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-union_all_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[match_recognize-simple_paritioning-streaming-default.txt-Debug] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Debug] >> test.py::test[pg-tpcds-q89-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Plan] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary--Debug] 
>> test.py::test[pg_catalog-pg_auth_members-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_auth_members-default.txt-ForceBlocks] >> test.py::test[compute_range-in3-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-in3-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-in3-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-multiply_limit_with_nulls-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-multiply_limit_with_nulls-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-multiply_limit_with_nulls-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-norange-default.txt-Results] [SKIPPED] >> test.py::test[count-boolean_count--Debug] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group >> test.py::test[optimizers-nonselected_direct_row--Debug] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut >> test.py::test[aggr_factory-avg_if-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-12620_stage_multiuse--ForceBlocks] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut >> test.py::test[coalesce-coalesce--Results] >> test.py::test[action-dep_world_action_quote-default.txt-Analyze] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Debug] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/blobsan/blobsan >> test.py::test[solomon-Subquery-default.txt-Plan] >> test.py::test[action-action_eval_cluster_table_for--Debug] >> test.py::test[solomon-Subquery-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-Subquery-default.txt-Results] [SKIPPED] >> test.py::test[table_range-concat_with_view--Debug] >> test.py::test[blocks-combine_all_pg_filter--Debug] >> test.py::test[action-dep_world_quote_code-default.txt-Debug] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut >> test.py::test[aggr_factory-transform_input-default.txt-Debug] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client >> test.py::test[select-corr_name_in_select_seq-default.txt-Plan] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] >> test.py::test[join-two_aggrs-default.txt-Analyze] [GOOD] >> test.py::test[join-two_aggrs-default.txt-Debug] >> test.py::test[produce-reduce_lambda-default.txt-Results] >> test.py::test[join-premap_common_right_tablecontent--Debug] >> test.py::test[match_recognize-simple_paritioning-streaming-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-simple_paritioning-streaming-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-simple_paritioning-streaming-default.txt-Results] >> test.py::test[pg_catalog-pg_auth_members-default.txt-ForceBlocks] [GOOD] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut >> test.py::test[expr-dict_comp-default.txt-Debug] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Results] >> 
test.py::test[expr-to_sorted_set_tuple_key-default.txt-Debug] >> test.py::test[join-premap_map_cross-off-Analyze] >> test.py::test[table_range-concat_sorted_max_tables--Plan] >> test.py::test[pg-select_join_right_one-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_right_one-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_right_one-default.txt-Results] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Results] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Debug] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] >> test.py::test[pg-pg_types_literal_with_length-default.txt-Results] [GOOD] >> test.py::test[pg-point-default.txt-Analyze] >> test.py::test[binding-table_range_binding-default.txt-Debug] |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |73.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit >> test.py::test[optimizers-yql-6038_direct_row--Debug] >> test.py::test[expr-struct_gather_spread-default.txt-Debug] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Debug] >> test.py::test[optimizers-nonselected_direct_row--Debug] [GOOD] >> test.py::test[join-mergejoin_small_primary--Debug] [GOOD] >> test.py::test[join-mergejoin_small_primary--Plan] [GOOD] >> test.py::test[join-mergejoin_small_primary--Results] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Analyze] >> test.py::test[pg-select_between-default.txt-Debug] >> test.py::test[optimizers-nonselected_direct_row--Plan] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-Analyze] >> test.py::test[optimizers-nonselected_direct_row--Plan] [GOOD] >> test.py::test[optimizers-nonselected_direct_row--Results] >> test.py::test[insert-append_sorted-to_sorted-Debug] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Plan] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] >> test.py::test[match_recognize-simple_paritioning-streaming-default.txt-Results] [GOOD] >> test.py::test[match_recognize-test_type_predicate--Debug] [SKIPPED] >> test.py::test[match_recognize-test_type_predicate--Plan] >> test.py::test[count-boolean_count--Debug] [GOOD] >> test.py::test[count-boolean_count--Plan] [GOOD] >> test.py::test[count-boolean_count--Results] >> test.py::test[match_recognize-test_type_predicate--Plan] [SKIPPED] >> test.py::test[match_recognize-test_type_predicate--Results] >> TBtreeIndexTPartLarge::SmallKeys1GB [GOOD] >> TBtreeIndexTPartLarge::MiddleKeys1GB >> test.py::test[aggr_factory-transform_input-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-transform_input-default.txt-Results] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Plan] >> test.py::test[pg-point-default.txt-Analyze] [GOOD] >> test.py::test[pg-point-default.txt-Debug] >> test.py::test[join-premap_map_cross-off-Analyze] [GOOD] >> test.py::test[join-premap_map_cross-off-Debug] >> test.py::test[join-two_aggrs-default.txt-Debug] [GOOD] >> test.py::test[join-two_aggrs-default.txt-ForceBlocks] >> test.py::test[action-dep_world_action_quote-default.txt-Debug] [GOOD] >> 
test.py::test[action-dep_world_action_quote-default.txt-ForceBlocks] >> test.py::test[action-action_eval_cluster_table_for--Debug] [GOOD] >> test.py::test[action-action_eval_cluster_table_for--Plan] [GOOD] >> test.py::test[action-action_eval_cluster_table_for--Results] >> test.py::test[coalesce-coalesce--Results] [GOOD] >> test.py::test[expr-dict_comp-default.txt-Debug] [GOOD] >> test.py::test[expr-dict_comp-default.txt-Plan] >> test.py::test[pg-select_join_right_one-default.txt-Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Analyze] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Debug] >> test.py::test[column_group-hint_anon-disable-Analyze] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-Debug] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] >> test.py::test[column_group-publish-perusage-Analyze] [SKIPPED] >> test.py::test[column_group-publish-perusage-Debug] [SKIPPED] >> test.py::test[binding-table_range_binding-default.txt-Debug] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Plan] >> test.py::test[expr-dict_comp-default.txt-Plan] [GOOD] >> test.py::test[expr-dict_comp-default.txt-Results] >> test.py::test[pg-select_plusminus-default.txt-Debug] >> test.py::test[column_group-publish-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-publish-perusage-Plan] [SKIPPED] >> test.py::test[column_group-publish-perusage-Results] [SKIPPED] >> test.py::test[column_order-align_publish_native--Analyze] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-Debug] >> test.py::test[binding-table_range_binding-default.txt-Plan] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] >> test.py::test[pg-range_function_multi-default.txt-ForceBlocks] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock >> test.py::test[table_range-concat_sorted_max_tables--Plan] [GOOD] >> test.py::test[table_range-concat_sorted_max_tables--Results] >> test.py::test[pg-tpcds-q89-default.txt-Results] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Debug] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Plan] [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Results] >> test.py::test[pg-select_between-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-test_type_predicate--Results] [GOOD] >> test.py::test[pg_catalog-lambda--Debug] >> test.py::test[optimizers-field_subset_for_multiusage--Debug] >> test.py::test[produce-process_lambda_opt_args-default.txt-Debug] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Debug] [GOOD] >> test.py::test[pg-select_between-default.txt-Plan] [GOOD] >> test.py::test[pg-select_between-default.txt-Results] >> test.py::test[join-premap_common_right_tablecontent--Plan] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] >> test.py::test[expr-to_sorted_set_tuple_key-default.txt-Results] [GOOD] >> test.py::test[expr-yql-15485-default.txt-Debug] >> test.py::test[join-mergejoin_small_primary--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Debug] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_inner--Plan] [SKIPPED] >> 
test.py::test[join-mergejoin_sorts_output_for_sort_inner--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_norename--Debug] >> test.py::test[aggr_factory-transform_input-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Debug] >> test.py::test[produce-process_lambda_opt_args-default.txt-Plan] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] >> test.py::test[optimizers-nonselected_direct_row--Results] [GOOD] >> test.py::test[optimizers-wide_if_present_over_double_just-default.txt-Debug] >> test.py::test[insert-append_sorted-to_sorted-Results] [GOOD] >> test.py::test[insert-insert_from_other--Debug] >> test.py::test[action-action_eval_cluster_table_for--Results] [GOOD] >> test.py::test[action-action_opt_args-default.txt-Debug] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-Analyze] >> test.py::test[table_range-concat_sorted_max_tables--Results] [GOOD] >> test.py::test[table_range-range_with_view--Analyze] >> test.py::test[pg-select_between-default.txt-Results] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Debug] >> test.py::test[join-premap_map_cross-off-Debug] [GOOD] >> test.py::test[join-premap_map_cross-off-ForceBlocks] >> test.py::test[count-boolean_count--Results] [GOOD] >> test.py::test[count-count_by_nulls--Debug] >> test.py::test[join-premap_map_cross-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_map_cross-off-Plan] [GOOD] >> test.py::test[join-premap_map_cross-off-Results] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Analyze] >> test.py::test[table_range-concat_with_view--Debug] [GOOD] >> test.py::test[table_range-concat_with_view--Plan] [GOOD] >> test.py::test[table_range-concat_with_view--Results] >> test.py::test[pg-point-default.txt-Debug] [GOOD] >> test.py::test[pg-point-default.txt-ForceBlocks] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[binding-tie_bad_count_fail--Debug] [SKIPPED] >> test.py::test[select-dot_name_subrequest-default.txt-Debug] [GOOD] >> test.py::test[column_order-align_publish_native--Analyze] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-ForceBlocks] >> test.py::test[column_order-align_publish_native--Debug] >> test.py::test[pg-select_plusminus-default.txt-Debug] [GOOD] >> test.py::test[pg-select_plusminus-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-ForceBlocks] >> test.py::test[join-two_aggrs-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_plusminus-default.txt-Results] >> test.py::test[join-two_aggrs-default.txt-Plan] [GOOD] >> test.py::test[join-two_aggrs-default.txt-Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Plan] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] >> test.py::test[binding-tie_bad_count_fail--Plan] [SKIPPED] >> test.py::test[binding-tie_bad_count_fail--Results] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Debug] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Plan] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] >> test.py::test[expr-yql-15485-default.txt-Debug] [GOOD] >> test.py::test[expr-yql-15485-default.txt-Plan] [GOOD] >> 
test.py::test[expr-yql-15485-default.txt-Results] >> test.py::test[aggr_factory-avg_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Plan] >> test.py::test[expr-dict_comp-default.txt-Results] [GOOD] >> test.py::test[expr-empty_dict_ops-default.txt-Debug] >> test.py::test[action-dep_world_action_quote-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Plan] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] >> test.py::test[aggr_factory-avg_if-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Results] >> test.py::test[action-eval_column--Debug] [GOOD] >> test.py::test[action-eval_column--Plan] [GOOD] >> test.py::test[action-eval_column--Results] >> test.py::test[action-action_opt_args-default.txt-Debug] [GOOD] >> test.py::test[action-action_opt_args-default.txt-Plan] [GOOD] >> test.py::test[action-action_opt_args-default.txt-Results] >> test.py::test[pg-select_plusminus-default.txt-Results] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Debug] >> test.py::test[produce-reduce_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Debug] >> test.py::test[blocks-combine_all_pg_filter--Debug] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Plan] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_python_as_struct-default.txt-Debug] [SKIPPED] >> test.py::test[produce-process_with_python_as_struct-default.txt-Plan] >> test.py::test[select-use_cluster-default.txt-Analyze] [GOOD] >> test.py::test[select-use_cluster-default.txt-Debug] >> test.py::test[produce-process_with_python_as_struct-default.txt-Plan] [SKIPPED] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] [SKIPPED] >> test.py::test[produce-process_with_udf_validate-default.txt-Debug] >> test.py::test[action-dep_world_quote_code-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-lambda--Debug] [GOOD] >> test.py::test[pg_catalog-lambda--Plan] [GOOD] >> test.py::test[pg_catalog-lambda--Results] >> test.py::test[action-eval_atom_wrong_type_expr--Debug] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Plan] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_each_input_table-default.txt-Debug] >> test.py::test[optimizers-wide_if_present_over_double_just-default.txt-Debug] [GOOD] >> test.py::test[optimizers-wide_if_present_over_double_just-default.txt-Plan] [GOOD] >> test.py::test[optimizers-wide_if_present_over_double_just-default.txt-Results] >> test.py::test[insert-insert_from_other--Debug] [GOOD] >> test.py::test[insert-insert_from_other--Plan] [GOOD] >> test.py::test[insert-insert_from_other--Results] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Analyze] >> test.py::test[table_range-range_with_view--Analyze] [GOOD] >> test.py::test[table_range-range_with_view--Debug] >> test.py::test[join-premap_common_right_tablecontent--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-Debug] [SKIPPED] >> test.py::test[pg-point-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-point-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_gather_spread-default.txt-Debug] [GOOD] >> test.py::test[expr-struct_gather_spread-default.txt-ForceBlocks] >> 
test.py::test[pg_catalog-pg_timezone_names-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-Results] >> test.py::test[binding-tie_bad_count_fail--Results] [GOOD] >> test.py::test[blocks-bitcast_block--Debug] >> test.py::test[optimizers-yql-6038_direct_row--Debug] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Plan] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Results] >> test.py::test[join-premap_common_right_tablecontent-off-Plan] [SKIPPED] >> test.py::test[join-premap_common_right_tablecontent-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_inner--Debug] >> test.py::test[pg-point-default.txt-Results] >> test.py::test[action-action_opt_args-default.txt-Results] [GOOD] >> test.py::test[action-define_simple_action-default.txt-Debug] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[table_range-each_with_non_existing-all_fail-Debug] [SKIPPED] >> test.py::test[table_range-each_with_non_existing-all_fail-Plan] [SKIPPED] >> test.py::test[table_range-each_with_non_existing-all_fail-Results] >> test.py::test[join-premap_merge_extrasort1--Analyze] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Debug] >> test.py::test[expr-yql-15485-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--Debug] >> test.py::test[count-count_by_nulls--Debug] [GOOD] >> test.py::test[count-count_by_nulls--Plan] [GOOD] >> test.py::test[count-count_by_nulls--Results] >> test.py::test[pg-range_function_multi-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-range_function_multi-default.txt-Plan] [GOOD] >> test.py::test[pg-range_function_multi-default.txt-Results] >> test.py::test[optimizers-wide_if_present_over_double_just-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Debug] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Debug] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Plan] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Results] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Debug] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Plan] [SKIPPED] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Debug] >> test.py::test[pg-select_is_null-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_timezone_names-default.txt-Results] [GOOD] >> test.py::test[pragma-classic_division-default.txt-Analyze] >> test.py::test[pg-select_is_null-default.txt-Plan] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Results] >> test.py::test[pg_catalog-lambda--Results] [GOOD] >> test.py::test[pg_catalog-pg_class-default.txt-Debug] >> test.py::test[insert-insert_from_other--Results] [GOOD] >> test.py::test[insert-replace_inferred--Debug] >> test.py::test[tpch-q1-default.txt-Plan] [GOOD] >> test.py::test[tpch-q1-default.txt-Results] >> test.py::test[select-dot_name_subrequest-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Plan] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] >> test.py::test[optimizers-field_subset_for_multiusage--Debug] [GOOD] >> test.py::test[optimizers-field_subset_for_multiusage--Plan] [GOOD] >> 
test.py::test[optimizers-field_subset_for_multiusage--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Analyze] [GOOD] >> test.py::test[expr-empty_dict_ops-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Debug] >> test.py::test[expr-empty_dict_ops-default.txt-Plan] [GOOD] >> test.py::test[expr-empty_dict_ops-default.txt-Results] >> test.py::test[pg-point-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Analyze] >> test.py::test[produce-process_with_udf_validate-default.txt-Debug] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Plan] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] >> test.py::test[column_order-align_publish_native--Debug] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Plan] [GOOD] >> test.py::test[pg-range_function_multi-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Analyze] >> test.py::test[action-eval_each_input_table-default.txt-Debug] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Plan] [GOOD] >> test.py::test[action-eval_each_input_table-default.txt-Results] >> test.py::test[action-define_simple_action-default.txt-Debug] [GOOD] >> test.py::test[action-define_simple_action-default.txt-Plan] [GOOD] >> test.py::test[action-define_simple_action-default.txt-Results] >> test.py::test[column_order-align_publish_native--ForceBlocks] >> test.py::test[aggr_factory-avg_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-Analyze] >> test.py::test[insert-multiappend_sorted-default.txt-Results] >> test.py::test[table_range-range_with_view--Debug] [GOOD] >> test.py::test[table_range-range_with_view--ForceBlocks] >> test.py::test[select-use_cluster-default.txt-Debug] [GOOD] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted--Debug] >> test.py::test[action-dep_world_action_quote-default.txt-Results] [GOOD] >> test.py::test[action-eval_astagged-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] Test command err: testing erasure block-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 47 us testing erasure stripe-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 108861 us testing erasure block-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 597089 us testing erasure 
stripe-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 48 us testing erasure stripe-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 7168 us testing erasure stripe-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 592819 us >> test.py::test[pg-select_is_null-default.txt-Results] [GOOD] >> test.py::test[pg-select_literals-default.txt-Debug] >> test.py::test[pg-select_proj_ref_group_by_expr-default.txt-Results] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_star-default.txt-Debug] >> test.py::test[blocks-bitcast_block--Debug] [GOOD] >> test.py::test[blocks-bitcast_block--Plan] [GOOD] >> test.py::test[blocks-bitcast_block--Results] >> test.py::test[table_range-each_with_non_existing-all_fail-Results] [GOOD] >> test.py::test[table_range-range_over_filter--Debug] [SKIPPED] >> test.py::test[table_range-range_over_filter--Plan] [SKIPPED] >> test.py::test[table_range-range_over_filter--Results] [SKIPPED] >> test.py::test[tpch-q14-default.txt-Debug] >> test.py::test[join-two_aggrs-default.txt-Results] [GOOD] >> test.py::test[join-yql-8980-off-Analyze] >> test.py::test[blocks-combine_all_pg_filter--Results] [GOOD] >> test.py::test[blocks-date_top_sort--Debug] >> test.py::test[count-count_by_nulls--Results] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Debug] >> test.py::test[produce-reduce_multi_in-empty-Debug] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Plan] [GOOD] >> test.py::test[produce-reduce_multi_in-empty-Results] >> test.py::test[expr-empty_dict_ops-default.txt-Results] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_multiline_atoms-default.txt-Debug] >> test.py::test[join-premap_merge_extrasort1--Debug] [GOOD] >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] >> test.py::test[pg_catalog-pg_class-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_class-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_class-default.txt-Results] >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[action-eval_filter--Debug] >> test.py::test[join-premap_merge_inner--Debug] [GOOD] >> test.py::test[join-premap_merge_inner--Plan] [GOOD] >> test.py::test[join-premap_merge_inner--Results] >> test.py::test[select-dot_name_subrequest-default.txt-Results] [GOOD] >> test.py::test[select-two_select_without_semi-default.txt-Analyze] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_flat_python_stream-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_with_flat_python_stream-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_with_flat_python_stream-default.txt-Results] [SKIPPED] >> test.py::test[sampling-bind_default-default.txt-Debug] >> test.py::test[action-define_simple_action-default.txt-Results] [GOOD] >> test.py::test[action-eval_capture--Debug] >> test.py::test[pragma-classic_division-default.txt-Analyze] [GOOD] >> 
test.py::test[pragma-classic_division-default.txt-Debug] >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Debug] >> test.py::test[insert-replace_inferred--Debug] [GOOD] >> test.py::test[insert-replace_inferred--Plan] [GOOD] >> test.py::test[insert-replace_inferred--Results] >> test.py::test[blocks-block_input_sys_columns--Debug] [SKIPPED] >> test.py::test[blocks-block_input_sys_columns--Plan] [SKIPPED] >> test.py::test[blocks-block_input_sys_columns--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Debug] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Plan] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--Debug] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Debug] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Debug] >> test.py::test[select-use_cluster-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-use_cluster-default.txt-Plan] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Debug] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--ForceBlocks] >> test.py::test[pg-select_proj_ref_group_by_star-default.txt-Debug] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_star-default.txt-Plan] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_star-default.txt-Results] >> test.py::test[select-use_cluster-default.txt-Plan] [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] >> test.py::test[action-eval_astagged-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_astagged-default.txt-Debug] >> test.py::test[aggr_factory-booland-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-Debug] >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q13-default.txt-Debug] >> test.py::test[insert-multiappend_sorted-default.txt-Results] [GOOD] >> test.py::test[insert_monotonic-truncate_fail--Analyze] [SKIPPED] >> test.py::test[insert_monotonic-truncate_fail--Debug] [SKIPPED] >> test.py::test[insert_monotonic-truncate_fail--ForceBlocks] [SKIPPED] >> test.py::test[insert_monotonic-truncate_fail--Plan] [SKIPPED] >> test.py::test[insert_monotonic-truncate_fail--Results] >> test.py::test[pg-select_literals-default.txt-Debug] [GOOD] >> test.py::test[pg-select_literals-default.txt-Plan] [GOOD] >> test.py::test[pg-select_literals-default.txt-Results] >> test.py::test[optimizers-field_subset_for_multiusage--Results] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Debug] >> test.py::test[table_range-range_with_view--ForceBlocks] [GOOD] >> test.py::test[table_range-range_with_view--Plan] [GOOD] >> test.py::test[table_range-range_with_view--Results] >> test.py::test[expr-struct_gather_spread-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-struct_gather_spread-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_gather_spread-default.txt-Results] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] >> test.py::test[join-selfjoin_on_sorted--Debug] [GOOD] >> 
test.py::test[join-selfjoin_on_sorted--Plan] [GOOD] >> test.py::test[join-selfjoin_on_sorted--Results] >> test.py::test[action-eval_column--Results] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Debug] >> test.py::test[join-yql-8980-off-Analyze] [GOOD] >> test.py::test[join-yql-8980-off-Debug] >> test.py::test[produce-reduce_multi_in-empty-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Debug] >> test.py::test[select-two_select_without_semi-default.txt-Analyze] [GOOD] >> test.py::test[select-two_select_without_semi-default.txt-Debug] >> test.py::test[count-count_no_grouping-default.txt-Debug] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Plan] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] >> test.py::test[expr-expr_named_yql_lambda_multiline_atoms-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_multiline_atoms-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_multiline_atoms-default.txt-Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Debug] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Plan] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] >> test.py::test[action-eval_capture--Debug] [GOOD] >> test.py::test[action-eval_filter--Debug] [GOOD] >> test.py::test[action-eval_filter--Plan] [GOOD] >> test.py::test[action-eval_capture--Plan] [GOOD] >> test.py::test[action-eval_filter--Results] >> test.py::test[action-eval_capture--Results] >> test.py::test[insert-replace_inferred--Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Debug] >> test.py::test[join-premap_merge_inner--Results] [GOOD] >> test.py::test[join-right_trivial--Debug] >> test.py::test[pg-select_literals-default.txt-Results] [GOOD] >> test.py::test[pg-select_plusminus_unary-default.txt-Debug] >> test.py::test[flatten_by-flatten_by_opt_dict--Debug] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--Plan] [GOOD] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--Debug] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename--Plan] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Analyze] >> test.py::test[column_order-align_publish_native--ForceBlocks] [GOOD] >> test.py::test[column_order-align_publish_native--Plan] [GOOD] >> test.py::test[column_order-align_publish_native--Results] >> test.py::test[pragma-classic_division-default.txt-Debug] [GOOD] >> test.py::test[pragma-classic_division-default.txt-ForceBlocks] >> test.py::test[pg-select_proj_ref_group_by_star-default.txt-Results] [GOOD] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Debug] >> test.py::test[join-premap_merge_extrasort1--ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Plan] >> test.py::test[tpch-q14-default.txt-Debug] [GOOD] >> test.py::test[tpch-q14-default.txt-Plan] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] >> test.py::test[join-premap_merge_extrasort1--Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Results] >> test.py::test[sampling-bind_default-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] >> test.py::test[action-eval_astagged-default.txt-Debug] [GOOD] >> 
test.py::test[action-eval_astagged-default.txt-ForceBlocks] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Debug] >> test.py::test[insert_monotonic-truncate_fail--Results] [GOOD] >> test.py::test[join-inner_with_order-off-Analyze] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-ForceBlocks] >> test.py::test[expr-expr_named_yql_lambda_multiline_atoms-default.txt-Results] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Debug] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-ForceBlocks] >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter--Debug] >> test.py::test[action-eval_capture--Results] [GOOD] >> test.py::test[action-eval_filter--Results] [GOOD] >> test.py::test[action-eval_drop--Debug] >> test.py::test[action-eval_resourcetype-default.txt-Debug] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Plan] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] >> test.py::test[select-two_select_without_semi-default.txt-Debug] [GOOD] >> test.py::test[select-two_select_without_semi-default.txt-ForceBlocks] >> test.py::test[pg_catalog-pg_class-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_database-default.txt-Debug] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Debug] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Plan] [GOOD] >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[tpch-q12-default.txt-Analyze] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] >> test.py::test[aggr_factory-booland-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-ForceBlocks] >> test.py::test[join-yql-8980-off-Debug] [GOOD] >> test.py::test[join-yql-8980-off-ForceBlocks] [SKIPPED] >> test.py::test[join-yql-8980-off-Plan] [GOOD] >> test.py::test[pg-select_plusminus_unary-default.txt-Debug] [GOOD] >> test.py::test[pg-select_plusminus_unary-default.txt-Plan] [GOOD] >> test.py::test[pg-select_plusminus_unary-default.txt-Results] >> test.py::test[column_order-align_publish_native--Results] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Analyze] >> test.py::test[join-yql-8980-off-Results] [GOOD] >> test.py::test[json-json_query/on_empty_exception--Analyze] [SKIPPED] >> test.py::test[json-json_query/on_empty_exception--Debug] [SKIPPED] >> test.py::test[json-json_query/on_empty_exception--ForceBlocks] [SKIPPED] >> test.py::test[json-json_query/on_empty_exception--Plan] [SKIPPED] >> test.py::test[json-json_query/on_empty_exception--Results] >> test.py::test[pragma-classic_division-default.txt-ForceBlocks] [GOOD] >> test.py::test[pragma-classic_division-default.txt-Plan] [GOOD] >> test.py::test[pragma-classic_division-default.txt-Results] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Debug] [GOOD] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Plan] [GOOD] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q1-default.txt-Debug] >> test.py::test[sampling-bind_default-default.txt-Results] [GOOD] >> 
test.py::test[sampling-bind_multiple_sample-default.txt-Debug] >> test.py::test[action-eval_input_output_table_subquery--Debug] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Plan] [GOOD] >> test.py::test[action-eval_input_output_table_subquery--Results] >> test.py::test[insert-trivial_literals-default.txt-Debug] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Plan] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Results] >> TBtreeIndexTPartLarge::MiddleKeys1GB [GOOD] >> TBtreeIndexTPartLarge::BigKeys1GB >> test.py::test[join-premap_merge_extrasort1--Results] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Analyze] >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> test.py::test[tpch-q2-default.txt-Debug] >> test.py::test[blocks-date_top_sort--Debug] [GOOD] >> test.py::test[blocks-date_top_sort--Plan] [GOOD] >> test.py::test[blocks-date_top_sort--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Debug] >> test.py::test[action-eval_astagged-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_astagged-default.txt-Plan] [GOOD] >> test.py::test[action-eval_astagged-default.txt-Results] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Debug] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Plan] [GOOD] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Results] >> test.py::test[join-inner_with_order-off-Analyze] [GOOD] >> test.py::test[join-inner_with_order-off-Debug] >> test.py::test[flatten_by-flatten_by_opt_dict--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_groupby--Debug] >> test.py::test[pg-select_plusminus_unary-default.txt-Results] [GOOD] >> test.py::test[pg-select_sort_project_expr-default.txt-Debug] >> test.py::test[action-eval_drop--Debug] [GOOD] >> test.py::test[action-eval_drop--Plan] [GOOD] >> test.py::test[action-eval_drop--Results] >> test.py::test[produce-reduce_multi_in_sampling--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling--Results] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Results] >> test.py::test[pg_catalog-pg_database-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_database-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_database-default.txt-Results] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Debug] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Plan] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] >> test.py::test[action-eval_resourcetype-default.txt-Debug] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-Plan] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-Results] >> test.py::test[select-two_select_without_semi-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-two_select_without_semi-default.txt-Plan] [GOOD] >> test.py::test[select-two_select_without_semi-default.txt-Results] >> test.py::test[pragma-classic_division-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_rows-default.txt-Analyze] [SKIPPED] >> test.py::test[produce-process_with_udf_rows-default.txt-Debug] [SKIPPED] >> 
test.py::test[produce-process_with_udf_rows-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-process_with_udf_rows-default.txt-Plan] [SKIPPED] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_presort--Analyze] >> test.py::test[expr-struct_gather_spread-default.txt-Results] [GOOD] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-Analyze] >> test.py::test[insert-trivial_literals-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--Debug] [SKIPPED] >> test.py::test[insert-use_anon_table_without_fill_fail--Plan] [SKIPPED] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] >> test.py::test[json-json_query/on_empty_exception--Results] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Analyze] >> test.py::test[join-right_trivial--Debug] [GOOD] >> test.py::test[join-right_trivial--Plan] [GOOD] >> test.py::test[join-right_trivial--Results] >> test.py::test[pg-select_agg_gs_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Results] >> test.py::test[tpch-q12-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q12-default.txt-Debug] >> test.py::test[aggregate-aggregate_distinct_expr_with_groupby_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Debug] >> test.py::test[action-eval_input_output_table_subquery--Results] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Debug] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Debug] >> test.py::test[action-eval_astagged-default.txt-Results] [GOOD] >> test.py::test[action-eval_ensuretype-default.txt-Analyze] >> test.py::test[join-selfjoin_on_sorted_with_filter--Debug] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter--Plan] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Analyze] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Debug] >> test.py::test[expr-implicit_cast_literals_under_if-default.txt-Results] [GOOD] >> test.py::test[expr-lds_empty_cast-default.txt-Debug] >> test.py::test[join-selfjoin_on_sorted_with_filter--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Analyze] >> test.py::test[join-premap_merge_extrasort1-off-Analyze] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Debug] >> test.py::test[action-eval_resourcetype-default.txt-Results] [GOOD] >> test.py::test[action-mixed_eval_typeof_world1--Debug] [SKIPPED] >> test.py::test[action-mixed_eval_typeof_world1--Plan] [SKIPPED] >> test.py::test[action-mixed_eval_typeof_world1--Results] >> test.py::test[tpch-q13-default.txt-Debug] [GOOD] >> test.py::test[tpch-q13-default.txt-Plan] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] >> test.py::test[select-two_select_without_semi-default.txt-Results] [GOOD] >> test.py::test[seq_mode-simple2-default.txt-Analyze] >> test.py::test[flatten_by-flatten_expr_groupby--Debug] [GOOD] >> test.py::test[flatten_by-flatten_expr_groupby--Plan] [GOOD] >> test.py::test[flatten_by-flatten_expr_groupby--Results] >> TYardTest::TestEnormousDisk [GOOD] >> test.py::test[action-eval_drop--Results] [GOOD] >> test.py::test[action-eval_for-default.txt-Debug] >> 
test.py::test[aggr_factory-booland-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-booland-default.txt-Results] >> test.py::test[pg-select_agg_expr_key_under_agg-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Analyze] >> test.py::test[pg_catalog-pg_database-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_get_userbyid-default.txt-Debug] >> test.py::test[count-count_nullable_sub-default.txt-Debug] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Plan] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[pg-select_sort_project_expr-default.txt-Debug] [GOOD] >> test.py::test[pg-select_sort_project_expr-default.txt-Plan] [GOOD] >> test.py::test[pg-select_sort_project_expr-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Debug] [GOOD] >> test.py::test[tpch-q1-default.txt-ForceBlocks] >> test.py::test[sampling-bind_multiple_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] >> test.py::test[blocks-date_top_sort--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestEnormousDisk [GOOD] Test command err: 2024-11-21T17:42:22.297973Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.315341Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T17:42:22.320462Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T17:42:22.412162Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.412180Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T17:42:22.428995Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 10298741817031369652 MagicLogChunk: 6157192166227631391 MagicDataChunk: 1216186777940753948 MagicSysLogChunk: 10702831385587360866 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942377035 (2024-11-21T17:42:22.377035Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.433964Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.435295Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.435474Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# 
{EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.435692Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.436143Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.436296Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.436480Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1176613 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.486141Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.497136Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 10298741817031369652 MagicLogChunk: 6157192166227631391 MagicDataChunk: 1216186777940753948 MagicSysLogChunk: 10702831385587360866 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942377035 (2024-11-21T17:42:22.377035Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.508895Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1930477 NonceLog# 1176613 NonceData# 1538651} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:22.514068Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:22.514090Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2024-11-21T17:42:22.514111Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.516332Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.516598Z 
:BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.516700Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.615628Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.645801Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 10298741817031369652 MagicLogChunk: 6157192166227631391 MagicDataChunk: 1216186777940753948 MagicSysLogChunk: 10702831385587360866 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942377035 (2024-11-21T17:42:22.377035Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.652317Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 3725343 NonceLog# 2938296 NonceData# 2836681} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:42:22.655973Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:42:22.655992Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2024-11-21T17:42:22.656009Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.657753Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.657925Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.657993Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.703827Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:42:22.776900Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.814527Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T17:42:22.819692Z 
:BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T17:42:23.120942Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.120960Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T17:42:23.121853Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 12280758968950009504 MagicLogChunk: 7612407092469385622 MagicDataChunk: 9015255043536468579 MagicSysLogChunk: 17434544765135216015 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210943062455 (2024-11-21T17:42:23.062455Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:23.128197Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:23.132790Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:23.132810Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:23.133268Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.133561Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.133613Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:23.136851Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId ... 
_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:43:05.452395Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 97 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 397312} PDiskId# 1 2024-11-21T17:43:05.455271Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 397312} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:43:05.464130Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:05.500804Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-1 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:05.522407Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:43:05.581601Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:43:07.271912Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:43:07.272148Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 5948507781049450596 MagicNextLogChunkReference: 16151371319980822814 MagicLogChunk: 9032829670544953493 MagicDataChunk: 304711349701866462 MagicSysLogChunk: 7466581410444205477 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210951154614 (2024-11-21T17:42:31.154614Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:43:07.411318Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 6886307 NonceLog# 4832914 NonceData# 4247636787} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:43:07.423084Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:43:08.146323Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 126331 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags 
LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} PDiskId# 1 2024-11-21T17:43:08.149375Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:43:08.155469Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-16 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:08.162448Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-16 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:08.169069Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:43:08.170945Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:43:08.799809Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:43:08.800037Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 5948507781049450596 MagicNextLogChunkReference: 16151371319980822814 MagicLogChunk: 9032829670544953493 MagicDataChunk: 304711349701866462 MagicSysLogChunk: 7466581410444205477 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210951154614 (2024-11-21T17:42:31.154614Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:43:08.921426Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 8029629 NonceLog# 6479198 NonceData# 4249397867} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:43:08.929445Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:43:09.389406Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32000 SectorIdx# 119960 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} PDiskId# 1 2024-11-21T17:43:09.393556Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | 
DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:43:09.399073Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-17 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:09.405675Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-17 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:09.411037Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:43:09.412812Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:43:09.521327Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:43:09.521532Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 5948507781049450596 MagicNextLogChunkReference: 16151371319980822814 MagicLogChunk: 9032829670544953493 MagicDataChunk: 304711349701866462 MagicSysLogChunk: 7466581410444205477 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210951154614 (2024-11-21T17:42:31.154614Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:43:09.645731Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 10060521 NonceLog# 8292357 NonceData# 4251256238} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:43:09.655665Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:43:10.423850Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32001 SectorIdx# 18915 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 32001 OffsetInChunk# 77475840} PDiskId# 1 2024-11-21T17:43:10.428008Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32001 OffsetInChunk# 77475840} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:43:10.433408Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 
lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-18 firstLsnToKeep# 0},},{chunkIdx# 32001 users# 1 endOfSplice# 0 {owner# 3 lsn# 18-18 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:10.440728Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-17 firstLsnToKeep# 0},},{chunkIdx# 32000 users# 1 endOfSplice# 0 {owner# 3 lsn# 17-18 firstLsnToKeep# 0},},{chunkIdx# 32001 users# 1 endOfSplice# 0 {owner# 3 lsn# 18-18 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:43:10.446280Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:43:10.448178Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2024-11-21T17:43:10.452800Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:419} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 132608 Offset# 4294967295 Size# 128 2024-11-21T17:43:10.452817Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:919} PDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 32002 PDiskId# 1 >> test.py::test[join-inner_with_order-off-Debug] [GOOD] >> test.py::test[join-inner_with_order-off-ForceBlocks] [SKIPPED] >> test.py::test[join-inner_with_order-off-Plan] [GOOD] >> test.py::test[join-inner_with_order-off-Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Analyze] >> test.py::test[produce-reduce_multi_in_presort--Analyze] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Debug] >> test.py::test[blocks-combine_all_avg--Debug] [GOOD] >> test.py::test[blocks-combine_all_avg--Plan] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] [GOOD] >> test.py::test[optimizers-test_lmap_opts--Debug] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Debug] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-off-Plan] [SKIPPED] >> test.py::test[join-anyjoin_merge_nodup-off-Results] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--Debug] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--Plan] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--Results] [SKIPPED] >> test.py::test[join-grace_join1--Debug] [SKIPPED] >> test.py::test[join-grace_join1--Plan] [SKIPPED] >> test.py::test[join-grace_join1--Results] [SKIPPED] >> test.py::test[join-inmem_by_uncomparable_tuples--Debug] >> test.py::test[key_filter-lambda_with_null_filter--Analyze] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Debug] >> test.py::test[optimizers-yql-6008_limit_after_map--Debug] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Plan] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Results] [GOOD] >> test.py::test[pg-select_between-default.txt-Analyze] >> test.py::test[blocks-combine_all_avg--Plan] [GOOD] >> test.py::test[blocks-combine_all_avg--Results] >> test.py::test[optimizers-test_lmap_opts--Debug] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts--Plan] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts--Results] [SKIPPED] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Debug] >> 
test.py::test[join-selfjoin_on_sorted_with_filter--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Debug] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Plan] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_filter-replicate-Results] [SKIPPED] >> test.py::test[join-simple_columns_partial--Debug] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Results] >> test.py::test[action-eval_ensuretype-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_ensuretype-default.txt-Debug] >> test.py::test[action-insert_after_eval_xlock--Debug] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Plan] [GOOD] >> test.py::test[action-insert_after_eval_xlock--Results] >> test.py::test[expr-lds_empty_cast-default.txt-Debug] [GOOD] >> test.py::test[expr-lds_empty_cast-default.txt-Plan] [GOOD] >> test.py::test[expr-lds_empty_cast-default.txt-Results] >> test.py::test[produce-reduce_multi_in_sampling--Results] [GOOD] >> test.py::test[produce-reduce_with_python_row_repack-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack-default.txt-Results] [SKIPPED] >> test.py::test[sampling-map-keyfilter-Debug] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-Analyze] [GOOD] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-Debug] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Debug] >> test.py::test[join-right_trivial--Results] [GOOD] >> test.py::test[join-strict_keys-off-Debug] [SKIPPED] >> test.py::test[join-strict_keys-off-Plan] [SKIPPED] >> test.py::test[join-strict_keys-off-Results] [SKIPPED] >> test.py::test[join-three_equalities-off-Debug] [SKIPPED] >> test.py::test[join-three_equalities-off-Plan] [SKIPPED] >> test.py::test[join-three_equalities-off-Results] >> test.py::test[pg-select_sort_project_expr-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Debug] >> test.py::test[flatten_by-flatten_expr_groupby--Results] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Debug] >> test.py::test[tpch-q12-default.txt-Debug] [GOOD] >> test.py::test[tpch-q12-default.txt-ForceBlocks] >> test.py::test[action-eval_for-default.txt-Debug] [GOOD] >> test.py::test[action-eval_for-default.txt-Plan] [GOOD] >> test.py::test[action-eval_for-default.txt-Results] >> test.py::test[action-mixed_eval_typeof_world1--Results] [GOOD] >> test.py::test[action-runtime_quote_code-default.txt-Debug] >> test.py::test[count-count_const_no_grouping-default.txt-Debug] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-ForceBlocks] >> test.py::test[join-three_equalities-off-Results] [SKIPPED] >> test.py::test[json-json_exists/common_syntax-default.txt-Debug] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Debug] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_projection_uncorrelated-default.txt-Debug] >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks >> 
test.py::test[seq_mode-simple2-default.txt-Analyze] [GOOD] >> test.py::test[seq_mode-simple2-default.txt-Debug] >> test.py::test[pg-select_agg_one_row-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Debug] >> test.py::test[pg_catalog-pg_get_userbyid-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_get_userbyid-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_get_userbyid-default.txt-Results] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Debug] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1-off-Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort1-off-Results] [GOOD] >> test.py::test[join-premap_merge_inner-off-Analyze] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[schema-copy-read_schema-Debug] >> test.py::test[expr-lds_empty_cast-default.txt-Results] [GOOD] >> test.py::test[expr-lds_literal-default.txt-Debug] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Debug] >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[csee-complete_l2-default.txt-Debug] >> test.py::test[join-left_join_right_pushdown_nested_left--Analyze] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Debug] >> test.py::test[order_by-extract_members_over_sort_desc--Debug] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Plan] [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] Test command err: None domains 1 new (ns): 86.43841241 None domains 1 old (ns): 43.48763799 None domains 9 new (ns): 47.58938239 None domains 9 old (ns): 50.02161379 Mirror3 domains 4 new (ns): 59.95184956 Mirror3 domains 4 old (ns): 73.36538296 Mirror3 domains 9 new (ns): 53.31223499 Mirror3 domains 9 old (ns): 36.45092033 4Plus2Block domains 8 new (ns): 41.82296005 4Plus2Block domains 8 old (ns): 37.91991041 4Plus2Block domains 9 new (ns): 55.49650828 4Plus2Block domains 9 old (ns): 68.21570869 ErasureMirror3of4 domains 8 new (ns): 45.62445711 ErasureMirror3of4 domains 8 old (ns): 43.1731961 ErasureMirror3of4 domains 9 new (ns): 38.3420648 ErasureMirror3of4 domains 9 old (ns): 36.17754919 >> test.py::test[join-inmem_by_uncomparable_tuples--Debug] [GOOD] >> test.py::test[join-inmem_by_uncomparable_tuples--Plan] [GOOD] >> test.py::test[join-inmem_by_uncomparable_tuples--Results] >> test.py::test[pg-select_between-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_between-default.txt-Debug] >> test.py::test[action-eval_for-default.txt-Results] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-Debug] >> test.py::test[tpch-q1-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_in_access_node_exprs-default.txt-Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Debug] >> test.py::test[action-eval_ensuretype-default.txt-Debug] [GOOD] >> test.py::test[action-eval_ensuretype-default.txt-ForceBlocks] >> test.py::test[tpch-q2-default.txt-Debug] [GOOD] >> test.py::test[tpch-q2-default.txt-Plan] [GOOD] >> test.py::test[tpch-q2-default.txt-Results] >> 
test.py::test[aggr_factory-booland-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Analyze] >> test.py::test[pg_catalog-pg_get_userbyid-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_timezone_abbrevs-default.txt-Debug] >> test.py::test[tpch-q1-default.txt-Results] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Plan] [GOOD] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] >> test.py::test[action-insert_after_eval_xlock--Results] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Debug] >> test.py::test[key_filter-lambda_with_null_filter--Debug] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--ForceBlocks] |73.6%| [TA] $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[json-json_exists/common_syntax-default.txt-Debug] [GOOD] >> test.py::test[json-json_exists/common_syntax-default.txt-Plan] [GOOD] >> test.py::test[json-json_exists/common_syntax-default.txt-Results] >> test.py::test[action-runtime_quote_code-default.txt-Debug] [GOOD] >> test.py::test[action-runtime_quote_code-default.txt-Plan] [GOOD] >> test.py::test[action-runtime_quote_code-default.txt-Results] >> test.py::test[tpch-q13-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-Debug] >> test.py::test[pg-sublink_projection_uncorrelated-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_projection_uncorrelated-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_uncorrelated-default.txt-Results] >> test.py::test[count-count_const_no_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Plan] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Results] >> test.py::test[seq_mode-simple2-default.txt-Debug] [GOOD] >> test.py::test[seq_mode-simple2-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_presort--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--ForceBlocks] >> test.py::test[join-yql_465--Debug] >> test.py::test[join-premap_merge_inner-off-Analyze] [GOOD] >> test.py::test[join-premap_merge_inner-off-Debug] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-ForceBlocks] >> test.py::test[expr-lds_literal-default.txt-Debug] [GOOD] >> test.py::test[expr-lds_literal-default.txt-Plan] [GOOD] >> test.py::test[expr-lds_literal-default.txt-Results] >> test.py::test[sampling-map-keyfilter-Debug] [GOOD] >> test.py::test[sampling-map-keyfilter-Plan] [GOOD] >> test.py::test[sampling-map-keyfilter-Results] >> test.py::test[flatten_by-flatten_member_is_struct--Debug] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Plan] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] >> test.py::test[tpch-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q12-default.txt-Plan] [GOOD] >> test.py::test[tpch-q12-default.txt-Results] >> test.py::test[schema-copy-read_schema-Debug] [GOOD] >> test.py::test[schema-copy-read_schema-Plan] [GOOD] >> test.py::test[schema-copy-read_schema-Results] >> test.py::test[csee-complete_l2-default.txt-Debug] [GOOD] >> 
test.py::test[pg-select_between-default.txt-Debug] [GOOD] >> test.py::test[pg-select_between-default.txt-ForceBlocks] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-ForceBlocks] >> test.py::test[action-runtime_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Debug] >> test.py::test[join-inmem_by_uncomparable_tuples--Results] [GOOD] >> test.py::test[join-inner_all_right--Debug] >> test.py::test[json-json_exists/common_syntax-default.txt-Results] [GOOD] >> test.py::test[json-json_exists/passing-default.txt-Debug] >> test.py::test[csee-complete_l2-default.txt-Plan] [GOOD] >> test.py::test[csee-complete_l2-default.txt-Results] >> test.py::test[pg-select_subquery2_qstar-default.txt-Debug] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Plan] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-Analyze] >> test.py::test[join-left_join_right_pushdown_nested_left--Debug] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--ForceBlocks] >> test.py::test[pg-sublink_projection_uncorrelated-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Debug] >> test.py::test[optimizers-yql-10042_disable_flow_fuse_depends_on-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Debug] >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[order_by-native_desc_publish--Debug] [SKIPPED] >> test.py::test[order_by-native_desc_publish--Plan] [SKIPPED] >> test.py::test[order_by-native_desc_publish--Results] >> test.py::test[aggr_factory-linear_histogram-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Debug] >> test.py::test[join-mergejoin_with_reverse_key_order--Debug] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Plan] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] >> test.py::test[blocks-combine_all_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Debug] >> test.py::test[pg_catalog-pg_timezone_abbrevs-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_timezone_abbrevs-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_timezone_abbrevs-default.txt-Results] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Debug] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Plan] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Results] [SKIPPED] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Debug] >> test.py::test[order_by-native_desc_publish--Results] [SKIPPED] >> test.py::test[order_by-order_by_expr--Debug] >> test.py::test[action-eval_ensuretype-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_ensuretype-default.txt-Plan] [GOOD] >> test.py::test[action-eval_ensuretype-default.txt-Results] >> test.py::test[expr-lds_literal-default.txt-Results] [GOOD] >> test.py::test[expr-list_from_range-default.txt-Debug] >> test.py::test[pg-select_agg_one_row-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-ForceBlocks] >> test.py::test[action-eval_for_over_subquery-default.txt-Debug] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-Plan] [GOOD] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] >> 
test.py::test[action-nested_rewrite_io-default.txt-Debug] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Plan] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Results] >> test.py::test[seq_mode-simple2-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-simple2-default.txt-Plan] [GOOD] >> test.py::test[schema-copy-read_schema-Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Debug] >> test.py::test[action-nested_rewrite_io-default.txt-Plan] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Results] >> test.py::test[csee-complete_l2-default.txt-Results] [GOOD] >> test.py::test[csee-const_body_diff_lambda-default.txt-Debug] >> test.py::test[sampling-map-keyfilter-Results] [GOOD] >> test.py::test[sampling-read--Debug] >> test.py::test[seq_mode-simple2-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-Analyze] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Debug] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Plan] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] >> test.py::test[join-premap_merge_inner-off-Debug] [GOOD] >> test.py::test[join-premap_merge_inner-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_merge_inner-off-Plan] [GOOD] >> test.py::test[join-premap_merge_inner-off-Results] >> test.py::test[action-eval_ensuretype-default.txt-Results] [GOOD] >> test.py::test[action-eval_extract-default.txt-Analyze] >> test.py::test[join-yql_465--Debug] [GOOD] >> test.py::test[join-yql_465--Plan] [GOOD] >> test.py::test[join-yql_465--Results] >> test.py::test[flatten_by-flatten_member_is_struct--Results] [GOOD] >> test.py::test[flexible_types-flatten_by-default.txt-Debug] [SKIPPED] >> test.py::test[flexible_types-flatten_by-default.txt-Plan] [SKIPPED] >> test.py::test[flexible_types-flatten_by-default.txt-Results] [SKIPPED] >> test.py::test[hor_join-group_sampling--Debug] [SKIPPED] >> test.py::test[hor_join-group_sampling--Plan] [SKIPPED] >> test.py::test[hor_join-group_sampling--Results] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Debug] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Plan] [SKIPPED] >> test.py::test[hor_join-yql-12610_old_table_props--Results] [SKIPPED] >> test.py::test[in-basic_in-default.txt-Debug] >> test.py::test[pg-select_between-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_between-default.txt-Plan] [GOOD] >> test.py::test[pg-select_between-default.txt-Results] >> test.py::test[pg_catalog-pg_timezone_abbrevs-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull--Debug] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull--Plan] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull--Results] [SKIPPED] >> test.py::test[produce-fuse_reduces_with_presort--Debug] >> test.py::test[aggregate-aggregation_with_named_node--Debug] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Plan] [GOOD] >> test.py::test[aggregate-aggregation_with_named_node--Results] >> test.py::test[join-premap_merge_inner-off-Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Analyze] >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test.py::test[type_v3-json--Debug] >> test.py::test[tpch-q12-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Analyze] >> 
test.py::test[action-subquery_orderby1-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Results] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-Analyze] [GOOD] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-Debug] >> test.py::test[json-json_exists/passing-default.txt-Debug] [GOOD] >> test.py::test[json-json_exists/passing-default.txt-Plan] [GOOD] >> test.py::test[json-json_exists/passing-default.txt-Results] >> test.py::test[produce-reduce_multi_in_presort--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> test.py::test[pg-tpcds-q06-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Results] >> test.py::test[expr-list_from_range-default.txt-Debug] [GOOD] >> test.py::test[expr-list_from_range-default.txt-Plan] [GOOD] >> test.py::test[expr-list_from_range-default.txt-Results] >> test.py::test[join-mergejoin_with_reverse_key_order--Results] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Debug] >> test.py::test[action-eval_for_over_subquery-default.txt-Results] [GOOD] >> test.py::test[action-eval_percentile-default.txt-Debug] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Plan] [GOOD] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] >> test.py::test[seq_mode-simple2-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_with_view--Analyze] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery_scalar2-default.txt-Debug] >> test.py::test[join-inner_all_right--Debug] [GOOD] >> test.py::test[join-inner_all_right--Plan] [GOOD] >> test.py::test[join-inner_all_right--Results] >> test.py::test[join-left_join_right_pushdown_nested_left--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Plan] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] >> test.py::test[pg-select_between-default.txt-Results] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Analyze] >> test.py::test[join-simple_columns_partial--Debug] [GOOD] >> test.py::test[action-nested_rewrite_io-default.txt-Results] [GOOD] >> test.py::test[join-simple_columns_partial--Plan] [GOOD] >> test.py::test[join-simple_columns_partial--Results] >> test.py::test[action-subquery_merge_evaluate-default.txt-Debug] >> test.py::test[order_by-order_by_expr--Debug] [GOOD] >> test.py::test[order_by-order_by_expr--Plan] [GOOD] >> test.py::test[order_by-order_by_expr--Results] >> test.py::test[join-yql_465--Results] [GOOD] >> test.py::test[key_filter-decimal--Debug] [SKIPPED] >> test.py::test[key_filter-decimal--Plan] [SKIPPED] >> test.py::test[key_filter-decimal--Results] [SKIPPED] >> test.py::test[key_filter-dict_contains-default.txt-Debug] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-Results] >> test.py::test[action-subquery_orderby1-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Debug] >> test.py::test[csee-const_body_diff_lambda-default.txt-Debug] [GOOD] >> test.py::test[csee-const_body_diff_lambda-default.txt-Plan] 
[GOOD] >> test.py::test[csee-const_body_diff_lambda-default.txt-Results] >> test.py::test[json-json_exists/passing-default.txt-Results] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-Debug] >> test.py::test[pg-tpcds-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Debug] >> test.py::test[action-eval_extract-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_extract-default.txt-Debug] >> test.py::test[aggr_factory-linear_histogram-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-ForceBlocks] >> test.py::test[expr-list_from_range-default.txt-Results] [GOOD] >> test.py::test[expr-list_indexof-default.txt-Debug] >> test.py::test[sampling-read--Debug] [GOOD] >> test.py::test[sampling-read--Plan] [GOOD] >> test.py::test[sampling-read--Results] >> test.py::test[aggregate-aggregation_with_named_node--Results] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Analyze] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Debug] >> test.py::test[aggregate-avg_interval-default.txt-Debug] >> test.py::test[type_v3-decimal_yt_nollvm--Analyze] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Debug] >> test.py::test[tpch-q11-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q11-default.txt-Debug] >> test.py::test[schema-insert_sorted-row_spec-Debug] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Plan] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] >> test.py::test[key_filter-lambda_with_null_filter--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Debug] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Plan] [GOOD] >> test.py::test[key_filter-mixed_sort--Analyze] >> test.py::test[blocks-distinct_opt_state_keys--Results] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] [GOOD] >> test.py::test[order_by-SortByTwoFields--Debug] >> test.py::test[pg-select_agg_one_row-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_one_row-default.txt-Results] >> test.py::test[tpch-q20-default.txt-Debug] [GOOD] >> test.py::test[tpch-q20-default.txt-Plan] [GOOD] >> test.py::test[tpch-q20-default.txt-Results] >> test.py::test[pg-select_subquery_scalar2-default.txt-Debug] [GOOD] >> test.py::test[pg-select_subquery_scalar2-default.txt-Plan] [GOOD] >> test.py::test[pg-select_subquery_scalar2-default.txt-Results] >> test.py::test[action-eval_percentile-default.txt-Debug] [GOOD] >> test.py::test[action-eval_percentile-default.txt-Plan] >> TBtreeIndexTPartLarge::BigKeys1GB [GOOD] >> TBtreeIndexTPartLarge::CutKeys >> test.py::test[csee-const_body_diff_lambda-default.txt-Results] [GOOD] >> test.py::test[datetime-all_timezones-default.txt-Debug] >> test.py::test[action-eval_percentile-default.txt-Plan] [GOOD] >> test.py::test[action-eval_percentile-default.txt-Results] >> test.py::test[type_v3-json--Debug] [GOOD] >> test.py::test[type_v3-json--Plan] [GOOD] >> test.py::test[type_v3-json--Results] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-Debug] [GOOD] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-ForceBlocks] >> test.py::test[table_range-concat_with_view--Analyze] [GOOD] >> test.py::test[table_range-concat_with_view--Debug] >> test.py::test[in-basic_in-default.txt-Debug] [GOOD] >> test.py::test[in-basic_in-default.txt-Plan] [GOOD] >> test.py::test[in-basic_in-default.txt-Results] >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> 
test.py::test[produce-reduce_with_python_few_keys_stream-default.txt-Analyze] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_with_python_few_keys_stream-default.txt-Results] [SKIPPED] >> test.py::test[sampling-bind_small_rate-default.txt-Analyze] >> test.py::test[pg-select_is_null-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Debug] >> test.py::test[action-subquery_merge_evaluate-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_merge_evaluate-default.txt-Results] >> test.py::test[order_by-order_by_expr--Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Debug] >> test.py::test[join-inner_all_right--Results] [GOOD] >> test.py::test[join-inner_with_select--Debug] >> test.py::test[agg_apply-avg_numeric-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-avg_numeric-default.txt-Results] >> test.py::test[expr-list_indexof-default.txt-Debug] [GOOD] >> test.py::test[expr-list_indexof-default.txt-Plan] [GOOD] >> test.py::test[expr-list_indexof-default.txt-Results] >> test.py::test[json-json_value/on_empty-default.txt-Debug] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-Plan] [GOOD] >> test.py::test[json-json_value/on_empty-default.txt-Results] >> test.py::test[sampling-read--Results] [GOOD] >> test.py::test[sampling-reduce-with_premap-Debug] >> test.py::test[pg-select_subquery_scalar2-default.txt-Results] [GOOD] >> test.py::test[pg-select_substring-default.txt-Debug] >> test.py::test[pg-tpcds-q12-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Plan] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] [GOOD] >> test.py::test[schema-other--Debug] [SKIPPED] >> test.py::test[schema-other--Plan] [SKIPPED] >> test.py::test[schema-other--Results] [SKIPPED] >> test.py::test[schema-select_field-row_spec-Debug] >> test.py::test[action-eval_extract-default.txt-Debug] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Debug] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Plan] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Results] >> test.py::test[pg-tpcds-q12-default.txt-Results] >> test.py::test[expr-to_hashed_dict_tuple_key-default.txt-Results] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Analyze] >> test.py::test[action-eval_extract-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Analyze] >> test.py::test[key_filter-dict_contains-default.txt-Debug] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-Plan] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-Results] >> test.py::test[action-eval_percentile-default.txt-Results] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Debug] >> test.py::test[type_v3-decimal_yt_nollvm--Debug] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] >> test.py::test[type_v3-json--Results] [GOOD] >> test.py::test[type_v3-non_strict--Debug] >> test.py::test[key_filter-mixed_sort--Analyze] [GOOD] >> test.py::test[key_filter-mixed_sort--Debug] >> 
test.py::test[action-subquery_merge_evaluate-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Debug] >> test.py::test[datetime-all_timezones-default.txt-Debug] [GOOD] >> test.py::test[datetime-all_timezones-default.txt-Plan] [GOOD] >> test.py::test[datetime-all_timezones-default.txt-Results] >> test.py::test[blocks-combine_hashed_avg--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Plan] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Results] >> test.py::test[join-premap_nonseq_flatmap--Debug] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-ForceBlocks] [GOOD] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-Plan] [GOOD] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-Results] >> test.py::test[order_by-SortByTwoFields--Debug] [GOOD] >> test.py::test[order_by-SortByTwoFields--Plan] [GOOD] >> test.py::test[order_by-SortByTwoFields--Results] >> test.py::test[in-basic_in-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-Debug] >> test.py::test[pg-select_agg_one_row-default.txt-Results] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Analyze] >> test.py::test[agg_apply-avg_numeric-default.txt-Results] [GOOD] >> test.py::test[agg_apply-pg_int4-default.txt-Debug] >> test.py::test[expr-list_indexof-default.txt-Results] [GOOD] >> test.py::test[expr-list_uniq-default.txt-Debug] >> test.py::test[json-json_value/on_empty-default.txt-Results] [GOOD] >> test.py::test[json-json_value/passing_exception--Debug] [SKIPPED] >> test.py::test[json-json_value/passing_exception--Plan] [SKIPPED] >> test.py::test[json-json_value/passing_exception--Results] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Analyze] >> test.py::test[sampling-bind_small_rate-default.txt-Analyze] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Debug] >> test.py::test[pg-select_is_null-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Results] [GOOD] >> test.py::test[pg-select_is_null-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q13-default.txt-Debug] >> test.py::test[aggr_factory-linear_histogram-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] >> test.py::test[table_range-concat_with_view--Debug] [GOOD] >> test.py::test[table_range-concat_with_view--ForceBlocks] >> test.py::test[tpch-q11-default.txt-Debug] [GOOD] >> test.py::test[tpch-q11-default.txt-ForceBlocks] >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[tpch-q21-default.txt-Debug] >> test.py::test[key_filter-dict_contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Debug] >> test.py::test[pg-select_substring-default.txt-Debug] [GOOD] >> test.py::test[pg-select_substring-default.txt-Plan] [GOOD] >> test.py::test[pg-select_substring-default.txt-Results] >> test.py::test[aggregate-avg_interval-default.txt-Debug] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-Plan] [GOOD] >> test.py::test[aggregate-avg_interval-default.txt-Results] >> test.py::test[blocks-date_less_or_equal_scalar--Debug] >> test.py::test[order_by-order_by_dynum-default.txt-Analyze] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Debug] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Analyze] 
[GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Debug] >> test.py::test[action-eval_extract-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_extract-default.txt-Plan] [GOOD] >> test.py::test[action-eval_extract-default.txt-Results] >> test.py::test[csee-closure_in_l2_and_l1-default.txt-Results] [GOOD] >> test.py::test[csee-yql-7237--Analyze] >> test.py::test[join-mergejoin_with_table_range--Results] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Debug] >> test.py::test[type_v3-decimal_yt_nollvm--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Plan] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Results] >> test.py::test[schema-select_field-row_spec-Debug] [GOOD] >> test.py::test[schema-select_field-row_spec-Plan] [GOOD] >> test.py::test[schema-select_field-row_spec-Results] >> test.py::test[datetime-all_timezones-default.txt-Results] [GOOD] >> test.py::test[datetime-date_diff_compare-default.txt-Debug] >> test.py::test[order_by-SortByTwoFields--Results] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--Debug] >> test.py::test[action-eval_table_with_view-default.txt-Debug] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Plan] [GOOD] >> test.py::test[action-eval_table_with_view-default.txt-Results] >> test.py::test[pg-select_substring-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Debug] >> test.py::test[action-subquery_merge_nested_world-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] >> test.py::test[action-closure_action-default.txt-Debug] >> test.py::test[in-in_ansi_empty-default.txt-Debug] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-Plan] [GOOD] >> test.py::test[in-in_ansi_empty-default.txt-Results] >> test.py::test[pg-select_columnref1-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Debug] >> test.py::test[agg_apply-pg_int4-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-pg_int4-default.txt-Plan] >> test.py::test[type_v3-non_strict--Debug] [GOOD] >> test.py::test[type_v3-non_strict--Plan] [GOOD] >> test.py::test[type_v3-non_strict--Results] >> test.py::test[key_filter-mixed_sort--Debug] [GOOD] >> test.py::test[key_filter-mixed_sort--ForceBlocks] >> test.py::test[json-json_value/passing_exception--Results] [GOOD] >> test.py::test[json-jsondocument/insert--Debug] >> test.py::test[pg-select_is_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_is_null-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Plan] >> test.py::test[agg_apply-pg_int4-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-pg_int4-default.txt-Results] >> test.py::test[action-eval_extract-default.txt-Results] [GOOD] >> test.py::test[action-eval_folder--Analyze] >> test.py::test[join-inner_with_select--Debug] [GOOD] >> test.py::test[join-inner_with_select--Plan] [GOOD] >> test.py::test[join-inner_with_select--Results] >> test.py::test[pg-tpcds-q13-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Results] >> test.py::test[pg-select_is_null-default.txt-Results] >> test.py::test[join-simple_columns_partial--Results] [GOOD] >> test.py::test[join-star_join_semionly--Debug] >> test.py::test[expr-list_uniq-default.txt-Debug] [GOOD] >> test.py::test[expr-list_uniq-default.txt-Plan] [GOOD] >> test.py::test[expr-list_uniq-default.txt-Results] >> 
test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Analyze] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Debug] >> test.py::test[sampling-bind_small_rate-default.txt-Debug] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-ForceBlocks] >> test.py::test[type_v3-decimal_yt_nollvm--Results] [GOOD] >> test.py::test[view-view_with_library--Analyze] >> test.py::test[join-premap_nonseq_flatmap--ForceBlocks] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Plan] [GOOD] >> test.py::test[join-premap_nonseq_flatmap--Results] >> test.py::test[schema-select_field-row_spec-Results] [GOOD] >> test.py::test[schema-select_field-schema-Debug] >> test.py::test[blocks-distinct_opt_state_keys--Results] [GOOD] >> test.py::test[blocks-filter_partial_expr--Debug] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Analyze] >> test.py::test[table_range-concat_with_view--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_with_view--Plan] [GOOD] >> test.py::test[table_range-concat_with_view--Results] >> test.py::test[blocks-combine_hashed_avg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_count--Debug] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-ForceBlocks] >> test.py::test[aggregate-avg_interval-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Debug] >> test.py::test[action-eval_table_with_view-default.txt-Results] [GOOD] >> test.py::test[action-inline_action-default.txt-Debug] >> test.py::test[csee-yql-7237--Analyze] [GOOD] >> test.py::test[csee-yql-7237--Debug] >> test.py::test[key_filter-dict_contains_optional--Debug] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Plan] >> test.py::test[in-in_ansi_empty-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted--Debug] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_const_interval--Debug] >> test.py::test[order_by-order_by_dynum-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-ForceBlocks] >> test.py::test[datetime-date_diff_compare-default.txt-Debug] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Plan] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] >> test.py::test[pg-select_is_null-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_inner-default.txt-Analyze] >> test.py::test[type_v3-non_strict--Results] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Debug] >> test.py::test[datetime-date_diff_compare-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_diff_compare-default.txt-Results] >> test.py::test[pg-select_win_count-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_count-default.txt-Results] >> test.py::test[pg-tpcds-q13-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Debug] >> test.py::test[tpch-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q11-default.txt-Plan] [GOOD] >> test.py::test[tpch-q11-default.txt-Results] >> test.py::test[action-eval_folder--Analyze] [GOOD] >> test.py::test[action-eval_folder--Debug] >> test.py::test[action-closure_action-default.txt-Debug] [GOOD] >> test.py::test[action-closure_action-default.txt-Plan] [GOOD] >> test.py::test[action-closure_action-default.txt-Results] >> 
test.py::test[order_by-SortByTwoFieldsDesc--Debug] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--Plan] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] >> test.py::test[pg-select_columnref1-default.txt-Debug] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] >> test.py::test[expr-list_uniq-default.txt-Results] [GOOD] >> test.py::test[expr-literal_strings-default.txt-Debug] >> test.py::test[agg_apply-pg_int4-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_type-default.txt-Debug] >> test.py::test[sampling-reduce-with_premap-Debug] [GOOD] >> test.py::test[sampling-reduce-with_premap-Plan] [GOOD] >> test.py::test[sampling-reduce-with_premap-Results] >> test.py::test[view-view_with_library--Analyze] [GOOD] >> test.py::test[view-view_with_library--Debug] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Plan] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] >> test.py::test[join-inner_with_select--Results] [GOOD] >> test.py::test[join-inner_with_select-off-Debug] [SKIPPED] >> test.py::test[join-inner_with_select-off-Plan] [SKIPPED] >> test.py::test[join-inner_with_select-off-Results] >> test.py::test[json-jsondocument/insert--Debug] [GOOD] >> test.py::test[json-jsondocument/insert--Plan] [GOOD] >> test.py::test[json-jsondocument/insert--Results] >> test.py::test[action-inline_action-default.txt-Debug] [GOOD] >> test.py::test[action-inline_action-default.txt-Plan] [GOOD] >> test.py::test[action-inline_action-default.txt-Results] >> test.py::test[join-inner_with_select-off-Results] [SKIPPED] >> test.py::test[join-join_cbo_3_tables--Debug] >> test.py::test[schema-select_field-schema-Debug] [GOOD] >> test.py::test[schema-select_field-schema-Plan] [GOOD] >> test.py::test[schema-select_field-schema-Results] >> test.py::test[action-closure_action-default.txt-Results] [GOOD] >> test.py::test[action-eval_code_nested-default.txt-Debug] >> test.py::test[datetime-date_diff_compare-default.txt-Results] [GOOD] >> test.py::test[key_filter-mixed_sort--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_cast-default.txt-Debug] >> test.py::test[key_filter-mixed_sort--Plan] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Debug] >> test.py::test[key_filter-mixed_sort--Results] >> test.py::test[sampling-bind_small_rate-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_small_rate-default.txt-Results] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Results] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] >> test.py::test[join-premap_nonseq_flatmap--Results] [GOOD] >> test.py::test[join-pullup_random--Analyze] >> test.py::test[order_by-order_by_dynum-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Results] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] 
[GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Debug] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--ForceBlocks] >> test.py::test[key_filter-dict_contains_optional--Results] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Debug] >> test.py::test[pg-select_join_inner-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_join_inner-default.txt-Debug] >> test.py::test[type_v3-replace_diff_layout--Debug] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Plan] >> test.py::test[order_by-order_by_expr_mul_cols--Debug] >> test.py::test[type_v3-replace_diff_layout--Plan] [GOOD] >> test.py::test[type_v3-replace_diff_layout--Results] >> test.py::test[join-opt_on_opt_side_with_group--Debug] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Plan] [GOOD] >> test.py::test[join-opt_on_opt_side_with_group--Results] >> test.py::test[pg-tpcds-q52-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] >> test.py::test[action-eval_folder--Debug] [GOOD] >> test.py::test[action-eval_folder--ForceBlocks] >> test.py::test[blocks-filter_partial_expr--Debug] [GOOD] >> test.py::test[blocks-filter_partial_expr--Plan] [GOOD] >> test.py::test[blocks-filter_partial_expr--Results] >> test.py::test[tpch-q21-default.txt-Debug] [GOOD] >> test.py::test[tpch-q21-default.txt-Plan] [GOOD] >> test.py::test[tpch-q21-default.txt-Results] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[tpch-q16-default.txt-Analyze] >> test.py::test[action-inline_action-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_type-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_type-default.txt-Plan] >> test.py::test[expr-literal_strings-default.txt-Debug] [GOOD] >> test.py::test[expr-literal_strings-default.txt-Plan] [GOOD] >> test.py::test[expr-literal_strings-default.txt-Results] >> test.py::test[action-runtime_apply_quoted_code-default.txt-Debug] >> test.py::test[agg_apply-sum_type-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_type-default.txt-Results] >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[key_filter-complex-default.txt-Debug] >> test.py::test[in-in_sorted--Debug] [GOOD] >> test.py::test[in-in_sorted--Plan] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Plan] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] >> test.py::test[csee-yql-7237--Debug] [GOOD] >> test.py::test[in-in_sorted--Results] >> test.py::test[csee-yql-7237--ForceBlocks] >> test.py::test[schema-select_field-schema-Results] [GOOD] >> test.py::test[schema-user_schema_append--Debug] [SKIPPED] >> test.py::test[schema-user_schema_append--Plan] [SKIPPED] >> test.py::test[schema-user_schema_append--Results] >> test.py::test[agg_apply-avg_const_interval--Debug] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Analyze] >> test.py::test[sampling-bind_small_rate-default.txt-Results] [GOOD] >> test.py::test[sampling-system_sampling-io_block_size-Analyze] [SKIPPED] >> test.py::test[blocks-combine_hashed_count--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_count--Plan] >> test.py::test[view-view_with_library--Debug] [GOOD] >> test.py::test[view-view_with_library--ForceBlocks] >> test.py::test[join-star_join_semionly--Debug] [GOOD] >> 
test.py::test[agg_apply-avg_const_interval--Plan] [GOOD] >> test.py::test[join-star_join_semionly--Plan] [GOOD] >> test.py::test[join-star_join_semionly--Results] >> test.py::test[agg_apply-avg_const_interval--Results] >> test.py::test[type_v3-replace_diff_layout--Results] [GOOD] >> test.py::test[udf-python_struct--Debug] [SKIPPED] >> test.py::test[udf-python_struct--Plan] >> test.py::test[action-eval_code_nested-default.txt-Debug] [GOOD] >> test.py::test[action-eval_code_nested-default.txt-Plan] [GOOD] >> test.py::test[action-eval_code_nested-default.txt-Results] >> test.py::test[pg-select_win_count-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_expr_agg-default.txt-Debug] >> test.py::test[sampling-system_sampling-io_block_size-Debug] [SKIPPED] >> test.py::test[sampling-system_sampling-io_block_size-ForceBlocks] [SKIPPED] >> test.py::test[sampling-system_sampling-io_block_size-Plan] [SKIPPED] >> test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test.py::test[sampling-table_content--Analyze] >> test.py::test[blocks-combine_hashed_count--Plan] [GOOD] >> test.py::test[blocks-combine_hashed_count--Results] >> test.py::test[join-pullup_random--Analyze] [GOOD] >> test.py::test[join-pullup_random--Debug] >> test.py::test[udf-python_struct--Plan] [SKIPPED] >> test.py::test[udf-python_struct--Results] [SKIPPED] >> test.py::test[udf-same_udf_modules--Debug] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-ForceBlocks] >> test.py::test[sampling-reduce-with_premap-Results] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-Debug] >> test.py::test[pg-tpcds-q52-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Debug] >> test.py::test[blocks-filter_partial_expr--Results] [GOOD] >> test.py::test[blocks-pg--Debug] >> test.py::test[order_by-order_by_dynum-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf--Analyze] >> test.py::test[expr-literal_strings-default.txt-Results] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--Debug] [SKIPPED] >> test.py::test[pg-select_join_inner-default.txt-Debug] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_type-default.txt-Results] [GOOD] >> test.py::test[agg_phases-count_all-default.txt-Debug] >> test.py::test[expr-non_persistable_insert_into_fail--Plan] [SKIPPED] >> test.py::test[expr-non_persistable_insert_into_fail--Results] >> test.py::test[pg-select_join_inner-default.txt-ForceBlocks] >> test.py::test[aggregate-avg_with_having-default.txt-Plan] [GOOD] >> test.py::test[aggregate-avg_with_having-default.txt-Results] >> test.py::test[action-eval_folder--ForceBlocks] [GOOD] >> test.py::test[action-eval_folder--Plan] [GOOD] >> test.py::test[action-eval_folder--Results] >> test.py::test[key_filter-mixed_sort--Results] [GOOD] >> test.py::test[key_filter-multiusage--Analyze] >> test.py::test[datetime-date_tz_cast-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_tz_cast-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_tz_cast-default.txt-Results] >> test.py::test[key_filter-is_null_with_condition--Debug] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Plan] [GOOD] >> test.py::test[key_filter-is_null_with_condition--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--ForceBlocks] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] [GOOD] >> 
test.py::test[pg-select_having_no_from-default.txt-Analyze] >> test.py::test[action-runtime_apply_quoted_code-default.txt-Debug] [GOOD] >> test.py::test[action-runtime_apply_quoted_code-default.txt-Plan] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Plan] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] >> test.py::test[tpch-q16-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q16-default.txt-Debug] >> test.py::test[action-runtime_apply_quoted_code-default.txt-Results] >> test.py::test[tpch-q11-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-Analyze] >> test.py::test[schema-user_schema_append--Results] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Debug] >> test.py::test[select-column_labels-default.txt-Debug] >> test.py::test[action-eval_code_nested-default.txt-Results] [GOOD] >> test.py::test[action-eval_extract-default.txt-Debug] >> test.py::test[action-eval_folder--Results] [GOOD] >> test.py::test[sampling-table_content--Analyze] [GOOD] >> test.py::test[in-in_sorted--Results] [GOOD] >> test.py::test[pg-select_win_expr_agg-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Debug] [GOOD] >> test.py::test[in-in_tuple_check0_fail--Debug] [SKIPPED] >> test.py::test[order_by-order_by_expr_mul_cols--Plan] [GOOD] >> test.py::test[pg-select_win_expr_agg-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Results] >> test.py::test[pg-select_win_expr_agg-default.txt-Results] >> test.py::test[agg_apply-avg_const_interval--Results] [GOOD] >> test.py::test[agg_apply-pg_text-default.txt-Debug] >> test.py::test[action-eval_resourcetype-default.txt-Analyze] >> test.py::test[udf-same_udf_modules--Debug] [GOOD] >> test.py::test[udf-same_udf_modules--Plan] [GOOD] >> test.py::test[udf-same_udf_modules--Results] >> test.py::test[sampling-table_content--Debug] >> test.py::test[in-in_tuple_check0_fail--Plan] [SKIPPED] >> test.py::test[join-opt_on_opt_side_with_group--Results] [GOOD] >> test.py::test[in-in_tuple_check0_fail--Results] >> test.py::test[join-premap_common_left_cross-off-Debug] [SKIPPED] >> test.py::test[view-view_with_library--ForceBlocks] [GOOD] >> test.py::test[csee-yql-7237--ForceBlocks] [GOOD] >> test.py::test[csee-yql-7237--Plan] [GOOD] >> test.py::test[order_by-order_by_udf--Analyze] [GOOD] >> test.py::test[order_by-order_by_udf--Debug] >> test.py::test[key_filter-is_null_with_condition--Results] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] >> test.py::test[expr-static_zip-default.txt-Debug] >> test.py::test[join-premap_common_left_cross-off-Plan] [SKIPPED] >> test.py::test[join-premap_common_left_cross-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi-off-Debug] [SKIPPED] >> test.py::test[join-premap_common_semi-off-Plan] [SKIPPED] >> test.py::test[join-premap_common_semi-off-Results] [SKIPPED] >> test.py::test[view-view_with_library--Plan] [GOOD] >> test.py::test[view-view_with_library--Results] >> test.py::test[csee-yql-7237--Results] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-complex-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Plan] >> test.py::test[key_filter-complex-default.txt-Plan] [GOOD] >> test.py::test[key_filter-complex-default.txt-Results] >> test.py::test[blocks-combine_hashed_count--Results] 
[GOOD] >> test.py::test[blocks-date_add_interval--Debug] >> test.py::test[key_filter-no_bypass_merge--Debug] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge--Plan] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge--Results] [SKIPPED] >> test.py::test[key_filter-yql-8117-table_key_filter--Debug] >> test.py::test[join-premap_map_semi-off-Debug] [SKIPPED] >> test.py::test[join-premap_map_semi-off-Plan] [SKIPPED] >> test.py::test[join-premap_map_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort1--Debug] >> test.py::test[pg-tpcds-q56-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] >> test.py::test[join-pullup_random--Debug] [GOOD] >> test.py::test[join-pullup_random--ForceBlocks] >> test.py::test[action-runtime_apply_quoted_code-default.txt-Results] [GOOD] >> test.py::test[key_filter-multiusage--Analyze] [GOOD] >> test.py::test[key_filter-multiusage--Debug] >> test.py::test[action-runtime_format_free_args_code-default.txt-Debug] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] >> test.py::test[pg-select_having_no_from-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Debug] >> test.py::test[agg_phases-count_all-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-count_all-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-count_all-default.txt-Results] >> test.py::test[sampling-subquery_default-default.txt-Debug] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-Plan] [GOOD] >> test.py::test[sampling-subquery_default-default.txt-Results] >> test.py::test[pg-select_join_inner-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_join_inner-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_inner-default.txt-Results] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Debug] >> test.py::test[datetime-date_tz_cast-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Debug] >> test.py::test[udf-same_udf_modules--Results] [GOOD] >> test.py::test[blocks-pg--Debug] [GOOD] >> test.py::test[blocks-pg--Plan] [GOOD] >> test.py::test[blocks-pg--Results] >> test.py::test[tpch-q21-default.txt-Results] [GOOD] >> test.py::test[tpch-q6-default.txt-Debug] >> test.py::test[udf-trivial_udf--Debug] >> test.py::test[join-star_join_semionly--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly--Debug] >> test.py::test[join-join_cbo_3_tables--Debug] [GOOD] >> test.py::test[join-join_cbo_3_tables--Plan] [GOOD] >> test.py::test[join-join_cbo_3_tables--Results] >> test.py::test[action-eval_extract-default.txt-Debug] [GOOD] >> test.py::test[action-eval_extract-default.txt-Plan] [GOOD] >> test.py::test[action-eval_extract-default.txt-Results] >> test.py::test[pg-select_win_expr_agg-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_lead_lag-default.txt-Debug] >> test.py::test[in-in_tuple_check0_fail--Results] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Debug] >> test.py::test[aggregate-avg_with_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--Debug] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Debug] [GOOD] >> test.py::test[agg_apply-pg_text-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-pg_text-default.txt-Plan] [GOOD] >> 
test.py::test[agg_apply-pg_text-default.txt-Results] >> test.py::test[action-eval_resourcetype-default.txt-Analyze] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-Debug] >> test.py::test[select-column_labels-default.txt-Debug] [GOOD] >> test.py::test[select-column_labels-default.txt-Plan] [GOOD] >> test.py::test[select-column_labels-default.txt-Results] >> test.py::test[pg-tpcds-q56-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Debug] >> test.py::test[view-view_with_library--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Analyze] >> test.py::test[flatten_by-flatten_list_on_flatten_by--ForceBlocks] >> test.py::test[expr-static_zip-default.txt-Debug] [GOOD] >> test.py::test[expr-static_zip-default.txt-Plan] [GOOD] >> test.py::test[expr-static_zip-default.txt-Results] >> test.py::test[order_by-order_by_expr_mul_cols--Results] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Debug] >> test.py::test[pg-select_join_inner-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Analyze] >> test.py::test[tpch-q20-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q20-default.txt-Debug] >> test.py::test[aggregate-aggregate_by_one_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Analyze] >> test.py::test[tpch-q16-default.txt-Debug] [GOOD] >> test.py::test[tpch-q16-default.txt-ForceBlocks] >> test.py::test[sampling-subquery_default-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Debug] >> test.py::test[action-runtime_format_free_args_code-default.txt-Debug] [GOOD] >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[action-runtime_format_free_args_code-default.txt-Plan] [GOOD] >> test.py::test[blocks-struct_type--Debug] >> test.py::test[action-runtime_format_free_args_code-default.txt-Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Analyze] >> test.py::test[order_by-order_by_udf--Debug] [GOOD] >> test.py::test[order_by-order_by_udf--ForceBlocks] >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Debug] >> test.py::test[sampling-table_content--Debug] [GOOD] >> test.py::test[sampling-table_content--ForceBlocks] >> test.py::test[pg-select_having_no_from-default.txt-Debug] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-ForceBlocks] >> test.py::test[blocks-date_less_or_equal_scalar--Debug] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Plan] [GOOD] >> test.py::test[blocks-date_less_or_equal_scalar--Results] >> test.py::test[join-pullup_random--ForceBlocks] [GOOD] >> test.py::test[join-pullup_random--Plan] [GOOD] >> test.py::test[join-pullup_random--Results] >> test.py::test[action-eval_extract-default.txt-Results] [GOOD] >> test.py::test[action-eval_type-default.txt-Debug] >> test.py::test[udf-trivial_udf--Debug] [GOOD] >> test.py::test[udf-trivial_udf--Plan] [GOOD] >> test.py::test[udf-trivial_udf--Results] >> test.py::test[select-column_labels-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Debug] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Debug] >> test.py::test[order_by-order_by_udf_duo--Plan] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Results] >> test.py::test[expr-static_zip-default.txt-Results] [GOOD] >> test.py::test[expr-struct_builtins-default.txt-Debug] >> 
test.py::test[weak_field-optimize_weak_fields_map--Analyze] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Debug] >> test.py::test[action-runtime_format_free_args_code-default.txt-Results] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-Debug] >> test.py::test[key_filter-multiusage--Debug] [GOOD] >> test.py::test[key_filter-multiusage--ForceBlocks] >> test.py::test[agg_apply-pg_text-default.txt-Results] [GOOD] >> test.py::test[agg_apply-some_null-default.txt-Debug] >> test.py::test[action-eval_resourcetype-default.txt-Debug] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-ForceBlocks] >> test.py::test[pg-select_win_lead_lag-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_lead_lag-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_lead_lag-default.txt-Results] >> test.py::test[key_filter-yql-8117-table_key_filter--Debug] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--Plan] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] >> test.py::test[join-premap_merge_extrasort1--Debug] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort1--Results] >> test.py::test[tpch-q6-default.txt-Debug] [GOOD] >> test.py::test[blocks-date_add_interval--Debug] [GOOD] >> test.py::test[blocks-date_add_interval--Plan] [GOOD] >> test.py::test[blocks-date_add_interval--Results] >> test.py::test[tpch-q6-default.txt-Plan] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> test.py::test[in-in_tuple_table-default.txt-Debug] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Plan] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Results] >> test.py::test[pg-select_qstarref2-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Debug] >> test.py::test[pg-tpcds-q61-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Results] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Debug] >> test.py::test[flatten_by-flatten_list_on_flatten_by--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Plan] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] >> test.py::test[csee-yql-7237--Results] [GOOD] >> test.py::test[udf-trivial_udf--Results] [GOOD] >> test.py::test[udf-udf_empty--Debug] >> test.py::test[dq-blacklisted_pragmas--Analyze] >> test.py::test[order_by-order_by_udf--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_udf--Plan] [GOOD] >> test.py::test[order_by-order_by_udf--Results] >> test.py::test[action-eval_type-default.txt-Debug] [GOOD] >> test.py::test[action-eval_type-default.txt-Plan] [GOOD] >> test.py::test[action-eval_type-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_agg--Analyze] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Debug] >> test.py::test[tpch-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q16-default.txt-Plan] [GOOD] >> test.py::test[tpch-q16-default.txt-Results] >> test.py::test[order_by-order_by_expr_simple--Debug] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Plan] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Results] >> test.py::test[pg-select_having_no_from-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Plan] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Results] >> 
test.py::test[tpch-q20-default.txt-Debug] [GOOD] >> test.py::test[tpch-q20-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> test.py::test[pg-dates_to_pg-default.txt-Debug] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock [GOOD] >> TPDiskRaces::Decommit >> test.py::test[key_filter-dependent_value-default.txt-Debug] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Plan] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Results] >> test.py::test[join-mapjoin_on_tablerecord-off-Analyze] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Debug] >> test.py::test[weak_field-optimize_weak_fields_map--Debug] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] >> test.py::test[join-pullup_random--Results] [GOOD] >> test.py::test[join-star_join_inners--Analyze] >> test.py::test[pg-select_win_lead_lag-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_max_null-default.txt-Debug] >> test.py::test[expr-struct_builtins-default.txt-Debug] [GOOD] >> test.py::test[expr-struct_builtins-default.txt-Plan] [GOOD] >> test.py::test[expr-struct_builtins-default.txt-Results] >> test.py::test[sampling-subquery_expr-default.txt-Debug] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Plan] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Results] >> test.py::test[in-in_tuple_table-default.txt-Results] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-Debug] >> test.py::test[action-eval_resourcetype-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-Plan] [GOOD] >> test.py::test[action-eval_resourcetype-default.txt-Results] >> test.py::test[action-eval_type-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_match_type-default.txt-Debug] >> test.py::test[sampling-table_content--ForceBlocks] [GOOD] >> test.py::test[sampling-table_content--Plan] [GOOD] >> test.py::test[sampling-table_content--Results] >> test.py::test[join-premap_merge_extrasort1--Results] [GOOD] >> test.py::test[join-pullup_inner--Debug] >> test.py::test[pg-tpcds-q61-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Debug] >> test.py::test[agg_apply-some_null-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-some_null-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-some_null-default.txt-Results] >> test.py::test[blocks-struct_type--Debug] [GOOD] >> test.py::test[blocks-struct_type--Plan] [GOOD] >> test.py::test[blocks-struct_type--Results] >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--Debug] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Debug] >> test.py::test[join-yql-14829_leftonly--Debug] [GOOD] >> test.py::test[flatten_by-flatten_list_on_flatten_by--Results] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Analyze] >> test.py::test[join-yql-14829_leftonly--Plan] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> test.py::test[action-runtime_if_select-default.txt-Debug] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-Plan] [GOOD] >> test.py::test[action-runtime_if_select-default.txt-Results] >> test.py::test[pg-select_having_no_from-default.txt-Results] [GOOD] >> test.py::test[pg-select_where-default.txt-Analyze] >> test.py::test[dq-blacklisted_pragmas--Analyze] [GOOD] >> test.py::test[dq-blacklisted_pragmas--Debug] >> test.py::test[agg_phases-count_all-default.txt-Results] [GOOD] >> 
test.py::test[agg_phases-count_null-default.txt-Debug] >> test.py::test[order_by-order_by_udf--Results] [GOOD] >> test.py::test[order_by-tuple01-default.txt-Analyze] >> test.py::test[key_filter-multiusage--ForceBlocks] [GOOD] >> test.py::test[key_filter-multiusage--Plan] [GOOD] >> test.py::test[key_filter-multiusage--Results] >> test.py::test[udf-udf_empty--Debug] [GOOD] >> test.py::test[udf-udf_empty--Plan] [GOOD] >> test.py::test[udf-udf_empty--Results] >> test.py::test[order_by-order_by_expr_simple--Results] [GOOD] >> test.py::test[join-join_cbo_3_tables--Results] [GOOD] >> test.py::test[join-join_comp_common_table--Debug] >> test.py::test[order_by-order_by_tablepath_column--Debug] >> test.py::test[expr-struct_builtins-default.txt-Results] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Debug] >> test.py::test[key_filter-dependent_value-default.txt-Results] [GOOD] >> test.py::test[library-forward_import--Debug] >> test.py::test[action-eval_resourcetype-default.txt-Results] [GOOD] >> test.py::test[action-export_action--Analyze] >> test.py::test[pg-dates_to_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-dates_to_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-dates_to_pg-default.txt-Results] >> test.py::test[agg_apply-some_null-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_decimal-default.txt-Debug] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] >> test.py::test[pg-select_win_max_null-default.txt-Debug] [GOOD] >> test.py::test[sampling-subquery_expr-default.txt-Results] [GOOD] >> test.py::test[schema-limit_simple--Debug] >> test.py::test[pg-select_qstarref2-default.txt-Debug] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-ForceBlocks] >> test.py::test[join-star_join_inners--Analyze] [GOOD] >> test.py::test[join-star_join_inners--Debug] >> test.py::test[blocks-date_add_interval--Results] [GOOD] >> test.py::test[pg-select_win_max_null-default.txt-Plan] [GOOD] >> test.py::test[blocks-date_less_scalar--Debug] >> test.py::test[pg-select_win_max_null-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_map--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Plan] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Debug] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord-off-Plan] [GOOD] >> test.py::test[join-mapjoin_on_tablerecord-off-Results] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Analyze] >> test.py::test[optimizers-yql-5833-table_content--Debug] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Plan] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Results] >> test.py::test[weak_field-optimize_weak_fields_map--Results] >> test.py::test[udf-udf_empty--Results] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Debug] >> test.py::test[sampling-table_content--Results] [GOOD] >> test.py::test[schema-select_all-schema-Analyze] >> test.py::test[tpch-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q20-default.txt-Plan] [GOOD] >> test.py::test[tpch-q20-default.txt-Results] >> test.py::test[select-from_in_front_sub-default.txt-Debug] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Plan] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] >> test.py::test[blocks-struct_type--Results] 
[GOOD] >> test.py::test[case-case_then_else-default.txt-Debug] >> test.py::test[action-runtime_if_select-default.txt-Results] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Debug] >> test.py::test[flatten_by-flatten_member_is_struct--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Debug] >> test.py::test[pg-dates_to_pg-default.txt-Results] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-Debug] >> test.py::test[aggregate-group_by_ru_join_agg--Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] >> test.py::test[action-evaluate_match_type-default.txt-Debug] [GOOD] >> test.py::test[action-evaluate_match_type-default.txt-Plan] [GOOD] >> test.py::test[action-evaluate_match_type-default.txt-Results] >> test.py::test[pg-tpcds-q75-default.txt-Debug] [GOOD] >> test.py::test[pg-select_where-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Plan] [GOOD] >> test.py::test[pg-select_where-default.txt-Debug] >> test.py::test[pg-tpcds-q75-default.txt-Results] >> test.py::test[order_by-tuple01-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-count_null-default.txt-Debug] [GOOD] >> test.py::test[order_by-tuple01-default.txt-Debug] >> test.py::test[agg_phases-count_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-count_null-default.txt-Results] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Results] >> test.py::test[in-in_with_opt_tuple-default.txt-Debug] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-Plan] [GOOD] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] >> test.py::test[dq-blacklisted_pragmas--Debug] [GOOD] >> test.py::test[dq-blacklisted_pragmas--ForceBlocks] [SKIPPED] >> test.py::test[dq-blacklisted_pragmas--Plan] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Analyze] >> test.py::test[dq-blacklisted_pragmas--Results] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Analyze] >> test.py::test[agg_apply-sum_decimal-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_decimal-default.txt-Plan] >> test.py::test[library-forward_import--Debug] [GOOD] >> test.py::test[library-forward_import--Plan] [GOOD] >> test.py::test[library-forward_import--Results] >> test.py::test[key_filter-yql-8663-dedup_ranges--Debug] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Plan] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] >> test.py::test[action-export_action--Analyze] [GOOD] >> test.py::test[action-export_action--Debug] >> test.py::test[agg_apply-sum_decimal-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_decimal-default.txt-Results] >> test.py::test[tpch-q16-default.txt-Results] [GOOD] >> test.py::test[tpch-q2-default.txt-Analyze] >> test.py::test[type_v3-mergejoin_with_sort--Debug] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--Plan] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--Results] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Analyze] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Debug] >> test.py::test[schema-select_all-schema-Analyze] [GOOD] >> test.py::test[schema-select_all-schema-Debug] >> test.py::test[join-pullup_inner--Debug] [GOOD] >> test.py::test[join-pullup_inner--Plan] [GOOD] >> test.py::test[join-pullup_inner--Results] >> 
test.py::test[schema-limit_simple--Debug] [GOOD] >> test.py::test[schema-limit_simple--Plan] [GOOD] >> test.py::test[schema-limit_simple--Results] >> test.py::test[expr-to_sorted_dict_vartuple_key-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Plan] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Results] >> test.py::test[optimizers-yql-5833-table_content--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] [GOOD] >> test.py::test[join-yql-14829_leftonly-off-Debug] [SKIPPED] >> test.py::test[join-yql-14829_leftonly-off-Plan] [SKIPPED] >> test.py::test[join-yql-14829_leftonly-off-Results] [SKIPPED] >> test.py::test[join-yql-4275-off-Debug] [SKIPPED] >> test.py::test[join-yql-4275-off-Plan] [SKIPPED] >> test.py::test[join-yql-4275-off-Results] [SKIPPED] >> test.py::test[join-yql-6199-off-Debug] [SKIPPED] >> test.py::test[join-yql-6199-off-Plan] [SKIPPED] >> test.py::test[join-yql-6199-off-Results] [SKIPPED] >> test.py::test[json-combination/nested-default.txt-Debug] >> test.py::test[join-star_join_inners--Debug] [GOOD] >> test.py::test[join-star_join_inners--ForceBlocks] >> test.py::test[action-evaluate_match_type-default.txt-Results] [GOOD] >> test.py::test[action-evaluate_queries--Debug] >> test.py::test[key_filter-multiusage--Results] [GOOD] >> test.py::test[key_filter-no_bypass_merge--Analyze] >> test.py::test[order_by-order_by_tablepath_column--Debug] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Plan] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] >> test.py::test[expr-try_member--Debug] [SKIPPED] >> test.py::test[expr-try_member--Plan] [SKIPPED] >> test.py::test[expr-try_member--Results] [SKIPPED] >> test.py::test[expr-variant_tuple_comp-default.txt-Debug] >> test.py::test[pg-tpcds-q75-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Debug] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Debug] >> test.py::test[blocks-date_less_or_equal_scalar--Results] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-Debug] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-Plan] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-Results] >> test.py::test[key_filter-no_bypass_merge--Analyze] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge--Debug] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge--Plan] [SKIPPED] >> test.py::test[key_filter-no_bypass_merge--Results] [SKIPPED] >> test.py::test[key_filter-string_with_legacy--Analyze] >> test.py::test[pg-select_win_max_null-default.txt-Results] [GOOD] >> test.py::test[pg-select_yql_type--Debug] >> test.py::test[flatten_by-flatten_member_is_struct--Debug] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--ForceBlocks] >> test.py::test[blocks-date_sub_interval--Debug] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Debug] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Plan] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] >> test.py::test[case-case_then_else-default.txt-Debug] [GOOD] >> test.py::test[case-case_then_else-default.txt-Plan] [GOOD] >> test.py::test[case-case_then_else-default.txt-Results] >> test.py::test[in-in_with_opt_tuple-default.txt-Results] [GOOD] >> test.py::test[insert-append-with_view-Debug] [SKIPPED] >> 
test.py::test[insert-append-with_view-Plan] [SKIPPED] >> test.py::test[insert-append-with_view-Results] [SKIPPED] >> test.py::test[insert-double_append_to_anonymous--Debug] >> test.py::test[agg_apply-sum_decimal-default.txt-Results] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_decimal_null-default.txt-Debug] >> test.py::test[select-hits_count--Debug] >> test.py::test[order_by-tuple01-default.txt-Debug] [GOOD] >> test.py::test[order_by-tuple01-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_count_and_avg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star1--Debug] >> test.py::test[library-forward_import--Results] [GOOD] >> test.py::test[library-library--Debug] >> test.py::test[union_all-mix_map_and_read-default.txt-Debug] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Plan] [GOOD] >> test.py::test[union_all-mix_map_and_read-default.txt-Results] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Debug] >> test.py::test[produce-fuse_reduces_with_presort--Debug] [GOOD] >> test.py::test[produce-fuse_reduces_with_presort--Plan] [GOOD] >> test.py::test[produce-fuse_reduces_with_presort--Results] >> test.py::test[pg-select_where-default.txt-Debug] [GOOD] >> test.py::test[pg-select_where-default.txt-ForceBlocks] >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[tpch-q9-default.txt-Analyze] >> test.py::test[weak_field-weak_field_to_yson--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Debug] >> test.py::test[dq-join_cbo_native_3_tables--Analyze] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Debug] >> test.py::test[schema-limit_simple--Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Debug] >> test.py::test[action-export_action--Debug] [GOOD] >> test.py::test[action-export_action--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_agg--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Results] >> test.py::test[schema-select_all-schema-Debug] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] [GOOD] >> test.py::test[order_by-ordered_fill--Debug] >> test.py::test[pg-distinct_all_projection-default.txt-Results] [GOOD] >> test.py::test[pg-drop_table--Debug] >> test.py::test[action-evaluate_queries--Debug] [GOOD] >> test.py::test[action-evaluate_queries--Plan] [GOOD] >> test.py::test[action-evaluate_queries--Results] >> test.py::test[schema-select_all-schema-ForceBlocks] >> test.py::test[json-combination/nested-default.txt-Debug] [GOOD] >> test.py::test[json-combination/nested-default.txt-Plan] [GOOD] >> test.py::test[json-combination/nested-default.txt-Results] >> test.py::test[case-case_then_else-default.txt-Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Debug] >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> test.py::test[udf-udaf_default-default.txt-Debug] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Debug] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Plan] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Results] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys--Analyze] >> test.py::test[join-pullup_inner--Results] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-Debug] [GOOD] >> 
test.py::test[expr-variant_tuple_comp-default.txt-Plan] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-Results] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-Debug] >> test.py::test[key_filter-string_with_legacy--Analyze] [GOOD] >> test.py::test[key_filter-string_with_legacy--Debug] >> test.py::test[tpch-q2-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q2-default.txt-Debug] >> test.py::test[join-pullup_inner-off-Debug] [SKIPPED] >> test.py::test[join-pullup_inner-off-Plan] [SKIPPED] >> test.py::test[join-pullup_inner-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber--Debug] >> test.py::test[pg-tpcds-q76-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] |73.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp >> test.py::test[aggregate-group_by_cube_grouping--Debug] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--Plan] [GOOD] >> test.py::test[aggregate-group_by_cube_grouping--Results] >> test.py::test[agg_apply-sum_decimal_null-default.txt-Debug] [GOOD] |73.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a >> test.py::test[union_all-mix_map_and_read-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_decimal_null-default.txt-Plan] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_decimal_null-default.txt-Results] >> test.py::test[union_all-union_all_fields-default.txt-Debug] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-Analyze] >> test.py::test[join-star_join_inners--ForceBlocks] [GOOD] >> test.py::test[pg-select_yql_type--Debug] [GOOD] >> test.py::test[library-library--Debug] [GOOD] >> test.py::test[order_by-tuple01-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners--Plan] [GOOD] >> test.py::test[pg-select_yql_type--Plan] [GOOD] >> test.py::test[pg-select_where-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--ForceBlocks] [GOOD] >> test.py::test[library-library--Plan] [GOOD] >> test.py::test[order_by-tuple01-default.txt-Plan] >> test.py::test[join-star_join_inners--Results] >> test.py::test[action-evaluate_queries--Results] [GOOD] >> test.py::test[library-library--Results] >> test.py::test[pg-select_yql_type--Results] >> test.py::test[schema-select_all-row_spec_diff_sort2-Debug] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Debug] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Debug] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Plan] [GOOD] >> test.py::test[order_by-tuple01-default.txt-Plan] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] >> test.py::test[pg-select_where-default.txt-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] >> test.py::test[action-export_action--Debug] >> test.py::test[dq-join_cbo_native_3_tables--ForceBlocks] >> test.py::test[pg-select_where-default.txt-Results] >> test.py::test[order_by-tuple01-default.txt-Results] >> test.py::test[tpch-q9-default.txt-Analyze] [GOOD] >> test.py::test[json-combination/nested-default.txt-Results] [GOOD] >> test.py::test[action-export_action--ForceBlocks] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--ForceBlocks] [SKIPPED] >> 
test.py::test[lambda-lambda_simple-default.txt-Debug] [GOOD] >> test.py::test[expr-variant_tuple_comp-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Debug] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] >> test.py::test[tpch-q9-default.txt-Debug] >> test.py::test[pg-tpcds-q90-default.txt-Debug] >> test.py::test[json-json_value/on_error_cast_value_exception--Debug] [SKIPPED] >> test.py::test[dq-join_cbo_native_3_tables--Plan] [GOOD] >> test.py::test[dq-join_cbo_native_3_tables--Results] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Plan] [GOOD] >> test.py::test[lambda-lambda_simple-default.txt-Results] >> test.py::test[flexible_types-group_by-default.txt-Debug] [SKIPPED] >> test.py::test[insert-double_append_to_anonymous--Plan] [GOOD] >> test.py::test[insert-double_append_to_anonymous--Results] >> test.py::test[action-export_action--Plan] [GOOD] >> test.py::test[json-json_value/on_error_cast_value_exception--Plan] [SKIPPED] >> test.py::test[json-json_value/on_error_cast_value_exception--Results] >> test.py::test[dq-precompute_parallel--Analyze] >> test.py::test[action-export_action--Results] >> test.py::test[agg_phases-count_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-Debug] >> test.py::test[action-subquery_accessnode-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_decimal_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases-avg_opt-default.txt-Debug] >> test.py::test[action-subquery_accessnode-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_accessnode-default.txt-Results] >> test.py::test[udf-udaf_default-default.txt-Debug] [GOOD] >> test.py::test[udf-udaf_default-default.txt-Plan] [GOOD] >> test.py::test[udf-udaf_default-default.txt-Results] >> test.py::test[flexible_types-group_by-default.txt-Plan] [SKIPPED] >> test.py::test[schema-select_all-schema-ForceBlocks] [GOOD] >> test.py::test[flexible_types-group_by-default.txt-Results] [SKIPPED] >> test.py::test[hor_join-filters--Debug] >> test.py::test[schema-select_all-schema-Plan] [GOOD] >> test.py::test[schema-select_all-schema-Results] |73.6%| [AR] {RESULT} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a >> test.py::test[coalesce-coalesce_few_real-default.txt-Debug] [GOOD] >> test.py::test[library-library--Results] [GOOD] >> test.py::test[blocks-date_sub_interval--Debug] [GOOD] >> test.py::test[blocks-date_sub_interval--Plan] [GOOD] >> test.py::test[blocks-date_sub_interval--Results] >> test.py::test[pg-drop_table--Debug] [GOOD] >> test.py::test[pg-drop_table--Plan] [GOOD] >> test.py::test[pg-drop_table--Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-Plan] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> test.py::test[limit-sort_calc_limit--Debug] >> test.py::test[join-mapjoin_partial_uniq_keys--Analyze] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys--Debug] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-Debug] >> test.py::test[order_by-tuple01-default.txt-Results] [GOOD] >> test.py::test[pg-dates_from_pg-default.txt-Analyze] >> test.py::test[order_by-ordered_fill--Debug] [GOOD] >> test.py::test[order_by-ordered_fill--Plan] |73.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a >> test.py::test[pg-select_where-default.txt-Results] [GOOD] >> 
test.py::test[pg-select_win_ntile-default.txt-Analyze] >> test.py::test[key_filter-string_with_legacy--Debug] [GOOD] >> test.py::test[key_filter-string_with_legacy--ForceBlocks] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Debug] >> test.py::test[order_by-ordered_fill--Plan] [GOOD] >> test.py::test[order_by-ordered_fill--Results] |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp >> test.py::test[action-subquery_accessnode-default.txt-Results] [GOOD] >> test.py::test[action-subquery_assumeorderby-default.txt-Debug] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Debug] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Plan] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] >> test.py::test[distinct-distinct_star1--Debug] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--ForceBlocks] [GOOD] >> test.py::test[select-hits_count--Debug] [GOOD] >> test.py::test[distinct-distinct_star1--Plan] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Plan] [GOOD] >> test.py::test[select-hits_count--Plan] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] >> test.py::test[udf-udaf_default-default.txt-Results] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Debug] >> test.py::test[schema-select_all-schema-Results] [GOOD] >> test.py::test[schema-select_field-row_spec-Analyze] >> test.py::test[dq-precompute_parallel--Analyze] [GOOD] >> test.py::test[flatten_by-flatten_member_is_struct--Results] [GOOD] >> test.py::test[dq-precompute_parallel--Debug] >> test.py::test[json-json_value/on_error_cast_value_exception--Results] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Analyze] >> test.py::test[json-json_value/returning-default.txt-Debug] >> test.py::test[select-hits_count--Results] >> test.py::test[distinct-distinct_star1--Results] >> test.py::test[lambda-lambda_simple-default.txt-Results] [GOOD] >> test.py::test[like-regexp_clause--Debug] >> test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Debug] >> test.py::test[union_all-union_all_fields-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_fields-default.txt-Results] >> test.py::test[insert-double_append_to_anonymous--Results] [GOOD] >> test.py::test[insert-drop_sortness-desc-Debug] >> test.py::test[tpch-q2-default.txt-Debug] [GOOD] >> test.py::test[tpch-q2-default.txt-ForceBlocks] >> test.py::test[join-pullup_rownumber--Debug] [GOOD] >> test.py::test[join-pullup_rownumber--Plan] [GOOD] >> test.py::test[join-pullup_rownumber--Results] >> test.py::test[pg-tpcds-q90-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Results] >> test.py::test[pg-drop_table--Results] [GOOD] >> test.py::test[pg-join_using4-default.txt-Debug] >> test.py::test[pg-dates_from_pg-default.txt-Analyze] [GOOD] >> test.py::test[action-export_action--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[pg-dates_from_pg-default.txt-Debug] >> test.py::test[action-lambda_arg_count-default.txt-Analyze] >> test.py::test[column_group-groups-single-Debug] [SKIPPED] >> test.py::test[column_group-groups-single-Plan] [SKIPPED] >> test.py::test[column_group-groups-single-Results] [SKIPPED] >> 
test.py::test[column_group-hint-single-Debug] [SKIPPED] >> test.py::test[column_group-hint-single-Plan] [SKIPPED] >> test.py::test[column_group-hint-single-Results] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Debug] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-Debug] [GOOD] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-ForceBlocks] >> test.py::test[agg_phases-min_null-default.txt-Debug] [GOOD] >> test.py::test[column_group-hint_non_map_yson_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Plan] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-join--Debug] [SKIPPED] >> test.py::test[column_order-join--Plan] [SKIPPED] >> test.py::test[column_order-join--Results] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt-Debug] >> test.py::test[agg_phases-min_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-Results] >> test.py::test[schema-select_all-row_spec_extra_sort-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] >> test.py::test[pg-select_win_ntile-default.txt-Analyze] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Debug] >> test.py::test[agg_phases-avg_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-avg_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-avg_opt-default.txt-Results] >> test.py::test[join-mapjoin_partial_uniq_keys--Debug] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys--ForceBlocks] >> test.py::test[hor_join-filters--Debug] [GOOD] >> test.py::test[hor_join-filters--Plan] [GOOD] >> test.py::test[hor_join-filters--Results] >> test.py::test[aggregate-group_by_ru_join_agg--Results] [GOOD] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Analyze] >> test.py::test[order_by-ordered_fill--Results] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Debug] >> test.py::test[tpch-q9-default.txt-Debug] [GOOD] >> test.py::test[tpch-q9-default.txt-ForceBlocks] >> test.py::test[hor_join-double_input-default.txt-Analyze] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Debug] >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[window-full/aggregations_compact--Analyze] >> test.py::test[action-export_action--Debug] [GOOD] >> test.py::test[action-export_action--Plan] [GOOD] >> test.py::test[action-export_action--Results] >> test.py::test[action-subquery_assumeorderby-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_assumeorderby-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_assumeorderby-default.txt-Results] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Analyze] [SKIPPED] >> test.py::test[join-star_join_inners--Results] [GOOD] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--ForceBlocks] [SKIPPED] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Results] >> test.py::test[limit-sort_calc_limit--Debug] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-Analyze] >> test.py::test[union-union_column_extention-default.txt-Debug] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Plan] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Results] >> 
test.py::test[schema-select_field-row_spec-Analyze] [GOOD] >> test.py::test[schema-select_field-row_spec-Debug] >> test.py::test[aggregate-group_by_with_udf_by_aggregate--Results] [SKIPPED] >> test.py::test[ansi_idents-inplace_yql-default.txt-Analyze] >> test.py::test[limit-sort_calc_limit--Plan] [GOOD] >> test.py::test[limit-sort_calc_limit--Results] >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Debug] >> test.py::test[key_filter-string_with_legacy--ForceBlocks] [GOOD] >> test.py::test[json-json_value/returning-default.txt-Debug] [GOOD] >> test.py::test[json-json_value/returning-default.txt-Plan] >> test.py::test[distinct-distinct_star1--Results] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-Debug] [SKIPPED] >> test.py::test[dq-dq_replicate_ok-default.txt-Plan] [SKIPPED] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] [SKIPPED] >> test.py::test[union_all-union_all_fields-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Debug] >> test.py::test[pg-sublink_having_exists-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_having_exists-default.txt-Results] >> test.py::test[blocks-date_sub_interval--Results] [GOOD] >> test.py::test[key_filter-string_with_legacy--Plan] [GOOD] >> test.py::test[json-json_value/returning-default.txt-Plan] [GOOD] >> test.py::test[json-json_value/returning-default.txt-Results] >> test.py::test[key_filter-string_with_legacy--Results] >> test.py::test[action-lambda_arg_count-default.txt-Analyze] [GOOD] >> test.py::test[pg-dates_from_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-dates_from_pg-default.txt-ForceBlocks] >> test.py::test[action-lambda_arg_count-default.txt-Debug] >> test.py::test[dq-mem_limit--Debug] [SKIPPED] >> test.py::test[dq-mem_limit--Plan] [SKIPPED] >> test.py::test[dq-mem_limit--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix--Debug] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix--Plan] [SKIPPED] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Debug] >> test.py::test[blocks-decimal_op_decimal_scalar--Debug] >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-Debug] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Debug] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-Plan] [GOOD] >> test.py::test[insert-drop_sortness-desc-Debug] [GOOD] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-Results] >> test.py::test[insert-drop_sortness-desc-Plan] [GOOD] >> test.py::test[insert-drop_sortness-desc-Results] >> test.py::test[pg-join_using4-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using4-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using4-default.txt-Results] >> test.py::test[blocks-date_less_scalar--Debug] [GOOD] >> test.py::test[blocks-date_less_scalar--Plan] [GOOD] >> test.py::test[blocks-date_less_scalar--Results] >> test.py::test[like-regexp_clause--Debug] [GOOD] >> test.py::test[like-regexp_clause--Plan] [GOOD] >> test.py::test[like-regexp_clause--Results] >> test.py::test[dq-precompute_parallel--Debug] [GOOD] >> test.py::test[dq-precompute_parallel--ForceBlocks] [SKIPPED] >> test.py::test[dq-precompute_parallel--Plan] [GOOD] >> 
test.py::test[dq-precompute_parallel--Results] [GOOD] >> test.py::test[dq-wrong_script--Analyze] [SKIPPED] >> test.py::test[dq-wrong_script--Debug] [SKIPPED] >> test.py::test[dq-wrong_script--ForceBlocks] >> test.py::test[action-subquery_assumeorderby-default.txt-Results] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-Debug] >> test.py::test[dq-wrong_script--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script--Plan] [SKIPPED] >> test.py::test[dq-wrong_script--Results] [SKIPPED] >> test.py::test[expr-cast_dynumber-default.txt-Analyze] >> test.py::test[join-pullup_rownumber--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left-off-Debug] [SKIPPED] >> test.py::test[join-pushdown_filter_over_left-off-Plan] [SKIPPED] >> test.py::test[join-pushdown_filter_over_left-off-Results] >> test.py::test[hor_join-filters--Results] [GOOD] >> test.py::test[hor_join-out_sampling--Debug] >> test.py::test[pg-sublink_having_exists-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Debug] >> test.py::test[join-pushdown_filter_over_left-off-Results] [SKIPPED] >> test.py::test[join-selfjoin_on_sorted_with_rename--Debug] >> test.py::test[union-union_column_extention-default.txt-Results] [GOOD] >> test.py::test[union-union_positional_mix-default.txt-Debug] >> test.py::test[pg-select_win_ntile-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-ForceBlocks] >> test.py::test[json-json_value/returning-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-8223_direct_row_and_skipnullmembers--Results] [GOOD] >> test.py::test[optimizers-yson_dup_serialize--Debug] >> test.py::test[json-jsondocument/select--Debug] >> test.py::test[action-export_action--Results] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-Analyze] [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-Analyze] [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-Debug] >> test.py::test[action-runtime_for_select-default.txt-Debug] >> test.py::test[join-star_join_inners_vk_sorted-off-Debug] >> test.py::test[tpch-q2-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q2-default.txt-Plan] [GOOD] >> test.py::test[tpch-q2-default.txt-Results] >> test.py::test[join-mapjoin_partial_uniq_keys--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys--Plan] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] >> test.py::test[pg-select_subquery_scalar2_qstar-default.txt-Results] [GOOD] >> test.py::test[insert-drop_sortness-desc-Results] [GOOD] >> test.py::test[insert-keepmeta-view_fail-Debug] [SKIPPED] >> test.py::test[pg-set_of_as_records--Analyze] [SKIPPED] >> test.py::test[pg-set_of_as_records--Debug] [SKIPPED] >> test.py::test[pg-set_of_as_records--ForceBlocks] [SKIPPED] >> test.py::test[pg-set_of_as_records--Plan] [SKIPPED] >> test.py::test[pg-set_of_as_records--Results] [SKIPPED] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Analyze] >> test.py::test[schema-select_field-row_spec-Debug] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Debug] [GOOD] >> test.py::test[schema-select_field-row_spec-ForceBlocks] >> test.py::test[pg-tpcds-q93-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Results] >> test.py::test[hor_join-double_input-default.txt-Debug] [GOOD] >> test.py::test[hor_join-double_input-default.txt-ForceBlocks] >> test.py::test[window-full/aggregations_compact--Analyze] [GOOD] >> test.py::test[window-full/aggregations_compact--Debug] >> test.py::test[union_all-union_all_trivial-default.txt-Debug] 
[GOOD] >> test.py::test[union_all-union_all_trivial-default.txt-Plan] >> test.py::test[insert-keepmeta-view_fail-Plan] [SKIPPED] >> test.py::test[insert-keepmeta-view_fail-Results] >> test.py::test[action-lambda_arg_count-default.txt-Debug] [GOOD] >> test.py::test[action-lambda_arg_count-default.txt-ForceBlocks] >> test.py::test[union_all-union_all_trivial-default.txt-Plan] [GOOD] >> test.py::test[limit-sort_calc_limit--Results] [GOOD] >> test.py::test[lineage-reduce_all-default.txt-Debug] [SKIPPED] >> test.py::test[union_all-union_all_trivial-default.txt-Results] >> test.py::test[lineage-reduce_all-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Results] [SKIPPED] >> test.py::test[match_recognize-test_type-default.txt-Debug] >> test.py::test[like-regexp_clause--Results] [GOOD] >> test.py::test[limit-dynamic_sort_limit--Debug] [SKIPPED] >> test.py::test[limit-dynamic_sort_limit--Plan] [SKIPPED] >> test.py::test[limit-dynamic_sort_limit--Results] [SKIPPED] >> test.py::test[limit-empty_input_after_limit-default.txt-Debug] >> TBtreeIndexTPartLarge::CutKeys [GOOD] >> TBtreeIndexTPartLarge::Group >> test.py::test[pg-join_using4-default.txt-Results] [GOOD] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Debug] [GOOD] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Plan] [GOOD] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Results] >> test.py::test[pg-dates_from_pg-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_full-default.txt-Debug] >> test.py::test[pg-dates_from_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-dates_from_pg-default.txt-Results] >> test.py::test[order_by-presort_order_by_table-default.txt-Debug] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Plan] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] >> test.py::test[key_filter-string_with_legacy--Results] [GOOD] >> test.py::test[lambda-lambda_no_arg-default.txt-Analyze] >> test.py::test[blocks-decimal_op_decimal_scalar--Debug] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Plan] [GOOD] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] >> test.py::test[expr-cast_dynumber-default.txt-Analyze] [GOOD] >> test.py::test[expr-cast_dynumber-default.txt-Debug] >> test.py::test[column_order-winfunc-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-Debug] [GOOD] >> test.py::test[column_order-winfunc-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_opt_args-default.txt-Plan] [GOOD] >> test.py::test[column_order-winfunc-default.txt-Results] >> test.py::test[action-subquery_opt_args-default.txt-Results] >> test.py::test[tpch-q9-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q9-default.txt-Plan] [GOOD] >> test.py::test[tpch-q9-default.txt-Results] >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Debug] >> test.py::test[select-literal_bool-default.txt-Debug] [GOOD] >> test.py::test[select-literal_bool-default.txt-Plan] >> test.py::test[join-mapjoin_partial_uniq_keys--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-Analyze] >> test.py::test[join-selfjoin_on_sorted_with_rename--Debug] [GOOD] >> test.py::test[join-selfjoin_on_sorted_with_rename--Plan] [GOOD] >> 
test.py::test[join-selfjoin_on_sorted_with_rename--Results] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Debug] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Plan] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] >> test.py::test[expr-as_dict_implicit_cast-default.txt-Results] [GOOD] >> test.py::test[pg-dates_from_pg-default.txt-Results] [GOOD] >> test.py::test[expr-backtick_escape-default.txt-Debug] >> test.py::test[select-literal_bool-default.txt-Plan] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] >> test.py::test[union_all-union_all_trivial-default.txt-Results] [GOOD] >> test.py::test[view-all_from_view--Debug] >> test.py::test[union-union_positional_mix-default.txt-Debug] [GOOD] >> test.py::test[union-union_positional_mix-default.txt-Plan] [GOOD] >> test.py::test[union-union_positional_mix-default.txt-Results] >> test.py::test[aggregate-group_by_cube_grouping--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_distinct--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct_compact--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct_compact--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_distinct_compact--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_column_reuse--Debug] >> test.py::test[pg-distinct_all_projection-default.txt-Analyze] >> test.py::test[json-jsondocument/select--Debug] [GOOD] >> test.py::test[json-jsondocument/select--Plan] [GOOD] >> test.py::test[json-jsondocument/select--Results] >> test.py::test[action-runtime_for_select-default.txt-Debug] [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-Debug] [GOOD] >> test.py::test[action-runtime_for_select-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-ForceBlocks] >> test.py::test[action-runtime_for_select-default.txt-Results] >> test.py::test[pg-tpcds-q02-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Analyze] [GOOD] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Debug] >> test.py::test[optimizers-yson_dup_serialize--Debug] [GOOD] >> test.py::test[optimizers-yson_dup_serialize--Plan] [GOOD] >> test.py::test[optimizers-yson_dup_serialize--Results] >> test.py::test[action-subquery_opt_args-default.txt-Results] [GOOD] >> test.py::test[action-subquery_orderby0-default.txt-Debug] >> test.py::test[schema-select_field-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-select_field-row_spec-Plan] [GOOD] >> test.py::test[schema-select_field-row_spec-Results] >> test.py::test[match_recognize-test_type-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-test_type-default.txt-Plan] >> test.py::test[insert-keepmeta-view_fail-Results] [GOOD] >> test.py::test[insert-select_relabel-default.txt-Debug] >> test.py::test[join-join_comp_common_table--Debug] [GOOD] >> test.py::test[join-join_comp_common_table--Plan] [GOOD] >> test.py::test[action-lambda_arg_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-lambda_arg_count-default.txt-Plan] [GOOD] >> test.py::test[action-lambda_arg_count-default.txt-Results] >> test.py::test[match_recognize-test_type-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-test_type-default.txt-Results] >> 
test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test.py::test[params-no_optional_param-default.txt-Debug] >> test.py::test[join-join_comp_common_table--Results] >> test.py::test[pg-select_win_ntile-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Results] >> test.py::test[join-star_join_inners_vk_sorted-off-Debug] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] >> test.py::test[blocks-decimal_op_decimal_scalar--Results] [GOOD] >> test.py::test[blocks-minmax_strings_filter--Debug] >> test.py::test[hor_join-double_input-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Plan] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Results] >> test.py::test[pg-order_by_agg_input_columns_full-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_full-default.txt-Plan] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_full-default.txt-Results] >> test.py::test[join-star_join_inners_vk_sorted-off-ForceBlocks] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted-off-Plan] [GOOD] >> test.py::test[join-star_join_inners_vk_sorted-off-Results] [GOOD] >> test.py::test[join-strict_keys-off-Analyze] [SKIPPED] >> test.py::test[join-strict_keys-off-Debug] [SKIPPED] >> test.py::test[join-strict_keys-off-ForceBlocks] [SKIPPED] >> test.py::test[join-strict_keys-off-Plan] [SKIPPED] >> test.py::test[join-strict_keys-off-Results] >> test.py::test[join-selfjoin_on_sorted_with_rename--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-Debug] [SKIPPED] >> test.py::test[join-star_join_semionly_premap-off-Plan] [SKIPPED] >> test.py::test[join-star_join_semionly_premap-off-Results] [SKIPPED] >> test.py::test[join-three_equalities_paren--Debug] >> test.py::test[produce-fuse_reduces_with_presort--Results] [GOOD] >> test.py::test[produce-process_multi_in_single_out--Debug] [SKIPPED] >> test.py::test[expr-cast_dynumber-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_dynumber-default.txt-ForceBlocks] >> test.py::test[lambda-lambda_no_arg-default.txt-Analyze] [GOOD] >> test.py::test[lambda-lambda_no_arg-default.txt-Debug] >> test.py::test[agg_phases-min_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases-min_opt-default.txt-Debug] >> test.py::test[json-jsondocument/select--Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[key_filter-contains-default.txt-Debug] >> test.py::test[select-missing_with_nonpersist--Debug] [SKIPPED] >> test.py::test[select-missing_with_nonpersist--Plan] [SKIPPED] >> test.py::test[select-missing_with_nonpersist--Results] >> test.py::test[union-union_positional_mix-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Debug] >> test.py::test[produce-process_multi_in_single_out--Plan] [SKIPPED] >> test.py::test[produce-process_multi_in_single_out--Results] [SKIPPED] >> test.py::test[produce-process_with_python-default.txt-Debug] [SKIPPED] >> test.py::test[produce-process_with_python-default.txt-Plan] [SKIPPED] >> test.py::test[produce-process_with_python-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_all_expr-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Debug] >> 
test.py::test[pg-tpcds-q97-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Results] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[action-runtime_for_select-default.txt-Results] [GOOD] >> test.py::test[select-result_label-default.txt-Debug] >> test.py::test[action-runtime_format_type-default.txt-Debug] >> test.py::test[join-mergejoin_big_primary_unique-off-Analyze] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-Debug] >> test.py::test[pg-tpcds-q19-default.txt-Debug] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Debug] >> test.py::test[optimizers-yson_dup_serialize--Results] [GOOD] >> test.py::test[order_by-native_desc_sort_calc--Debug] [SKIPPED] >> test.py::test[order_by-native_desc_sort_calc--Plan] [SKIPPED] >> test.py::test[order_by-native_desc_sort_calc--Results] [SKIPPED] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Debug] >> test.py::test[expr-backtick_escape-default.txt-Debug] [GOOD] >> test.py::test[action-lambda_arg_count-default.txt-Results] [GOOD] >> test.py::test[expr-backtick_escape-default.txt-Plan] [GOOD] >> test.py::test[expr-backtick_escape-default.txt-Results] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Analyze] >> test.py::test[schema-select_field-row_spec-Results] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Analyze] >> test.py::test[column_order-winfunc-default.txt-Results] [GOOD] >> test.py::test[compute_range-in2-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-in2-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-in2-default.txt-Results] [SKIPPED] >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-Analyze] [GOOD] >> test.py::test[type_v3-non_strict--Analyze] >> test.py::test[pg-distinct_all_projection-default.txt-Debug] >> test.py::test[compute_range-merge_adjacent-default.txt-Debug] [SKIPPED] >> test.py::test[ansi_idents-inplace_yql-default.txt-ForceBlocks] [GOOD] >> test.py::test[compute_range-merge_adjacent-default.txt-Plan] [SKIPPED] >> test.py::test[ansi_idents-inplace_yql-default.txt-Plan] [GOOD] >> test.py::test[compute_range-merge_adjacent-default.txt-Results] [SKIPPED] >> test.py::test[ansi_idents-inplace_yql-default.txt-Results] >> test.py::test[compute_range-preserve_rest_predicates_order-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-preserve_rest_predicates_order-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-preserve_rest_predicates_order-default.txt-Results] [SKIPPED] >> test.py::test[csee-closure_l2-default.txt-Debug] >> test.py::test[limit-empty_input_after_limit-default.txt-Debug] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt-Plan] >> test.py::test[action-subquery_orderby0-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_orderby0-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_orderby0-default.txt-Results] >> test.py::test[limit-empty_input_after_limit-default.txt-Plan] [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] >> test.py::test[tpch-q9-default.txt-Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs2--Analyze] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Debug] [SKIPPED] >> 
test.py::test[udf-named_args_for_script_with_posargs2--ForceBlocks] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Plan] [SKIPPED] >> test.py::test[udf-named_args_for_script_with_posargs2--Results] [SKIPPED] >> test.py::test[udf-python_script--Analyze] [SKIPPED] >> test.py::test[udf-python_script--Debug] [SKIPPED] >> test.py::test[udf-python_script--ForceBlocks] [SKIPPED] >> test.py::test[udf-python_script--Plan] [SKIPPED] >> test.py::test[udf-python_script--Results] [SKIPPED] >> test.py::test[weak_field-few_source_different_columns--Analyze] >> test.py::test[pg-order_by_agg_input_columns_full-default.txt-Results] [GOOD] >> test.py::test[pg-pg_array_compare-default.txt-Debug] >> test.py::test[window-full/aggregations_compact--Debug] [GOOD] >> test.py::test[window-full/aggregations_compact--ForceBlocks] >> test.py::test[view-all_from_view--Debug] [GOOD] >> test.py::test[view-all_from_view--Plan] [GOOD] >> test.py::test[view-all_from_view--Results] >> test.py::test[params-no_optional_param-default.txt-Debug] [GOOD] >> test.py::test[params-no_optional_param-default.txt-Plan] [GOOD] >> test.py::test[params-no_optional_param-default.txt-Results] >> test.py::test[hor_join-double_input-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_ntile-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Analyze] >> test.py::test[hor_join-fuse_multi_outs1--Analyze] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1--Debug] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1--Plan] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs1--Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-Analyze] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-Debug] [SKIPPED] >> test.py::test[insert-select_relabel-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-test_type-default.txt-Results] [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-Debug] >> test.py::test[insert-select_relabel-default.txt-Plan] [GOOD] >> test.py::test[insert-select_relabel-default.txt-Results] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-ForceBlocks] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-Plan] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-less_outs--Analyze] [SKIPPED] >> test.py::test[hor_join-less_outs--Debug] [SKIPPED] >> test.py::test[hor_join-less_outs--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-less_outs--Plan] [SKIPPED] >> test.py::test[hor_join-less_outs--Results] [SKIPPED] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Analyze] >> test.py::test[expr-backtick_escape-default.txt-Results] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Debug] >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-Debug] >> test.py::test[lambda-lambda_no_arg-default.txt-Debug] [GOOD] >> test.py::test[lambda-lambda_no_arg-default.txt-ForceBlocks] >> test.py::test[agg_phases-min_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-min_opt-default.txt-Results] >> test.py::test[join-strict_keys-off-Results] [GOOD] >> test.py::test[join-trivial_view--Analyze] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Debug] [GOOD] >> 
test.py::test[produce-reduce_lambda_list_mem-default.txt-Plan] [GOOD] >> test.py::test[select-result_label-default.txt-Debug] [GOOD] >> test.py::test[select-result_label-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-Results] [GOOD] >> test.py::test[bigdate-explicit_cast-default.txt-Analyze] >> test.py::test[schema-yamred_dsv_select_from_dict--Debug] [GOOD] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] >> test.py::test[action-runtime_format_type-default.txt-Debug] [GOOD] >> test.py::test[action-runtime_format_type-default.txt-Plan] [GOOD] >> test.py::test[select-result_label-default.txt-Results] >> test.py::test[action-runtime_format_type-default.txt-Results] >> test.py::test[action-subquery_orderby0-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Plan] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Plan] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] >> test.py::test[expr-cast_dynumber-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-cast_dynumber-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_dynumber-default.txt-Results] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Analyze] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Debug] >> test.py::test[agg_apply-max-default.txt-Debug] >> test.py::test[pg-tpcds-q19-default.txt-Results] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Analyze] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Debug] >> test.py::test[params-no_optional_param-default.txt-Results] [GOOD] >> test.py::test[params-param_in_json_api--Debug] >> test.py::test[view-all_from_view--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Debug] >> test.py::test[type_v3-non_strict--Analyze] [GOOD] >> test.py::test[type_v3-non_strict--Debug] >> test.py::test[csee-closure_l2-default.txt-Debug] [GOOD] >> test.py::test[csee-closure_l2-default.txt-Plan] [GOOD] >> test.py::test[csee-closure_l2-default.txt-Results] >> test.py::test[pg-distinct_all_projection-default.txt-Debug] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_big_primary_unique-off-Debug] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_big_primary_unique-off-Plan] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique-off-Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry--Analyze] >> test.py::test[key_filter-contains-default.txt-Debug] [GOOD] >> test.py::test[key_filter-contains-default.txt-Plan] [GOOD] >> test.py::test[key_filter-contains-default.txt-Results] >> test.py::test[union_all-mix_map_and_project-trivial_map-Debug] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Plan] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] >> test.py::test[hor_join-out_sampling--Debug] [GOOD] >> test.py::test[hor_join-out_sampling--Plan] [GOOD] >> test.py::test[hor_join-out_sampling--Results] >> test.py::test[weak_field-few_source_different_columns--Analyze] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Debug] 
|73.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test.py::test[limit-empty_input_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_by--Debug] >> test.py::test[insert-select_relabel-default.txt-Results] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Debug] >> test.py::test[pg-pg_array_compare-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_array_compare-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_array_compare-default.txt-Results] >> test.py::test[join-three_equalities_paren--Debug] [GOOD] >> test.py::test[join-three_equalities_paren--Plan] [GOOD] >> test.py::test[join-three_equalities_paren--Results] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> test.py::test[select-anon_clash--Debug] [SKIPPED] >> test.py::test[select-anon_clash--Plan] [SKIPPED] >> test.py::test[select-anon_clash--Results] >> test.py::test[pg-sublink_projection_array-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Debug] >> test.py::test[expr-cast_reverse_list-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_reverse_list-default.txt-Results] >> test.py::test[select-result_label-default.txt-Results] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-Debug] >> test.py::test[action-runtime_format_type-default.txt-Results] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Debug] >> test.py::test[blocks-date_less_scalar--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Debug] >> test.py::test[optimizers-coalesce_propagate-default.txt-Debug] [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-Plan] [GOOD] >> test.py::test[optimizers-coalesce_propagate-default.txt-Results] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Analyze] [GOOD] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Debug] >> test.py::test[expr-cast_dynumber-default.txt-Results] [GOOD] >> test.py::test[expr-dict_comp-default.txt-Analyze] >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Debug] >> test.py::test[produce-reduce_lambda_list_mem-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Debug] >> test.py::test[csee-closure_l2-default.txt-Results] [GOOD] >> test.py::test[csee-l2_dup_l1-default.txt-Debug] >> test.py::test[join-trivial_view--Analyze] [GOOD] >> test.py::test[join-trivial_view--Debug] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Debug] [GOOD] >> test.py::test[pg-sort_nulls_priority_window-default.txt-ForceBlocks] >> test.py::test[agg_phases-avg_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-count-default.txt-Debug] >> test.py::test[agg_apply-max-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-max-default.txt-Plan] >> test.py::test[bigdate-explicit_cast-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-explicit_cast-default.txt-Debug] >> test.py::test[lambda-lambda_no_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_no_arg-default.txt-Plan] [GOOD] >> test.py::test[lambda-lambda_no_arg-default.txt-Results] >> test.py::test[agg_apply-max-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-max-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_column_reuse--Debug] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_reuse--Plan] [GOOD] >> 
test.py::test[aggregate-group_by_rollup_column_reuse--Results] >> test.py::test[params-param_in_json_api--Debug] [GOOD] >> test.py::test[params-param_in_json_api--Plan] [GOOD] >> test.py::test[params-param_in_json_api--Results] >> test.py::test[pg-pg_array_compare-default.txt-Results] [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-Debug] >> test.py::test[key_filter-contains-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_or_data--Debug] >> test.py::test[join-mergejoin_choose_primary_with_retry--Analyze] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry--Debug] >> test.py::test[expr-cast_reverse_list-default.txt-Results] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Debug] >> test.py::test[type_v3-non_strict--Debug] [GOOD] >> test.py::test[type_v3-non_strict--ForceBlocks] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-Debug] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Debug] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-ForceBlocks] >> test.py::test[weak_field-optimize_weak_fields_map--Debug] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Plan] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map--Results] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Debug] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-ForceBlocks] >> test.py::test[pg-distinct_all_projection-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-Plan] [GOOD] >> test.py::test[pg-distinct_all_projection-default.txt-Results] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Debug] >> test.py::test[blocks-minmax_strings_filter--Debug] [GOOD] >> test.py::test[window-full/aggregations_compact--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_compact--Plan] [GOOD] >> test.py::test[window-full/aggregations_compact--Results] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Results] >> test.py::test[optimizers-coalesce_propagate-default.txt-Results] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Debug] >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Debug] >> test.py::test[blocks-minmax_strings_filter--Plan] [GOOD] >> test.py::test[blocks-minmax_strings_filter--Results] >> test.py::test[expr-dict_comp-default.txt-Analyze] [GOOD] >> test.py::test[expr-dict_comp-default.txt-Debug] >> test.py::test[lineage-flatten_by--Debug] [GOOD] >> test.py::test[lineage-flatten_by--Plan] [GOOD] >> test.py::test[lineage-flatten_by--Results] >> test.py::test[params-param_in_json_api--Results] [GOOD] >> test.py::test[params-primitives--Debug] >> test.py::test[weak_field-few_source_different_columns--Debug] [GOOD] >> test.py::test[weak_field-few_source_different_columns--ForceBlocks] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Debug] [GOOD] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-ForceBlocks] >> test.py::test[lambda-lambda_no_arg-default.txt-Results] [GOOD] >> test.py::test[like-like_multiline-default.txt-Analyze] >> test.py::test[csee-l2_dup_l1-default.txt-Debug] [GOOD] 
>> test.py::test[pg-tpcds-q25-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Plan] >> test.py::test[csee-l2_dup_l1-default.txt-Plan] [GOOD] >> test.py::test[csee-l2_dup_l1-default.txt-Results] >> test.py::test[agg_apply-max-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_interval-default.txt-Debug] >> test.py::test[insert-select_with_sort_limit-default.txt-Debug] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Plan] [GOOD] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] >> test.py::test[pg-sublink_projection_array-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_map--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Debug] >> test.py::test[pg-distinct_all_projection-default.txt-Results] [GOOD] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-Analyze] >> test.py::test[join-three_equalities_paren--Results] [GOOD] >> test.py::test[join-trivial_view-off-Debug] [SKIPPED] >> test.py::test[join-trivial_view-off-Plan] [SKIPPED] >> test.py::test[join-trivial_view-off-Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Debug] >> test.py::test[agg_phases_agg_apply-count-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-count-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-count-default.txt-Results] >> test.py::test[join-trivial_view--Debug] [GOOD] >> test.py::test[join-trivial_view--ForceBlocks] >> test.py::test[bigdate-explicit_cast-default.txt-Debug] [GOOD] >> test.py::test[bigdate-explicit_cast-default.txt-ForceBlocks] >> test.py::test[action-subquery_extend_over_extend_for-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry--Debug] [GOOD] >> test.py::test[agg_apply-opt_len_count_distinct-default.txt-Debug] >> test.py::test[join-mergejoin_choose_primary_with_retry--ForceBlocks] >> test.py::test[pg-pg_corr_count-default.txt-Debug] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Plan] [GOOD] >> test.py::test[key_filter-is_null_or_data--Debug] [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_corr_count-default.txt-Results] >> test.py::test[csee-l2_dup_l1-default.txt-Results] [GOOD] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Debug] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] >> test.py::test[expr-common_type_for_resource_and_data--Debug] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Plan] [GOOD] >> test.py::test[expr-common_type_for_resource_and_data--Results] >> test.py::test[key_filter-is_null_or_data--Plan] [GOOD] >> test.py::test[key_filter-is_null_or_data--Results] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Plan] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] >> test.py::test[type_v3-non_strict--ForceBlocks] [GOOD] >> test.py::test[type_v3-non_strict--Plan] [GOOD] >> test.py::test[type_v3-non_strict--Results] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[insert-two_input_tables--Debug] >> 
test.py::test[like-like_multiline-default.txt-Analyze] [GOOD] >> test.py::test[like-like_multiline-default.txt-Debug] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Debug] >> test.py::test[aggregate-group_by_rollup_column_reuse--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Debug] >> test.py::test[agg_apply-sum_interval-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_interval-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] >> test.py::test[blocks-minmax_strings_filter--Results] [GOOD] >> test.py::test[blocks-not_opt--Debug] >> test.py::test[agg_apply-sum_interval-default.txt-Results] >> test.py::test[select-corr_name_in_select-default.txt-Debug] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Plan] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Results] >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[hor_join-yield_off-default.txt-Debug] [SKIPPED] >> test.py::test[hor_join-yield_off-default.txt-Plan] [SKIPPED] >> test.py::test[hor_join-yield_off-default.txt-Results] [SKIPPED] >> test.py::test[in-in_ansi_dict-default.txt-Debug] >> test.py::test[weak_field-few_source_different_columns--ForceBlocks] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Plan] [GOOD] >> test.py::test[weak_field-few_source_different_columns--Results] >> test.py::test[agg_phases-min_opt-default.txt-Results] [GOOD] >> test.py::test[agg_phases-sum_opt-default.txt-Debug] >> test.py::test[lineage-flatten_by--Results] [GOOD] >> test.py::test[lineage-member_over_if_struct-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-member_over_if_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-unordered_subquery-default.txt-Plan] [SKIPPED] >> test.py::test[pg-tpch-q07-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q07-default.txt-Results] >> test.py::test[lineage-unordered_subquery-default.txt-Results] [SKIPPED] >> test.py::test[match_recognize-alerts_without_order-default.txt-Debug] >> test.py::test[optimizers-combinebykey_fields_subset_range--Debug] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Plan] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] >> test.py::test[select-trivial_group_by-default.txt-Debug] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-Plan] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Plan] [GOOD] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Results] >> test.py::test[pg-sublink_projection_array-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_projection_array-default.txt-Results] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-Analyze] [GOOD] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-Debug] >> test.py::test[produce-reduce_multi_in_difftype--Debug] [GOOD] >> 
test.py::test[produce-reduce_multi_in_difftype--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Results] >> test.py::test[expr-dict_comp-default.txt-Debug] [GOOD] >> test.py::test[expr-dict_comp-default.txt-ForceBlocks] >> test.py::test[blocks-date_sub_interval_scalar--Debug] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Plan] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--Results] >> test.py::test[action-select_from_subquery_with_orderby-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Analyze] >> test.py::test[join-trivial_view--ForceBlocks] [GOOD] >> test.py::test[join-trivial_view--Plan] [GOOD] >> test.py::test[join-trivial_view--Results] >> test.py::test[params-primitives--Debug] [GOOD] >> test.py::test[params-primitives--Plan] [GOOD] >> test.py::test[params-primitives--Results] >> test.py::test[expr-common_type_for_resource_and_data--Results] [GOOD] >> test.py::test[expr-dict_common_type--Debug] [SKIPPED] >> test.py::test[expr-dict_common_type--Plan] [SKIPPED] >> test.py::test[expr-dict_common_type--Results] [SKIPPED] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Debug] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_existing_column--Analyze] >> test.py::test[join-mergejoin_choose_primary_with_retry--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry--Plan] >> test.py::test[order_by-order_with_null-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_with_null-default.txt-Results] >> test.py::test[join-mergejoin_choose_primary_with_retry--Plan] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] >> test.py::test[window-full/aggregations_compact--Results] [GOOD] >> test.py::test[window-full/leadlag--Analyze] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Debug] [GOOD] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Plan] >> test.py::test[key_filter-is_null_or_data--Results] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--Debug] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Plan] [GOOD] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Results] >> test.py::test[type_v3-non_strict--Results] [GOOD] >> test.py::test[view-secure_eval--Analyze] [SKIPPED] >> test.py::test[view-secure_eval--Debug] [SKIPPED] >> test.py::test[view-secure_eval--ForceBlocks] [SKIPPED] >> test.py::test[view-secure_eval--Plan] [SKIPPED] >> test.py::test[view-secure_eval--Results] >> test.py::test[pg-sort_nulls_priority_window-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Plan] [GOOD] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Results] >> test.py::test[bigdate-explicit_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-explicit_cast-default.txt-Plan] [GOOD] >> test.py::test[bigdate-explicit_cast-default.txt-Results] >> test.py::test[agg_apply-opt_len_count_distinct-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-opt_len_count_distinct-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-opt_len_count_distinct-default.txt-Results] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Results] [GOOD] >> test.py::test[in-in_tablesource_on_raw_list--Analyze] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--Debug] >> test.py::test[agg_apply-sum_interval-default.txt-Results] [GOOD] >> 
test.py::test[agg_apply-table--Debug] >> test.py::test[in-in_tablesource_on_raw_list--Debug] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--ForceBlocks] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--Plan] [SKIPPED] >> test.py::test[in-in_tablesource_on_raw_list--Results] [SKIPPED] >> test.py::test[in-in_tuple_table-default.txt-Analyze] >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Results] >> test.py::test[select-corr_name_in_select-default.txt-Results] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Debug] >> test.py::test[pg-pg_corr_count-default.txt-Results] [GOOD] >> test.py::test[pg-pg_iterate-default.txt-Debug] >> test.py::test[weak_field-few_source_different_columns--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Analyze] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Debug] >> test.py::test[pg-sublink_projection_array-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Analyze] >> test.py::test[params-primitives--Results] [GOOD] >> test.py::test[pg-aggregate_combine_all--Debug] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Debug] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Plan] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] >> test.py::test[in-in_ansi_dict-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-file_outer--Debug] [SKIPPED] >> test.py::test[view-file_outer--Plan] [SKIPPED] >> test.py::test[view-file_outer--Results] >> test.py::test[match_recognize-alerts_without_order-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-alerts_without_order-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-alerts_without_order-default.txt-Results] >> test.py::test[in-in_ansi_dict-default.txt-Plan] [GOOD] >> test.py::test[in-in_ansi_dict-default.txt-Results] >> test.py::test[blocks-not_opt--Debug] [GOOD] >> test.py::test[blocks-not_opt--Plan] [GOOD] >> test.py::test[blocks-not_opt--Results] >> test.py::test[csee-nested_closure_in_l2_and_l1-default.txt-Results] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Debug] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Results] >> test.py::test[weak_field-weak_field_join_condition--Debug] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Plan] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Results] >> test.py::test[action-subquery_merge_nested_subquery--Analyze] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Debug] >> test.py::test[like-like_multiline-default.txt-Debug] [GOOD] >> test.py::test[like-like_multiline-default.txt-ForceBlocks] >> test.py::test[agg_phases-sum_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-sum_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-sum_opt-default.txt-Results] |73.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |73.7%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[seq_mode-subquery_shared_subquery-default.txt-Debug] >> test.py::test[schema-user_schema_existing_column--Analyze] [GOOD] >> test.py::test[schema-user_schema_existing_column--Debug] |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |73.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test.py::test[produce-reduce_multi_in_difftype--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Debug] >> test.py::test[insert-two_input_tables--Debug] [GOOD] >> test.py::test[insert-two_input_tables--Plan] [GOOD] >> test.py::test[insert-two_input_tables--Results] >> test.py::test[bigdate-explicit_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Analyze] >> test.py::test[agg_apply-opt_len_count_distinct-default.txt-Results] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Debug] >> test.py::test[view-secure_eval--Results] [GOOD] >> test.py::test[view-trivial_view_concat--Analyze] >> test.py::test[join-mergejoin_choose_primary_with_retry--Results] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Analyze] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-Debug] [GOOD] >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-ForceBlocks] >> test.py::test[join-join_no_correlation_in_order_by-off-Debug] [SKIPPED] >> test.py::test[join-join_no_correlation_in_order_by-off-Plan] [SKIPPED] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] >> test.py::test[in-in_tuple_table-default.txt-Analyze] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Debug] >> test.py::test[window-full/leadlag--Analyze] [GOOD] >> test.py::test[window-full/leadlag--Debug] >> test.py::test[order_by-order_with_null-default.txt-Results] [GOOD] >> test.py::test[expr-expr_named_yql_lambda_quotes-default.txt-Results] [GOOD] >> test.py::test[order_by-union_all--Debug] >> test.py::test[expr-expr_yql_data-default.txt-Debug] >> test.py::test[join-trivial_view--Results] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Debug] >> test.py::test[join-trivial_view-off-Analyze] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_only_semi_and_other--Debug] >> test.py::test[blocks-date_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_comparison--Debug] >> test.py::test[match_recognize-alerts_without_order-default.txt-Results] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Debug] >> test.py::test[key_filter-part_key_over_dynamic--Debug] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--Plan] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--Results] >> test.py::test[blocks-not_opt--Results] [GOOD] >> test.py::test[blocks-pg_sort--Debug] >> test.py::test[pg-pg_iterate-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_iterate-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_iterate-default.txt-Results] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[view-file_outer_library--Debug] [SKIPPED] >> 
test.py::test[agg_phases_agg_apply-count-default.txt-Results] [GOOD] >> test.py::test[view-file_outer_library--Plan] [SKIPPED] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Debug] >> test.py::test[view-file_outer_library--Results] >> test.py::test[expr-dict_comp-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-dict_comp-default.txt-Plan] [GOOD] >> test.py::test[expr-dict_comp-default.txt-Results] >> test.py::test[pg-sublink_where_any_corr-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Debug] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Analyze] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Debug] >> test.py::test[in-in_ansi_dict-default.txt-Results] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt-Debug] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Debug] [GOOD] >> test.py::test[agg_apply-table--Debug] [GOOD] >> test.py::test[agg_apply-table--Plan] [GOOD] >> test.py::test[agg_apply-table--Results] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Debug] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Plan] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test.py::test[select-dict_with_few_keys-default.txt-Debug] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Plan] [GOOD] >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> test.py::test[seq_mode-subquery_shared_subquery-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Debug] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--ForceBlocks] >> test.py::test[insert-two_input_tables--Results] [GOOD] >> test.py::test[seq_mode-subquery_shared_subquery-default.txt-Plan] [GOOD] >> test.py::test[seq_mode-subquery_shared_subquery-default.txt-Results] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Debug] [GOOD] >> test.py::test[like-like_multiline-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_multiline-default.txt-Plan] [GOOD] >> test.py::test[like-like_multiline-default.txt-Results] >> test.py::test[csee-same_l1_expr-default.txt-Debug] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Plan] [GOOD] >> test.py::test[csee-same_l1_expr-default.txt-Results] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] [GOOD] >> test.py::test[join-yql-19081--Debug] [SKIPPED] >> test.py::test[insert-udf_empty--Debug] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Plan] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Analyze] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Debug] >> test.py::test[join-yql-19081--Plan] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Analyze] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Results] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Debug] >> test.py::test[pg-tpch-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Debug] >> test.py::test[weak_field-weak_field_join_condition--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Debug] >> test.py::test[join-yql-19081--Results] [SKIPPED] >> test.py::test[join-yql-8125--Debug] >> test.py::test[pg-sort_nulls_priority_window-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_having_any-default.txt-Analyze] >> test.py::test[pg-aggregate_combine_all--Debug] [GOOD] >> test.py::test[pg-aggregate_combine_all--Plan] [GOOD] >> 
test.py::test[pg-aggregate_combine_all--Results] >> test.py::test[schema-user_schema_existing_column--Debug] [GOOD] >> test.py::test[schema-user_schema_existing_column--ForceBlocks] >> test.py::test[view-trivial_view_concat--Analyze] [GOOD] >> test.py::test[view-trivial_view_concat--Debug] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-Plan] [GOOD] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-Results] >> test.py::test[join-trivial_view-off-Analyze] [GOOD] >> test.py::test[join-trivial_view-off-Debug] >> test.py::test[key_filter-part_key_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-ranges--Debug] >> test.py::test[expr-expr_yql_data-default.txt-Debug] [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-Plan] [GOOD] >> test.py::test[expr-expr_yql_data-default.txt-Results] >> test.py::test[in-in_tuple_table-default.txt-Debug] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q62-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q62-default.txt-Results] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[view-secure--Debug] [SKIPPED] >> test.py::test[view-secure--Plan] [SKIPPED] >> test.py::test[view-secure--Results] >> test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--Debug] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--Plan] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads--Debug] >> test.py::test[select-digits--Debug] >> test.py::test[blocks-decimal_comparison--Debug] [GOOD] >> test.py::test[blocks-decimal_comparison--Plan] [GOOD] >> test.py::test[pg-pg_iterate-default.txt-Results] [GOOD] >> test.py::test[pg-pg_types_window1-default.txt-Debug] >> test.py::test[blocks-decimal_comparison--Results] >> test.py::test[expr-dict_comp-default.txt-Results] [GOOD] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Analyze] >> test.py::test[window-full/leadlag--Debug] [GOOD] >> test.py::test[window-full/leadlag--ForceBlocks] >> test.py::test[csee-same_l1_expr-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_bytes-default.txt-Debug] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Debug] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] >> test.py::test[like-like_multiline-default.txt-Results] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Analyze] >> test.py::test[blocks-pg_sort--Debug] [GOOD] >> test.py::test[blocks-pg_sort--Plan] [GOOD] >> test.py::test[blocks-pg_sort--Results] >> test.py::test[seq_mode-subquery_shared_subquery-default.txt-Results] [GOOD] >> test.py::test[agg_apply-opt_sum_divide_by_zero-default.txt-Results] [GOOD] >> test.py::test[agg_apply-some_notnull-default.txt-Debug] >> test.py::test[optimizers-direct_row_after_merge--Debug] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Plan] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Results] >> test.py::test[solomon-BadDownsamplingAggregation--Debug] [SKIPPED] >> test.py::test[solomon-BadDownsamplingAggregation--Plan] [SKIPPED] >> test.py::test[solomon-BadDownsamplingAggregation--Results] >> 
test.py::test[aggregate-group_by_ru_join_agg--Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_join_agg--Results] >> test.py::test[solomon-BadDownsamplingAggregation--Results] [SKIPPED] >> test.py::test[solomon-Downsampling-default.txt-Debug] [SKIPPED] >> test.py::test[solomon-Downsampling-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-Downsampling-default.txt-Results] [SKIPPED] >> test.py::test[solomon-UnknownSetting--Debug] [SKIPPED] >> test.py::test[solomon-UnknownSetting--Plan] [SKIPPED] >> test.py::test[solomon-UnknownSetting--Results] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Debug] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Plan] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-each_with_non_existing--Debug] >> TPDiskRaces::Decommit [GOOD] >> TPDiskRaces::DecommitWithInflight >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] >> test.py::test[agg_apply-table--Results] [GOOD] >> test.py::test[agg_phases-count-default.txt-Debug] >> test.py::test[insert-udf_empty--Debug] [GOOD] >> test.py::test[insert-udf_empty--Plan] [GOOD] >> test.py::test[insert-udf_empty--Results] >> test.py::test[pg-tpcds-q62-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Debug] >> test.py::test[order_by-union_all--Debug] [GOOD] >> test.py::test[order_by-union_all--Plan] [GOOD] >> test.py::test[order_by-union_all--Results] >> test.py::test[pg-distinct_on_single_projection_order-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_multiple3-default.txt-Analyze] >> test.py::test[pg-aggregate_combine_all--Results] [GOOD] >> test.py::test[pg-aggregate_emit_agg_apply-default.txt-Debug] >> test.py::test[pg-sublink_where_any_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-ForceBlocks] >> test.py::test[action-subquery_merge_nested_subquery--ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Plan] [GOOD] >> test.py::test[action-subquery_merge_nested_subquery--Results] >> test.py::test[expr-expr_yql_data-default.txt-Results] [GOOD] >> test.py::test[expr-int_literals_negative-default.txt-Debug] >> test.py::test[pg-sublink_having_any-default.txt-Analyze] [GOOD] >> test.py::test[pg-sublink_having_any-default.txt-Debug] >> test.py::test[agg_phases-sum_opt-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Debug] >> test.py::test[bigdate-int_cast-default.txt-Debug] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-ForceBlocks] >> test.py::test[in-in_tuple_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Plan] >> test.py::test[in-in_tuple_table-default.txt-Plan] [GOOD] >> test.py::test[in-in_tuple_table-default.txt-Results] >> test.py::test[in-in_immediate_subquery-default.txt-Debug] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt-Plan] [GOOD] >> 
test.py::test[schema-user_schema_existing_column--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_existing_column--Plan] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] >> test.py::test[in-in_immediate_subquery-default.txt-Results] >> test.py::test[schema-user_schema_existing_column--Results] >> test.py::test[insert-udf_empty--Results] [GOOD] >> test.py::test[insert-yql-14538--Debug] >> test.py::test[join-trivial_view-off-Debug] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Results] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--Debug] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Debug] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-ForceBlocks] >> test.py::test[join-trivial_view-off-ForceBlocks] [SKIPPED] >> test.py::test[join-trivial_view-off-Plan] [GOOD] >> test.py::test[join-trivial_view-off-Results] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-Analyze] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Analyze] [GOOD] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Debug] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Plan] [GOOD] >> test.py::test[join-mergejoin_choose_primary_with_retry-off-Results] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Analyze] >> test.py::test[action-subquery_merge_nested_subquery--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Debug] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Plan] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Results] >> test.py::test[view-secure--Results] [GOOD] >> test.py::test[view-trivial_view_concat--Debug] >> test.py::test[optimizers-unused_columns_window_no_payloads--Debug] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Plan] >> test.py::test[action-subquery_orderby1-default.txt-Analyze] >> test.py::test[join-left_only_semi_and_other--Debug] [GOOD] >> test.py::test[join-left_only_semi_and_other--Plan] >> test.py::test[optimizers-unused_columns_window_no_payloads--Plan] [GOOD] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] >> test.py::test[key_filter-ranges--Debug] [GOOD] >> test.py::test[key_filter-ranges--Plan] [GOOD] >> test.py::test[key_filter-ranges--Results] >> test.py::test[pg-pg_types_window1-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_types_window1-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_types_window1-default.txt-Results] >> test.py::test[limit-zero_limit-default.txt-Analyze] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Debug] >> test.py::test[select-digits--Debug] [GOOD] >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test.py::test[select-digits--Plan] [GOOD] >> test.py::test[select-digits--Results] >> test.py::test[join-left_only_semi_and_other--Plan] [GOOD] >> test.py::test[join-left_only_semi_and_other--Results] >> test.py::test[column_group-insert_diff_groups1_fail--Debug] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--Plan] [SKIPPED] >> test.py::test[column_group-insert_diff_groups1_fail--Results] [SKIPPED] >> test.py::test[column_order-insert_with_new_cols--Debug] >> test.py::test[view-trivial_view_concat--Debug] [GOOD] >> test.py::test[view-trivial_view_concat--ForceBlocks] >> 
test.py::test[table_range-each_with_non_existing--Debug] [GOOD] >> test.py::test[table_range-each_with_non_existing--Plan] >> test.py::test[blocks-decimal_comparison--Results] [GOOD] >> test.py::test[blocks-distinct_pure_all--Debug] >> test.py::test[table_range-each_with_non_existing--Plan] [GOOD] >> test.py::test[table_range-each_with_non_existing--Results] >> test.py::test[agg_apply-some_notnull-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-some_notnull-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-some_notnull-default.txt-Results] >> test.py::test[expr-int_literals_negative-default.txt-Debug] [GOOD] >> test.py::test[expr-int_literals_negative-default.txt-Plan] [GOOD] >> test.py::test[expr-int_literals_negative-default.txt-Results] >> test.py::test[pg-join_using_multiple3-default.txt-Analyze] [GOOD] >> test.py::test[pg-join_using_multiple3-default.txt-Debug] >> test.py::test[sampling-mapjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Debug] >> test.py::test[in-in_tuple_table-default.txt-Results] [GOOD] >> test.py::test[schema-user_schema_existing_column--Results] [GOOD] >> test.py::test[schema-user_schema_mix1--Analyze] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Analyze] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> test.py::test[agg_phases-count-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-count-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-count-default.txt-Results] >> test.py::test[pg-tpcds-q81-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q81-default.txt-Results] >> test.py::test[datetime-date_tz_bytes-default.txt-Debug] [GOOD] >> test.py::test[datetime-date_tz_bytes-default.txt-Plan] >> test.py::test[order_by-union_all--Results] [GOOD] >> test.py::test[params-param_in_in_predicate--Debug] >> test.py::test[datetime-date_tz_bytes-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_tz_bytes-default.txt-Results] >> test.py::test[pg-aggregate_emit_agg_apply-default.txt-Debug] [GOOD] >> test.py::test[pg-aggregate_emit_agg_apply-default.txt-Plan] [GOOD] >> test.py::test[pg-aggregate_emit_agg_apply-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Plan] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] >> test.py::test[bigdate-int_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Plan] [GOOD] >> test.py::test[bigdate-int_cast-default.txt-Results] >> test.py::test[agg_apply-some_notnull-default.txt-Results] [GOOD] >> test.py::test[agg_phases-avg-default.txt-Debug] >> test.py::test[pg-sublink_having_any-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_having_any-default.txt-ForceBlocks] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Results] >> test.py::test[json-json_query/common_syntax-default.txt-Analyze] [GOOD] >> 
test.py::test[json-json_query/common_syntax-default.txt-Debug] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Debug] >> test.py::test[expr-int_literals_negative-default.txt-Results] [GOOD] >> test.py::test[expr-len--Debug] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Analyze] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Debug] >> test.py::test[expr-exapnd_with_singular_types-default.txt-ForceBlocks] >> test.py::test[window-full/leadlag--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Analyze] [GOOD] >> test.py::test[select-digits--Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Debug] >> test.py::test[produce-reduce_multi_in_difftype_assume_keytuple--Results] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Debug] >> test.py::test[pg-tpch-q10-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Plan] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> test.py::test[window-full/leadlag--Plan] [GOOD] >> test.py::test[window-full/leadlag--Results] >> test.py::test[pg-tpch-q10-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Results] >> test.py::test[table_range-each_with_non_existing--Results] [GOOD] >> test.py::test[table_range-range_with_view--Debug] |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> test.py::test[produce-reduce_multi_in_ref--Debug] >> test.py::test[view-trivial_view_concat--Debug] [GOOD] >> test.py::test[view-trivial_view_concat--Plan] >> test.py::test[limit-zero_limit-default.txt-Debug] [GOOD] >> test.py::test[view-trivial_view_concat--Plan] [GOOD] >> test.py::test[view-trivial_view_concat--Results] >> test.py::test[key_filter-ranges--Results] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Debug] >> test.py::test[join-yql-8125--Debug] [GOOD] >> test.py::test[join-yql-8125--Plan] >> test.py::test[pg-pg_types_window1-default.txt-Results] [GOOD] >> test.py::test[pg-pg_view-default.txt-Debug] >> test.py::test[join-yql-8125--Plan] [GOOD] >> test.py::test[join-yql-8125--Results] >> test.py::test[schema-user_schema_mix1--Analyze] [GOOD] >> test.py::test[schema-user_schema_mix1--Debug] >> test.py::test[limit-zero_limit-default.txt-ForceBlocks] >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[view-trivial_view_concat--ForceBlocks] [GOOD] >> test.py::test[view-trivial_view_concat--Plan] >> test.py::test[in-in_noansi_list_dict-default.txt-Debug] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Analyze] [GOOD] >> test.py::test[view-trivial_view_concat--Plan] [GOOD] >> test.py::test[view-trivial_view_concat--Results] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Debug] >> test.py::test[pg-sublink_where_any_corr-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_where_any_corr-default.txt-Plan] [GOOD] >> test.py::test[insert-yql-14538--Debug] [GOOD] >> test.py::test[insert-yql-14538--Plan] >> test.py::test[pg-sublink_where_any_corr-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_agg--Results] [GOOD] >> 
test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Debug] >> test.py::test[insert-yql-14538--Plan] [GOOD] >> test.py::test[insert-yql-14538--Results] >> test.py::test[aggr_factory-avg_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Debug] >> test.py::test[weak_field-weak_field_join_where--Results] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Debug] >> test.py::test[pg-aggregate_emit_agg_apply-default.txt-Results] [GOOD] >> test.py::test[pg-distinct_on_multi_projection-default.txt-Debug] >> test.py::test[params-param_in_in_predicate--Debug] [GOOD] >> test.py::test[params-param_in_in_predicate--Plan] [GOOD] >> test.py::test[params-param_in_in_predicate--Results] >> test.py::test[datetime-date_tz_bytes-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Debug] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] >> test.py::test[order_by-order_by_value_desc-default.txt-Debug] >> test.py::test[pg-tpcds-q81-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Debug] >> test.py::test[column_order-insert_with_new_cols--Debug] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Plan] >> test.py::test[json-json_query/common_syntax-default.txt-Debug] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-ForceBlocks] >> test.py::test[column_order-insert_with_new_cols--Plan] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Results] >> test.py::test[bigdate-int_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Analyze] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Analyze] >> test.py::test[blocks-distinct_pure_all--Debug] [GOOD] >> test.py::test[blocks-distinct_pure_all--Plan] [GOOD] >> test.py::test[blocks-distinct_pure_all--Results] >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test.py::test[join-left_only_with_other-off-Debug] [SKIPPED] >> test.py::test[join-left_only_with_other-off-Plan] [SKIPPED] >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-off-Debug] >> test.py::test[join-lookupjoin_bug7646_csee-off-Debug] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_csee-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug8533--Debug] >> test.py::test[pg-join_using_multiple3-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using_multiple3-default.txt-ForceBlocks] >> test.py::test[agg_phases-avg-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-avg-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-avg-default.txt-Results] >> test.py::test[optimizers-yql-17413-topsort--Debug] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--Plan] [GOOD] >> test.py::test[optimizers-yql-17413-topsort--Results] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Plan] [GOOD] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Results] >> test.py::test[expr-exapnd_with_singular_types-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Plan] [GOOD] >> 
test.py::test[expr-exapnd_with_singular_types-default.txt-Results] >> test.py::test[view-trivial_view_concat--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Debug] >> test.py::test[expr-len--Debug] [GOOD] >> test.py::test[expr-len--Plan] [GOOD] >> test.py::test[pg-pg_view-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_view-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_view-default.txt-Results] >> test.py::test[expr-len--Results] >> test.py::test[view-trivial_view_concat--Results] [GOOD] >> test.py::test[weak_field-weak_field_data--Analyze] >> test.py::test[action-subquery_orderby1-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-ForceBlocks] >> test.py::test[agg_phases-count-default.txt-Results] [GOOD] >> test.py::test[agg_phases-percentile_null-default.txt-Debug] >> test.py::test[insert-yql-14538--Results] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Debug] [SKIPPED] >> test.py::test[insert_monotonic-break_sort_fail--Plan] >> test.py::test[pg-sublink_having_any-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-sublink_having_any-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_having_any-default.txt-Results] >> test.py::test[insert_monotonic-break_sort_fail--Plan] [SKIPPED] >> test.py::test[insert_monotonic-break_sort_fail--Results] >> test.py::test[pg-sublink_where_any_corr-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Analyze] >> test.py::test[select-multi_source_issue-default.txt-Debug] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Plan] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Debug] [GOOD] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-ForceBlocks] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> test.py::test[in-in_noansi_list_dict-default.txt-Debug] [GOOD] >> test.py::test[in-in_noansi_list_dict-default.txt-Plan] [GOOD] >> test.py::test[in-in_noansi_list_dict-default.txt-Results] >> test.py::test[schema-user_schema_mix1--Debug] [GOOD] >> test.py::test[schema-user_schema_mix1--ForceBlocks] >> test.py::test[limit-zero_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Plan] >> test.py::test[limit-zero_limit-default.txt-Plan] [GOOD] >> test.py::test[limit-zero_limit-default.txt-Results] >> test.py::test[agg_phases_agg_apply-count_all_opt-default.txt-Results] [GOOD] >> test.py::test[params-param_in_in_predicate--Results] [GOOD] >> test.py::test[sampling-mapjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Debug] >> test.py::test[join-premap_common_left_cross-off-Debug] [GOOD] >> test.py::test[join-premap_common_left_cross-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_common_left_cross-off-Plan] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Debug] >> test.py::test[pg-bit_const-default.txt-Debug] >> test.py::test[join-premap_common_left_cross-off-Plan] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Results] >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[join-premap_common_left_cross-off-Results] [GOOD] >> 
test.py::test[join-premap_common_right_tablecontent-off-Analyze] >> test.py::test[weak_field-weak_field_esc_string--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Debug] >> test.py::test[join-yql-8125-off-Debug] [SKIPPED] >> test.py::test[join-yql-8125-off-Plan] [SKIPPED] >> test.py::test[join-yql-8125-off-Results] [SKIPPED] >> test.py::test[json-jsondocument/json_query-default.txt-Debug] >> test.py::test[pg-distinct_on_multi_projection-default.txt-Debug] [GOOD] >> test.py::test[pg-distinct_on_multi_projection-default.txt-Plan] >> test.py::test[optimizers-yql-10042_disable_fuse_depends_on-default.txt-Results] [GOOD] >> test.py::test[table_range-range_with_view--Debug] [GOOD] >> test.py::test[table_range-range_with_view--Plan] >> test.py::test[pg-distinct_on_multi_projection-default.txt-Plan] [GOOD] >> test.py::test[pg-distinct_on_multi_projection-default.txt-Results] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Debug] >> test.py::test[window-full/leadlag--Results] [GOOD] >> test.py::test[window-full/session_compact--Analyze] >> test.py::test[table_range-range_with_view--Plan] [GOOD] >> test.py::test[table_range-range_with_view--Results] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Results] [GOOD] >> test.py::test[expr-replace_member-default.txt-Analyze] >> test.py::test[weak_field-weak_field_real_col-default.txt-Debug] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Plan] [GOOD] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] >> test.py::test[pg-pg_view-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg_expr_projection-default.txt-Debug] >> test.py::test[bigdate-table_io-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Debug] >> test.py::test[json-json_query/common_syntax-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-Plan] >> test.py::test[pg-type_aliases-default.txt-Debug] >> test.py::test[expr-len--Results] [GOOD] >> test.py::test[expr-list_replicate-default.txt-Debug] >> test.py::test[json-json_query/common_syntax-default.txt-Plan] [GOOD] >> test.py::test[json-json_query/common_syntax-default.txt-Results] >> test.py::test[in-in_noansi_list_dict-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Results] >> test.py::test[column_order-insert_with_new_cols--Results] [GOOD] >> test.py::test[compute_range-decimal-default.txt-Debug] [SKIPPED] >> test.py::test[weak_field-weak_field_data--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_data--Debug] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Debug] >> test.py::test[key_filter-string_with_ff-default.txt-Debug] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Plan] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> test.py::test[insert_monotonic-break_sort_fail--Results] [GOOD] >> test.py::test[insert_monotonic-not_all_fail--Debug] [SKIPPED] >> test.py::test[compute_range-decimal-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-decimal-default.txt-Results] [SKIPPED] >> test.py::test[compute_range-multiply_limit_with_dups-default.txt-Debug] [SKIPPED] >> test.py::test[insert_monotonic-not_all_fail--Plan] [SKIPPED] >> test.py::test[insert_monotonic-not_all_fail--Results] >> 
test.py::test[pg-join_using_multiple3-default.txt-ForceBlocks] [GOOD] >> test.py::test[compute_range-multiply_limit_with_dups-default.txt-Plan] [SKIPPED] >> test.py::test[pg-join_using_multiple3-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using_multiple3-default.txt-Results] >> test.py::test[compute_range-multiply_limit_with_dups-default.txt-Results] [SKIPPED] >> test.py::test[csee-const_body_same_lambda-default.txt-Debug] >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Debug] >> test.py::test[action-subquery_orderby1-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Plan] >> test.py::test[datetime-date_tz_table_sort_asc--Debug] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Plan] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Plan] >> test.py::test[join-lookupjoin_bug8533--Debug] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] >> test.py::test[join-lookupjoin_bug8533--Plan] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Results] >> test.py::test[blocks-distinct_pure_all--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Debug] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Results] >> test.py::test[join-lookupjoin_bug8533--Results] >> test.py::test[pg-sublink_having_any-default.txt-Results] [GOOD] >> test.py::test[pg-table_func-default.txt-Analyze] >> test.py::test[weak_field-weak_field_esc_string--Debug] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Plan] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test.py::test[pg-distinct_on_multi_projection-default.txt-Results] [GOOD] >> test.py::test[pg-distinct_on_single_projection_no_order-default.txt-Debug] >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-list_literal1-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Debug] [SKIPPED] >> test.py::test[optimizers-yql-17413-topsort--Results] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Debug] >> test.py::test[lineage-list_literal1-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Plan] >> test.py::test[pg-bit_const-default.txt-Debug] [GOOD] >> test.py::test[lineage-list_literal1-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-list_literal1-default.txt-Results] [SKIPPED] >> test.py::test[pg-bit_const-default.txt-Plan] [GOOD] >> test.py::test[pg-bit_const-default.txt-Results] >> test.py::test[expr-replace_member-default.txt-Analyze] [GOOD] >> test.py::test[match_recognize-alerts-default.txt-Analyze] >> test.py::test[schema-user_schema_mix1--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_mix1--Plan] >> test.py::test[json-json_query/common_syntax-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Plan] [GOOD] >> 
test.py::test[join-premap_common_right_tablecontent-off-Analyze] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-Debug] >> test.py::test[pg-type_aliases-default.txt-Debug] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Debug] [GOOD] >> test.py::test[pg-type_aliases-default.txt-Plan] >> test.py::test[weak_field-weak_field_esc_string--ForceBlocks] >> test.py::test[pg-select_agg_expr_projection-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_expr_projection-default.txt-Plan] >> test.py::test[json-jsondocument/json_query-default.txt-Debug] [GOOD] >> test.py::test[json-jsondocument/json_query-default.txt-Plan] [GOOD] >> test.py::test[json-jsondocument/json_query-default.txt-Results] >> test.py::test[expr-replace_member-default.txt-Debug] >> test.py::test[agg_phases-percentile_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-percentile_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-percentile_null-default.txt-Results] >> test.py::test[schema-user_schema_mix1--Plan] [GOOD] >> test.py::test[schema-user_schema_mix1--Results] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Results] >> test.py::test[json-json_query/passing-default.txt-Analyze] >> test.py::test[pg-type_aliases-default.txt-Plan] [GOOD] >> test.py::test[pg-type_aliases-default.txt-Results] >> test.py::test[pg-select_agg_expr_projection-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_expr_projection-default.txt-Results] >> test.py::test[aggr_factory-bitor-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Plan] [GOOD] >> test.py::test[window-full/session_compact--Analyze] [GOOD] >> test.py::test[action-subquery_orderby1-default.txt-Results] [GOOD] >> test.py::test[window-full/session_compact--Debug] >> test.py::test[agg_phases-min_by_opt-default.txt-Analyze] >> test.py::test[insert_monotonic-not_all_fail--Results] [GOOD] >> test.py::test[expr-list_replicate-default.txt-Debug] [GOOD] >> test.py::test[expr-list_replicate-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bitor-default.txt-Results] >> test.py::test[join-anyjoin_merge_nodup--Debug] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Debug] [GOOD] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Plan] >> test.py::test[weak_field-weak_field_real_col-default.txt-Results] [GOOD] >> test.py::test[window-current/session--Debug] >> test.py::test[csee-const_body_same_lambda-default.txt-Debug] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Plan] [GOOD] >> test.py::test[weak_field-weak_field_data--Debug] [GOOD] >> test.py::test[expr-list_replicate-default.txt-Results] >> test.py::test[pg-tpcds-q05-default.txt-Analyze] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Results] >> test.py::test[weak_field-weak_field_data--ForceBlocks] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using_multiple3-default.txt-Results] [GOOD] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Results] >> test.py::test[pg-nulls-default.txt-Analyze] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Results] [GOOD] >> test.py::test[in-yql-14677-default.txt-Analyze] >> test.py::test[pg-tpcds-q05-default.txt-Debug] |73.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] [GOOD] >> test.py::test[params-list--Debug] >> 
test.py::test[pg-bit_const-default.txt-Results] [GOOD] >> test.py::test[pg-cbo_pragma1-default.txt-Debug] >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |73.7%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> test.py::test[key_filter-tzdate--Debug] >> test.py::test[pg-distinct_on_single_projection_no_order-default.txt-Debug] [GOOD] >> test.py::test[pg-distinct_on_single_projection_no_order-default.txt-Plan] [GOOD] >> test.py::test[pg-type_aliases-default.txt-Results] [GOOD] >> test.py::test[pg-values-default.txt-Debug] |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> test.py::test[key_filter-tzdate--Debug] [SKIPPED] >> test.py::test[key_filter-tzdate--Plan] [SKIPPED] >> test.py::test[key_filter-tzdate--Results] [SKIPPED] >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[table_range-table_funcs_expr--Debug] >> test.py::test[pg-distinct_on_single_projection_no_order-default.txt-Results] >> test.py::test[json-jsondocument/json_query-default.txt-Results] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Debug] >> test.py::test[select-optional_as_warn-default.txt-Debug] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[lambda-lambda_udf--Debug] >> test.py::test[window-current/aggregations--Debug] >> test.py::test[select-optional_as_warn-default.txt-Plan] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Results] >> test.py::test[schema-user_schema_mix1--Results] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Analyze] >> test.py::test[datetime-date_tz_table_sort_asc--Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Debug] >> test.py::test[expr-list_replicate-default.txt-Results] [GOOD] >> test.py::test[expr-pg_try_member--Debug] [SKIPPED] >> test.py::test[pg-select_agg_expr_projection-default.txt-Results] [GOOD] >> test.py::test[expr-pg_try_member--Plan] [SKIPPED] >> test.py::test[pg-select_agg_star-default.txt-Debug] >> test.py::test[expr-pg_try_member--Results] [SKIPPED] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Debug] >> test.py::test[match_recognize-alerts-default.txt-Analyze] [GOOD] >> test.py::test[in-in_nonliteral_tuple_ansi-default.txt-Results] [GOOD] >> test.py::test[csee-const_body_same_lambda-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Plan] >> test.py::test[datetime-date_diff_sub-default.txt-Debug] >> test.py::test[in-in_sorted_by_tuple--Debug] >> test.py::test[match_recognize-alerts-default.txt-Debug] >> test.py::test[produce-reduce_multi_in_ref--Results] [GOOD] >> test.py::test[join-lookupjoin_bug8533--Results] [GOOD] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_expr_subquery-default.txt-Debug] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] >> test.py::test[expr-replace_member-default.txt-Debug] [GOOD] >> test.py::test[expr-replace_member-default.txt-ForceBlocks] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |73.7%| [LD] {RESULT} 
$(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> test.py::test[agg_phases-avg-default.txt-Results] [GOOD] >> test.py::test[agg_phases-avg_null-default.txt-Debug] >> test.py::test[weak_field-weak_field_esc_string--ForceBlocks] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Analyze] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Plan] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] >> test.py::test[join-lookupjoin_semi_2o--Debug] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |73.7%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> test.py::test[json-json_query/passing-default.txt-Debug] |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> test.py::test[agg_phases-min_by_opt-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases-min_by_opt-default.txt-Debug] >> test.py::test[bigdate-table_io-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Debug] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Plan] >> test.py::test[weak_field-weak_field_data--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_data--Plan] [GOOD] >> test.py::test[weak_field-weak_field_data--Results] >> test.py::test[bigdate-table_io-default.txt-ForceBlocks] >> TPDiskRaces::DecommitWithInflight [GOOD] >> TPDiskRaces::DecommitWithInflightMock >> test.py::test[params-list--Debug] [GOOD] >> test.py::test[params-list--Plan] [GOOD] >> test.py::test[params-list--Results] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Plan] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] >> test.py::test[join-premap_common_right_tablecontent-off-Debug] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-ForceBlocks] [SKIPPED] >> test.py::test[join-premap_common_right_tablecontent-off-Plan] [GOOD] >> test.py::test[join-premap_common_right_tablecontent-off-Results] >> test.py::test[pg-nulls-default.txt-Analyze] [GOOD] >> test.py::test[pg-nulls-default.txt-Debug] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |73.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> test.py::test[join-premap_common_right_tablecontent-off-Results] [GOOD] >> test.py::test[pg-table_func-default.txt-Analyze] [GOOD] >> test.py::test[pg-table_func-default.txt-Debug] >> test.py::test[join-premap_merge_extrasort2--Analyze] >> test.py::test[pg-values-default.txt-Debug] [GOOD] >> test.py::test[pg-values-default.txt-Plan] [GOOD] >> test.py::test[pg-values-default.txt-Results] >> test.py::test[pg-distinct_on_single_projection_no_order-default.txt-Results] [GOOD] >> test.py::test[pg-in_sorted-default.txt-Debug] >> test.py::test[in-yql-14677-default.txt-Analyze] [GOOD] >> test.py::test[in-yql-14677-default.txt-Debug] >> test.py::test[pg-cbo_pragma1-default.txt-Debug] [GOOD] >> test.py::test[pg-cbo_pragma1-default.txt-Plan] >> 
test.py::test[select-optional_as_warn-default.txt-Results] [GOOD] >> test.py::test[select-refselect-1000-Debug] [SKIPPED] >> test.py::test[select-refselect-1000-Plan] [SKIPPED] >> test.py::test[select-corr_name_in_select_seq-default.txt-Analyze] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Debug] >> test.py::test[select-refselect-1000-Results] [SKIPPED] >> test.py::test[select-scalar_subquery-default.txt-Debug] >> test.py::test[pg-cbo_pragma1-default.txt-Plan] [GOOD] >> test.py::test[pg-cbo_pragma1-default.txt-Results] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Results] >> test.py::test[datetime-date_diff_sub-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] >> test.py::test[lambda-lambda_udf--Debug] [GOOD] >> test.py::test[datetime-date_diff_sub-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_star-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_star-default.txt-Plan] >> test.py::test[lambda-lambda_udf--Plan] [GOOD] >> test.py::test[lambda-lambda_udf--Results] >> test.py::test[datetime-date_diff_sub-default.txt-Results] >> test.py::test[pg-select_agg_star-default.txt-Plan] [GOOD] >> test.py::test[expr-replace_member-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_agg_star-default.txt-Results] >> test.py::test[match_recognize-alerts-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-alerts-default.txt-ForceBlocks] >> test.py::test[expr-replace_member-default.txt-Plan] [GOOD] >> test.py::test[expr-replace_member-default.txt-Results] >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Debug] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Plan] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Analyze] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> test.py::test[optimizers-yql-3455_filter_sorted--Plan] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] >> test.py::test[pg-cbo_pragma1-default.txt-Results] [GOOD] >> test.py::test[pg-values-default.txt-Results] [GOOD] >> test.py::test[pg-with_rec_all-default.txt-Debug] >> test.py::test[params-list--Results] [GOOD] >> test.py::test[params-variant--Debug] >> test.py::test[window-full/session_compact--Debug] [GOOD] >> test.py::test[window-full/session_compact--ForceBlocks] >> test.py::test[pg-join_using_tables2-default.txt-Debug] |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> test.py::test[join-premap_merge_extrasort2--Analyze] [GOOD] >> test.py::test[join-premap_merge_extrasort2--Debug] >> test.py::test[json-json_query/passing-default.txt-Debug] [GOOD] >> test.py::test[json-json_query/passing-default.txt-ForceBlocks] >> test.py::test[aggr_factory-bitor-default.txt-Results] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Debug] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Debug] >> test.py::test[key_filter-between_with_key_filter--Plan] [GOOD] >> test.py::test[agg_phases-percentile_null-default.txt-Results] [GOOD] >> 
test.py::test[agg_phases_agg_apply-count_null-default.txt-Debug] >> test.py::test[weak_field-weak_field_data--Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--Analyze] >> test.py::test[expr-to_hashed_set_list_key-default.txt-Results] [GOOD] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Debug] >> test.py::test[table_range-table_funcs_expr--Debug] [GOOD] >> test.py::test[table_range-table_funcs_expr--Plan] [GOOD] >> test.py::test[table_range-table_funcs_expr--Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-Debug] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Results] >> test.py::test[datetime-date_diff_sub-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Debug] >> test.py::test[sampling-bind_expr_subquery-default.txt-Plan] [GOOD] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] >> test.py::test[agg_phases-avg_null-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] >> test.py::test[agg_phases_agg_apply-count_opt-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Debug] >> test.py::test[pg-in_sorted-default.txt-Debug] [GOOD] >> test.py::test[pg-in_sorted-default.txt-Plan] [GOOD] >> test.py::test[pg-in_sorted-default.txt-Results] >> test.py::test[agg_phases-avg_null-default.txt-Plan] [GOOD] >> test.py::test[in-yql-14677-default.txt-Debug] [GOOD] >> test.py::test[in-yql-14677-default.txt-ForceBlocks] >> test.py::test[agg_phases-avg_null-default.txt-Results] >> test.py::test[sampling-orderedjoin_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Debug] >> test.py::test[pg-select_agg_star-default.txt-Results] [GOOD] >> test.py::test[lambda-lambda_udf--Results] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Debug] >> test.py::test[select-corr_name_in_select_seq-default.txt-Debug] [GOOD] >> test.py::test[window-current/session--Debug] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Debug] [SKIPPED] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Plan] >> test.py::test[select-corr_name_in_select_seq-default.txt-ForceBlocks] >> test.py::test[window-current/session--Plan] >> test.py::test[expr-replace_member-default.txt-Results] [GOOD] >> test.py::test[expr-struct_gather_spread-default.txt-Analyze] >> test.py::test[pg-select_having_no_from-default.txt-Debug] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Plan] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o--Debug] [GOOD] >> test.py::test[window-current/session--Plan] [GOOD] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Debug] >> test.py::test[window-current/session--Results] >> test.py::test[join-lookupjoin_semi_2o--Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_2o--Results] >> test.py::test[pg-with_rec_all-default.txt-Debug] [GOOD] >> test.py::test[pg-with_rec_all-default.txt-Plan] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> test.py::test[weak_field-weak_member_string_copy-default.txt-Analyze] [GOOD] |73.7%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> test.py::test[weak_field-weak_member_string_copy-default.txt-Debug] >> test.py::test[match_recognize-alerts-default.txt-ForceBlocks] [GOOD] >> test.py::test[match_recognize-alerts-default.txt-Plan] >> test.py::test[bigdate-table_io-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Plan] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] >> test.py::test[pg-with_rec_all-default.txt-Plan] [GOOD] >> test.py::test[pg-with_rec_all-default.txt-Results] >> test.py::test[params-variant--Debug] [GOOD] >> test.py::test[match_recognize-alerts-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-alerts-default.txt-Results] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> test.py::test[params-variant--Plan] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-ForceBlocks] [GOOD] >> test.py::test[params-variant--Results] >> test.py::test[pg-in_sorted-default.txt-Results] [GOOD] >> test.py::test[pg-interval_to_pg-default.txt-Debug] >> test.py::test[pg-tpcds-q05-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] >> test.py::test[select-scalar_subquery-default.txt-Debug] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Debug] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Plan] [GOOD] >> test.py::test[select-scalar_subquery-default.txt-Plan] [GOOD] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--Analyze] [GOOD] >> test.py::test[window-current/session_incompat_sort--Debug] >> test.py::test[key_filter-string_with-default.txt-Debug] >> test.py::test[blocks-distinct_pure_keys--Results] >> test.py::test[select-scalar_subquery-default.txt-Results] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Debug] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Results] >> test.py::test[json-json_query/passing-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Plan] [GOOD] >> test.py::test[json-json_query/passing-default.txt-Results] >> test.py::test[pg-nulls-default.txt-Debug] [GOOD] >> test.py::test[pg-nulls-default.txt-ForceBlocks] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] [GOOD] >> test.py::test[sampling-insert--Debug] >> test.py::test[join-premap_merge_extrasort2--Debug] [GOOD] >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |73.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> test.py::test[datetime-date_tz_arithmetic-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Plan] >> test.py::test[table_range-table_funcs_expr--Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Debug] |73.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |73.8%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> test.py::test[pg-select_having_no_from-default.txt-Debug] [GOOD] >> test.py::test[pg-table_func-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-count_null-default.txt-Debug] [GOOD] >> test.py::test[pg-table_func-default.txt-ForceBlocks] >> test.py::test[in-in_sorted_by_tuple--Debug] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Plan] >> test.py::test[select-corr_name_in_select_seq-default.txt-ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using_tables2-default.txt-Results] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Results] >> test.py::test[pg-with_rec_all-default.txt-Results] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Plan] [GOOD] >> test.py::test[pg-select_having_no_from-default.txt-Results] >> test.py::test[pg_catalog-pg_operator-default.txt-Debug] |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> test.py::test[in-yql-14677-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-yql-14677-default.txt-Plan] [GOOD] >> test.py::test[in-yql-14677-default.txt-Results] >> test.py::test[match_recognize-alerts-default.txt-Results] [GOOD] >> test.py::test[optimizers-or_absorption--Analyze] [SKIPPED] >> test.py::test[optimizers-or_absorption--Debug] [SKIPPED] >> test.py::test[optimizers-or_absorption--ForceBlocks] [SKIPPED] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Debug] [GOOD] >> test.py::test[optimizers-or_absorption--Plan] [SKIPPED] >> test.py::test[weak_field-weak_member_string_copy-default.txt-ForceBlocks] >> test.py::test[in-in_sorted_by_tuple--Plan] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Results] >> test.py::test[agg_phases_agg_apply-count_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-count_null-default.txt-Results] >> test.py::test[params-variant--Results] [GOOD] >> test.py::test[pg-aggregate_combine--Debug] >> test.py::test[optimizers-or_absorption--Results] [SKIPPED] >> test.py::test[optimizers-yql-12620_stage_multiuse--Analyze] >> test.py::test[expr-struct_gather_spread-default.txt-Analyze] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Debug] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Plan] [GOOD] >> test.py::test[like-like_clause_escape-default.txt-Results] >> test.py::test[window-current/aggregations--Debug] [GOOD] >> test.py::test[window-current/aggregations--Plan] [GOOD] >> test.py::test[window-current/aggregations--Results] >> test.py::test[join-lookupjoin_semi_2o--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Debug] >> test.py::test[pg-interval_to_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-interval_to_pg-default.txt-Plan] [GOOD] >> 
test.py::test[expr-to_hashed_set_varstruct_key-default.txt-Results] [GOOD] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Debug] >> test.py::test[json-json_query/passing-default.txt-Results] [GOOD] >> test.py::test[pg-interval_to_pg-default.txt-Results] >> test.py::test[join-anyjoin_merge_nodup--Debug] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[window-full/session_compact--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Analyze] >> test.py::test[window-full/session_compact--Plan] [GOOD] >> test.py::test[window-full/session_compact--Results] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |73.8%| [LD] {RESULT} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Analyze] >> test.py::test[join-anyjoin_merge_nodup--Plan] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-Debug] |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut >> test.py::test[window-current/session--Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Debug] >> test.py::test[pg-select_having_no_from-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_full_const-default.txt-Debug] >> test.py::test[key_filter-string_with-default.txt-Debug] [GOOD] >> test.py::test[key_filter-string_with-default.txt-Plan] [GOOD] >> test.py::test[key_filter-string_with-default.txt-Results] >> test.py::test[aggr_factory-bottom_by-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] >> test.py::test[datetime-date_tz_arithmetic-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_impossible_cast--Debug] [SKIPPED] >> test.py::test[datetime-date_tz_impossible_cast--Plan] [SKIPPED] >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] >> test.py::test[binding-named_callable-default.txt-Analyze] >> test.py::test[join-premap_merge_extrasort2--ForceBlocks] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Plan] [GOOD] >> test.py::test[datetime-date_tz_impossible_cast--Results] >> test.py::test[pg_catalog-pg_operator-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_operator-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_operator-default.txt-Results] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] >> test.py::test[join-premap_merge_extrasort2--Plan] [GOOD] >> test.py::test[join-premap_merge_extrasort2--Results] >> test.py::test[like-like_clause_escape-default.txt-Results] [GOOD] >> test.py::test[pg-interval_to_pg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Debug] >> test.py::test[pg-join_using_case_insensetive1-default.txt-Debug] >> test.py::test[epochs-use_sorted_by_complex_type--Debug] >> test.py::test[in-yql-14677-default.txt-Results] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Analyze] >> test.py::test[weak_field-weak_member_string_copy-default.txt-ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_filter_udf--Debug] [SKIPPED] >> 
test.py::test[table_range-range_over_filter_udf--Plan] [SKIPPED] >> test.py::test[table_range-range_over_filter_udf--Results] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q02-default.txt-Results] >> test.py::test[agg_phases-avg_null-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Analyze] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Debug] >> test.py::test[agg_phases-percentile_opt-default.txt-Debug] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |73.8%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> test.py::test[weak_field-weak_member_string_copy-default.txt-Results] >> test.py::test[table_range-range_over_filter_udf--Results] [SKIPPED] >> test.py::test[tpch-q5-default.txt-Debug] >> test.py::test[sampling-insert--Debug] [GOOD] >> test.py::test[sampling-insert--Plan] [GOOD] >> test.py::test[sampling-insert--Results] >> TBtreeIndexTPartLarge::Group [GOOD] >> TBtreeIndexTPartLarge::History >> test.py::test[window-current/session_incompat_sort--Debug] [GOOD] >> test.py::test[window-current/session_incompat_sort--ForceBlocks] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Debug] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Plan] >> test.py::test[pg-join_using_tables2-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Plan] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Debug] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Analyze] [GOOD] >> test.py::test[select-select_all-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Analyze] [GOOD] >> test.py::test[select-select_all-default.txt-Plan] [GOOD] >> test.py::test[select-select_all-default.txt-Results] >> test.py::test[pg-tpcds-q16-default.txt-Debug] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Results] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Debug] >> test.py::test[weak_field-weak_member_string_copy-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Analyze] >> test.py::test[pg-nulls-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-nulls-default.txt-Plan] [GOOD] >> test.py::test[pg-nulls-default.txt-Results] >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Debug] [GOOD] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Debug] [SKIPPED] >> test.py::test[window-distinct_over_window_struct-default.txt-Plan] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Plan] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Results] >> 
test.py::test[binding-named_callable-default.txt-Analyze] [GOOD] >> test.py::test[binding-named_callable-default.txt-Debug] >> test.py::test[agg_phases-min_by_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min_by_opt-default.txt-ForceBlocks] |73.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[expr-struct_gather_spread-default.txt-Analyze] [GOOD] >> test.py::test[pg-aggregate_combine--Debug] [GOOD] >> test.py::test[pg-aggregate_combine--Plan] [GOOD] >> test.py::test[pg-aggregate_combine--Results] >> test.py::test[datetime-date_tz_impossible_cast--Results] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Debug] >> test.py::test[pg-join_using_case_insensetive1-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using_case_insensetive1-default.txt-Plan] [GOOD] >> test.py::test[pg-join_using_case_insensetive1-default.txt-Results] >> test.py::test[blocks-distinct_pure_keys--Results] [GOOD] >> test.py::test[blocks-group_by_complex_key--Debug] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--Debug] >> test.py::test[pg-select_join_full_const-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_full_const-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_full_const-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_star--Debug] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Plan] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Results] >> test.py::test[sampling-insert--Results] [GOOD] >> test.py::test[schema-def_values--Debug] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Debug] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Plan] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] >> test.py::test[in-in_sorted_by_tuple--Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Debug] >> test.py::test[join-premap_merge_extrasort2--Results] [GOOD] >> test.py::test[join-star_join_multi-off-Analyze] |73.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[select-corr_name_in_select_seq-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all-default.txt-Results] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Debug] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Plan] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Plan] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Results] >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-generic/session_aliases--Analyze] >> test.py::test[window-current/aggregations--Results] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Debug] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_mapjoin-default.txt-Debug] >> test.py::test[sampling-subquery_mapjoin-default.txt-Debug] [SKIPPED] >> test.py::test[sampling-subquery_mapjoin-default.txt-Plan] [SKIPPED] >> test.py::test[sampling-subquery_mapjoin-default.txt-Results] >> test.py::test[aggr_factory-bottom_by-default.txt-Results] [GOOD] >> test.py::test[binding-named_callable-default.txt-Debug] [GOOD] >> test.py::test[binding-named_callable-default.txt-ForceBlocks] >> 
test.py::test[sampling-subquery_mapjoin-default.txt-Results] [SKIPPED] >> test.py::test[sampling-subquery_sort-default.txt-Debug] >> test.py::test[pg-join_using_case_insensetive1-default.txt-Results] [GOOD] >> test.py::test[pg-nothing-default.txt-Debug] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Results] [GOOD] >> test.py::test[library-library_alias--Debug] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-container-default.txt-Debug] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Debug] >> test.py::test[pg_catalog-pg_operator-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_tablespace-default.txt-Debug] >> test.py::test[agg_phases_agg_apply-count_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-min-default.txt-Debug] >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Debug] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Debug] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Plan] >> test.py::test[pg-tpcds-q16-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Plan] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] >> test.py::test[pg-table_func-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-table_func-default.txt-Plan] [GOOD] >> test.py::test[pg-table_func-default.txt-Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[agg_phases-percentile_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-percentile_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-percentile_opt-default.txt-Results] >> test.py::test[tpch-q1-default.txt-Debug] [GOOD] >> test.py::test[window-current/session_incompat_sort--ForceBlocks] [GOOD] >> test.py::test[window-current/session_incompat_sort--Plan] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--Plan] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] >> test.py::test[window-empty/aggregations--Debug] >> test.py::test[pg-nulls-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Analyze] >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] >> test.py::test[pg-aggregate_combine--Results] [GOOD] >> test.py::test[pg-cbo_pragma2-default.txt-Debug] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Debug] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Analyze] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-Debug] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] >> test.py::test[epochs-use_sorted_by_complex_type--Debug] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Debug] >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Debug] >> test.py::test[epochs-use_sorted_by_complex_type--Plan] [GOOD] >> test.py::test[epochs-use_sorted_by_complex_type--Results] >> test.py::test[join-bush_dis_in_in-off-Debug] [SKIPPED] >> test.py::test[join-bush_dis_in_in-off-Plan] [SKIPPED] >> test.py::test[join-bush_dis_in_in-off-Results] [SKIPPED] >> test.py::test[join-convert_check_key_mem--Debug] >> 
test.py::test[join-mapjoin_early_rewrite_star--Results] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Debug] |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Debug] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Plan] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted-off-Plan] [SKIPPED] >> test.py::test[join-mergejoin_force_no_sorted-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-Debug] >> test.py::test[select-select_all_ordered-default.txt-Debug] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Plan] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Results] >> test.py::test[join-mergejoin_narrows_output_sort-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-Plan] [SKIPPED] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_agg_no_extra_for_aggs-default.txt-Debug] >> test.py::test[join-mergejoin_narrows_output_sort-off-Results] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--Debug] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--Plan] [SKIPPED] >> test.py::test[join-no_empty_join_for_dyn--Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-off-Debug] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-off-Plan] [SKIPPED] >> test.py::test[join-nopushdown_filter_over_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_both_sides--Debug] |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk2/unittest >> test.py::test[in-in_types_cast-default.txt-Debug] [GOOD] >> test.py::test[window-generic/session_aliases--Analyze] [GOOD] >> test.py::test[window-generic/session_aliases--Debug] >> test.py::test[tpch-q5-default.txt-Debug] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Plan] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> test.py::test[aggregate-group_by_session_aliases--Debug] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--Plan] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--Results] >> test.py::test[tpch-q5-default.txt-Plan] [GOOD] >> test.py::test[tpch-q5-default.txt-Results] >> test.py::test[distinct-distinct_columns-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Results] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Debug] >> test.py::test[pg-nothing-default.txt-Debug] [GOOD] >> test.py::test[pg-nothing-default.txt-Plan] >> test.py::test[schema-def_values--Debug] [GOOD] >> test.py::test[schema-def_values--Plan] [GOOD] >> test.py::test[schema-def_values--Results] >> test.py::test[pg-nothing-default.txt-Plan] [GOOD] >> test.py::test[pg-nothing-default.txt-Results] >> test.py::test[blocks-group_by_complex_key--Debug] [GOOD] >> test.py::test[blocks-group_by_complex_key--Plan] >> test.py::test[blocks-group_by_complex_key--Plan] [GOOD] >> test.py::test[blocks-group_by_complex_key--Results] >> test.py::test[binding-named_callable-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-named_callable-default.txt-Plan] [GOOD] >> 
test.py::test[binding-named_callable-default.txt-Results] >> test.py::test[pg_catalog-pg_tablespace-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_tablespace-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_tablespace-default.txt-Results] >> test.py::test[library-library_alias--Debug] [GOOD] >> test.py::test[library-library_alias--Plan] [GOOD] >> test.py::test[library-library_alias--Results] >> test.py::test[agg_phases_agg_apply-min-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-min-default.txt-Plan] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Analyze] [GOOD] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Debug] >> test.py::test[pg-cbo_pragma2-default.txt-Debug] [GOOD] >> test.py::test[pg-cbo_pragma2-default.txt-Plan] >> test.py::test[agg_phases_agg_apply-min-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-min-default.txt-Results] >> test.py::test[pg-cbo_pragma2-default.txt-Plan] [GOOD] >> test.py::test[pg-cbo_pragma2-default.txt-Results] >> test.py::test[aggr_factory-container-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-container-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-container-default.txt-Results] >> test.py::test[join-star_join_multi-off-Analyze] [GOOD] >> test.py::test[join-star_join_multi-off-Debug] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-flatten_list_nested_lambda--Debug] [SKIPPED] >> test.py::test[lineage-flatten_list_nested_lambda--Plan] >> test.py::test[lineage-flatten_list_nested_lambda--Plan] [SKIPPED] >> test.py::test[lineage-flatten_list_nested_lambda--Results] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt-Debug] >> test.py::test[sampling-subquery_sort-default.txt-Debug] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt-Plan] [GOOD] >> test.py::test[lineage-flatten_where-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-flatten_where-default.txt-Results] [SKIPPED] >> test.py::test[lineage-process-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-process-default.txt-Plan] >> test.py::test[sampling-subquery_sort-default.txt-Results] >> test.py::test[lineage-process-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-process-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-Debug] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Debug] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] >> test.py::test[lineage-select_field_rename-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_field_rename-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Debug] >> test.py::test[lineage-select_group_by_key-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_session-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-window_session-default.txt-Plan] [SKIPPED] >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Debug] >> test.py::test[join-convert_check_key_mem--Debug] [GOOD] >> test.py::test[join-convert_check_key_mem--Plan] [GOOD] >> 
test.py::test[join-convert_check_key_mem--Results] >> test.py::test[lineage-window_session-default.txt-Results] [SKIPPED] >> test.py::test[match_recognize-after_match_skip_past_last_row-default.txt-Debug] >> test.py::test[pg-tpcds-q16-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Plan] >> test.py::test[pg-tpcds-q16-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> TRegistryTests::TestLock [GOOD] >> TRegistryTests::TestClasses [GOOD] >> TRegistryTests::TestDisableEnable [GOOD] >> test.py::test[pg-select_join_full_const-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-Debug] >> test.py::test[window-current/session_incompat_sort--Results] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Analyze] >> test.py::test[pg-order_by_agg_no_extra_for_aggs-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_agg_no_extra_for_aggs-default.txt-Plan] [GOOD] >> test.py::test[pg-order_by_agg_no_extra_for_aggs-default.txt-Results] >> test.py::test[pg-nothing-default.txt-Results] [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Debug] >> test.py::test[pg_catalog-pg_tablespace-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_type_syntax_pg-default.txt-Debug] |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> TRegistryTests::TestDisableEnable [GOOD] >> test.py::test[binding-named_callable-default.txt-Results] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Analyze] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Plan] >> test.py::test[library-library_alias--Results] [GOOD] >> test.py::test[library-package--Debug] [SKIPPED] >> test.py::test[library-package--Plan] [SKIPPED] >> test.py::test[library-package--Results] [SKIPPED] >> test.py::test[like-like_multiline-default.txt-Debug] >> test.py::test[pg-cbo_pragma2-default.txt-Results] [GOOD] >> test.py::test[pg-is_predicate-default.txt-Debug] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Plan] [GOOD] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] >> test.py::test[pg-table_func-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Analyze] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [GOOD] >> test.py::test[expr-as_dict_dict_key-default.txt-Debug] >> test.py::test[agg_phases-min_by_opt-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-min_by_opt-default.txt-Plan] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Debug] >> test.py::test[agg_phases-min_by_opt-default.txt-Results] >> test.py::test[distinct-distinct_columns-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Debug] >> test.py::test[join-join_without_column--Debug] >> test.py::test[schema-def_values--Results] [GOOD] >> test.py::test[schema-insert-row_spec-Debug] >> test.py::test[pg-tpch-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Debug] >> test.py::test[window-current/ansi_current_with_win--Debug] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Plan] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Results] >> test.py::test[aggregate-group_by_session_aliases--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Debug] |73.8%| [TM] 
{default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Debug] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Plan] >> test.py::test[order_by-assume_cut_prefix--Debug] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Plan] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[blocks-if--Debug] |73.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[optimizers-yql-12620_stage_multiuse--Debug] [GOOD] >> test.py::test[pg-order_by_agg_no_extra_for_aggs-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Debug] >> test.py::test[join-premap_common_inner_both_sides--Plan] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |73.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage >> test.py::test[sampling-subquery_sort-default.txt-Results] [GOOD] >> test.py::test[schema-insert-read_schema-Debug] >> test.py::test[tpch-q5-default.txt-Results] [GOOD] >> test.py::test[tpch-q8-default.txt-Debug] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-ForceBlocks] |73.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[tpch-q1-default.txt-Debug] [GOOD] |73.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[expr-to_sorted_dict_tuple_key-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] >> test.py::test[window-empty/aggregations--Debug] [GOOD] >> test.py::test[window-empty/aggregations--Plan] [GOOD] >> test.py::test[window-empty/aggregations--Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Analyze] >> test.py::test[window-win_by_all_avg_interval-default.txt-Analyze] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Debug] >> test.py::test[aggr_factory-container-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Debug] >> test.py::test[pg-select_join_inner_equi-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-Plan] >> test.py::test[window-generic/session_aliases--Debug] [GOOD] >> test.py::test[window-generic/session_aliases--ForceBlocks] >> test.py::test[match_recognize-after_match_skip_past_last_row-default.txt-Debug] [GOOD] >> test.py::test[match_recognize-after_match_skip_past_last_row-default.txt-Plan] [GOOD] >> test.py::test[match_recognize-after_match_skip_past_last_row-default.txt-Results] >> test.py::test[pg-numeric_to_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Plan] >> test.py::test[pg-select_join_inner_equi-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_inner_equi-default.txt-Results] >> 
test.py::test[pg-numeric_to_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-numeric_to_pg-default.txt-Results] >> test.py::test[pg-is_predicate-default.txt-Debug] [GOOD] >> test.py::test[pg-is_predicate-default.txt-Plan] [GOOD] >> test.py::test[pg-is_predicate-default.txt-Results] >> test.py::test[pg_catalog-pg_type_syntax_pg-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_type_syntax_pg-default.txt-Plan] >> test.py::test[pg_catalog-pg_type_syntax_pg-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_type_syntax_pg-default.txt-Results] >> test.py::test[like-like_multiline-default.txt-Debug] [GOOD] >> test.py::test[like-like_multiline-default.txt-Plan] [GOOD] >> test.py::test[like-like_multiline-default.txt-Results] >> test.py::test[blocks-coalesce_complex-default.txt-Analyze] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Debug] >> test.py::test[order_by-assume_cut_prefix--Results] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Debug] >> test.py::test[pg-tpcds-q20-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Debug] >> test.py::test[join-convert_check_key_mem--Results] [GOOD] >> test.py::test[join-convert_check_key_mem-off-Debug] [SKIPPED] >> test.py::test[join-convert_check_key_mem-off-Plan] [SKIPPED] >> test.py::test[join-convert_check_key_mem-off-Results] [SKIPPED] >> test.py::test[join-equi_join_two_mult_keys--Debug] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Debug] [GOOD] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Plan] [GOOD] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Results] >> test.py::test[expr-as_dict_dict_key-default.txt-Debug] [GOOD] >> test.py::test[expr-as_dict_dict_key-default.txt-Plan] [GOOD] >> test.py::test[expr-as_dict_dict_key-default.txt-Results] >> test.py::test[agg_phases_agg_apply-min-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Debug] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Plan] [GOOD] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |73.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> test.py::test[schema-insert-row_spec-Debug] [GOOD] >> test.py::test[schema-insert-row_spec-Plan] [GOOD] >> test.py::test[schema-insert-row_spec-Results] >> test.py::test[select-tablename_with_table_row-default.txt-Debug] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Plan] >> test.py::test[select-tablename_with_table_row-default.txt-Plan] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> test.py::test[pg-numeric_to_pg-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_keys-default.txt-Debug] >> test.py::test[key_filter-calc_dependent_with_tmp-default.txt-Results] [GOOD] >> test.py::test[agg_phases-percentile_opt-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-list--Debug] |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> test.py::test[key_filter-complex-default.txt-Analyze] >> TPDiskRaces::DecommitWithInflightMock [GOOD] >> TPDiskRaces::KillOwnerWhileDecommitting >> 
test.py::test[pg-order_by_input_columns-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Plan] [GOOD] >> test.py::test[pg-order_by_input_columns-default.txt-Results] >> test.py::test[pg_catalog-pg_type_syntax_pg-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Debug] >> test.py::test[join-star_join_multi-off-Debug] [GOOD] >> test.py::test[join-star_join_multi-off-ForceBlocks] [SKIPPED] >> test.py::test[join-star_join_multi-off-Plan] |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> test.py::test[join-star_join_multi-off-Plan] [GOOD] >> test.py::test[join-star_join_multi-off-Results] [GOOD] >> test.py::test[like-like_multiline-default.txt-Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] [GOOD] >> test.py::test[expr-as_dict_dict_key-default.txt-Results] [GOOD] >> test.py::test[expr-as_table-default.txt-Debug] >> test.py::test[schema-insert-read_schema-Debug] [GOOD] >> test.py::test[schema-insert-read_schema-Plan] [GOOD] >> test.py::test[schema-insert-read_schema-Results] >> test.py::test[in-in_with_literal_list_of_structs-default.txt-Results] [GOOD] >> test.py::test[in-in_with_tuple_simple-default.txt-Debug] >> test.py::test[pg-select_join_inner_equi-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_inner_equi_and_one-default.txt-Debug] >> test.py::test[match_recognize-after_match_skip_past_last_row-default.txt-Results] [GOOD] >> test.py::test[optimizers-flatmap_with_non_struct_out--Debug] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out--Plan] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[join-premap_common_multiparents_no_premap--Debug] >> TestProgram::YqlKernelContains [GOOD] >> test.py::test[pg-is_predicate-default.txt-Results] [GOOD] >> test.py::test[pg-join_tree_order-default.txt-Debug] |73.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> test.py::test[action-action_eval_cluster_use--Debug] >> test.py::test[pg-tpcds-q30-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Debug] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> test.py::test[pg-tpcds-q20-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-ForceBlocks] >> test.py::test[schema-insert-row_spec-Results] [GOOD] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-schema-Debug] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Plan] |73.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |73.9%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "substring" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { 
Projection { Columns { Name: "0" } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:string;G:substring;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; string: [ 4C6F72656D20697073756D20C020646F6C6F720C2073697420616D65742E, 4C6F72656D20697073756D20646F6C6F722073697420076D65742E, 4C6F72656D20697073756D20646F6C6F722073697420616D65742E, 4C6F72656D20697073756D20646F6C6F722073697420076D65742E ] substring: [ 646F6C6F72, 616D65742E, 076D65742E, 076D65742E ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Plan] [GOOD] >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Results] >> test.py::test[pg-order_by_input_columns-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Debug] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Plan] |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> test.py::test[pg-pg_types_aggregate1-default.txt-Debug] >> test.py::test[order_by-literal_empty_list_sort--Plan] [GOOD] >> test.py::test[order_by-literal_empty_list_sort--Results] >> test.py::test[join-join_without_column--Debug] [GOOD] >> test.py::test[join-join_without_column--Plan] [GOOD] >> test.py::test[key_filter-complex-default.txt-Analyze] [GOOD] >> test.py::test[blocks-if--Debug] [GOOD] >> test.py::test[blocks-if--Plan] [GOOD] >> test.py::test[blocks-if--Results] >> test.py::test[pg-order_by_agg_input_columns_keys-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_keys-default.txt-Plan] >> test.py::test[join-join_without_column--Results] >> test.py::test[window-current/ansi_current_with_win--Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--Debug] >> test.py::test[key_filter-complex-default.txt-Debug] >> 
test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] [GOOD] >> test.py::test[window-current/aggregations--Analyze] >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Debug] >> test.py::test[join-equi_join_two_mult_keys--Debug] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_keys-default.txt-Plan] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--Plan] [GOOD] >> test.py::test[join-equi_join_two_mult_keys--Results] >> test.py::test[pg-order_by_agg_input_columns_keys-default.txt-Results] >> test.py::test[pg_catalog-table_constraints-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Plan] >> test.py::test[expr-as_table-default.txt-Debug] [GOOD] >> test.py::test[expr-as_table-default.txt-Plan] >> test.py::test[pg_catalog-table_constraints-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Results] >> test.py::test[window-win_by_all_avg_interval-default.txt-Debug] [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest |73.9%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> test.py::test[window-win_by_all_avg_interval-default.txt-ForceBlocks] >> test.py::test[insert-multiappend_sorted-default.txt-Debug] [GOOD] >> test.py::test[insert-multiappend_sorted-default.txt-ForceBlocks] >> TErasureTypeTest::TestAllSpecies1of2 [GOOD] >> TErasureTypeTest::TestAllSpecies2of2 >> test.py::test[expr-as_table-default.txt-Plan] [GOOD] >> test.py::test[expr-as_table-default.txt-Results] >> test.py::test[schema-insert-read_schema-Results] [GOOD] >> test.py::test[schema-other_job--Debug] >> OperationMapping::IndexBuildCanceled [GOOD] >> test.py::test[schema-other_job--Debug] [SKIPPED] >> test.py::test[schema-other_job--Plan] [SKIPPED] >> test.py::test[schema-other_job--Results] [SKIPPED] >> test.py::test[schema-patchtype--Debug] >> test.py::test[pg-select_join_inner_equi_and_one-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_inner_equi_and_one-default.txt-Plan] [GOOD] >> test.py::test[in-in_with_tuple_simple-default.txt-Debug] [GOOD] >> test.py::test[in-in_with_tuple_simple-default.txt-Plan] >> test.py::test[pg-select_join_inner_equi_and_one-default.txt-Results] >> test.py::test[in-in_with_tuple_simple-default.txt-Plan] [GOOD] >> test.py::test[in-in_with_tuple_simple-default.txt-Results] >> test.py::test[pg-join_tree_order-default.txt-Debug] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Debug] [GOOD] >> test.py::test[pg-join_tree_order-default.txt-Plan] [GOOD] >> test.py::test[pg-join_tree_order-default.txt-Results] >> test.py::test[blocks-coalesce_complex-default.txt-ForceBlocks] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> test.py::test[pg-order_by_agg_extra_for_keys-default.txt-Results] [GOOD] >> test.py::test[window-empty/aggregations--Results] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Debug] >> test.py::test[order_by-literal_empty_list_sort--Results] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Debug] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Analyze] >> test.py::test[window-generic/session_aliases--ForceBlocks] [GOOD] >> 
test.py::test[window-generic/session_aliases--Plan] [GOOD] >> test.py::test[window-generic/session_aliases--Results] >> OperationMapping::IndexBuildSuccess [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> test.py::test[aggr_factory-corellation-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Plan] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Debug] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Plan] >> test.py::test[aggr_factory-corellation-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-corellation-default.txt-Results] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Plan] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] |73.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-table_constraints-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-ambigous_order_by_item_expression_from_projection--Debug] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_item_expression_from_projection--Plan] [SKIPPED] >> test.py::test[pg_duplicated-ambigous_order_by_item_expression_from_projection--Results] >> test.py::test[expr-as_table-default.txt-Results] [GOOD] >> test.py::test[expr-as_table_bad_row_type_fail--Debug] >> test.py::test[agg_phases-min_by_opt-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_keys-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_shadow_input_columns-default.txt-Debug] >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] >> test.py::test[schema-insert_sorted-schema-Debug] [GOOD] >> test.py::test[schema-insert_sorted-schema-Plan] >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> test.py::test[expr-as_table_bad_row_type_fail--Debug] [SKIPPED] >> test.py::test[agg_phases-min_null-default.txt-Analyze] >> test.py::test[pg-tpcds-q20-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-as_table_bad_row_type_fail--Plan] [SKIPPED] >> test.py::test[expr-as_table_bad_row_type_fail--Results] >> test.py::test[schema-insert_sorted-schema-Plan] [GOOD] >> test.py::test[schema-insert_sorted-schema-Results] >> test.py::test[blocks-if--Results] [GOOD] >> test.py::test[blocks-json_document_type--Debug] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] >> test.py::test[pg-tpcds-q20-default.txt-Plan] [GOOD] >> test.py::test[key_filter-complex-default.txt-Debug] [GOOD] >> test.py::test[key_filter-complex-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q20-default.txt-Results] >> test.py::test[join-join_without_column--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Debug] >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |73.9%| [TM] 
{default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[join-star_join_multi-off-Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Plan] >> test.py::test[action-action_eval_cluster_use--Debug] [GOOD] >> test.py::test[action-action_eval_cluster_use--Plan] >> test.py::test[pg-join_tree_order-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_multiple2--Debug] >> test.py::test[in-in_with_tuple_simple-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-bitxor-default.txt-Results] >> test.py::test[schema-patchtype--Debug] [GOOD] >> test.py::test[schema-patchtype--Plan] [GOOD] >> test.py::test[schema-patchtype--Results] >> test.py::test[aggr_factory-median-default.txt-Results] [GOOD] >> test.py::test[window-current/aggregations--Analyze] [GOOD] >> test.py::test[window-current/aggregations--Debug] >> test.py::test[aggr_factory-mode-default.txt-Debug] >> test.py::test[action-action_eval_cluster_use--Plan] [GOOD] >> test.py::test[action-action_eval_cluster_use--Results] >> test.py::test[pg-select_join_inner_equi_and_one-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Debug] >> test.py::test[pg-select_join_left-default.txt-Debug] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> test.py::test[join-equi_join_two_mult_keys--Results] [GOOD] >> test.py::test[join-grace_join1-map-Debug] [SKIPPED] >> test.py::test[join-grace_join1-map-Plan] >> test.py::test[join-grace_join1-map-Plan] [SKIPPED] >> test.py::test[join-grace_join1-map-Results] >> test.py::test[window-win_by_all_avg_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Plan] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] >> test.py::test[join-grace_join1-map-Results] [SKIPPED] >> test.py::test[tpch-q8-default.txt-Debug] [GOOD] >> test.py::test[tpch-q8-default.txt-Plan] [GOOD] >> test.py::test[tpch-q8-default.txt-Results] >> test.py::test[order_by-literal_single_item_sort--Debug] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Plan] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] >> test.py::test[agg_phases-min_null-default.txt-Analyze] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Debug] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Plan] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap--Results] >> test.py::test[agg_phases-min_null-default.txt-Debug] >> test.py::test[pg-pg_types_aggregate1-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_types_aggregate1-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_types_aggregate1-default.txt-Results] |73.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[like-like_multiline-default.txt-Results] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Analyze] [GOOD] >> 
test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Debug] >> test.py::test[pg-order_by_shadow_input_columns-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_shadow_input_columns-default.txt-Plan] [GOOD] >> test.py::test[pg-order_by_shadow_input_columns-default.txt-Results] >> test.py::test[expr-as_table_bad_row_type_fail--Results] [GOOD] >> test.py::test[expr-cast_longint-default.txt-Debug] >> test.py::test[pg_duplicated-ambigous_order_by_item_expression_from_projection--Results] [GOOD] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Debug] >> test.py::test[aggregate-group_by_session_distinct--Debug] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Plan] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Results] |73.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[pg-tpcds-q30-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q30-default.txt-Results] >> KqpScan::ScanDuringSplit10 >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests2Inflight2BlobSize1000 >> test.py::test[schema-insert_sorted-schema-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Debug] >> test.py::test[aggr_factory-list--Debug] [GOOD] >> test.py::test[aggr_factory-list--Plan] >> test.py::test[aggr_factory-corellation-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Debug] >> test.py::test[window-generic/session_aliases--Results] [GOOD] >> test.py::test[window-win_by_simple-default.txt-Analyze] >> test.py::test[pg-tpcds-q20-default.txt-Results] [GOOD] >> test.py::test[pg-wide_top_sort--Analyze] >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> test.py::test[aggr_factory-list--Plan] [GOOD] >> test.py::test[aggr_factory-list--Results] >> CompatibilityInfo::VDiskCompatible >> CompatibilityInfo::VDiskCompatible [GOOD] >> CompatibilityInfo::VDiskIncompatible >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[epochs-read_modified--Debug] >> test.py::test[pg-select_join_left-default.txt-Debug] [GOOD] >> test.py::test[key_filter-complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-complex-default.txt-Plan] [GOOD] >> test.py::test[key_filter-complex-default.txt-Results] >> test.py::test[blocks-json_document_type--Debug] [GOOD] >> test.py::test[pg-select_join_left-default.txt-Plan] [GOOD] >> CompatibilityInfo::VDiskIncompatible [GOOD] >> CompatibilityInfo::VDiskIncompatibleWithDefault >> test.py::test[action-action_eval_cluster_use--Results] [GOOD] >> test.py::test[pg-select_join_left-default.txt-Results] >> CompatibilityInfo::VDiskIncompatibleWithDefault [GOOD] >> CompatibilityInfo::VDiskSuppressCompatibilityCheck [GOOD] >> CompatibilityInfo::VDiskMigration [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests1Inflight1BlobSize1000 >> test.py::test[blocks-json_document_type--Plan] >> test.py::test[action-eval_atom_wrong_type_param--Debug] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_param--Plan] >> test.py::test[blocks-coalesce_complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Plan] [GOOD] >> test.py::test[blocks-coalesce_complex-default.txt-Results] >> KqpScan::RemoteShardScan >> test.py::test[action-eval_atom_wrong_type_param--Plan] [SKIPPED] >> 
test.py::test[action-eval_atom_wrong_type_param--Results] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Debug] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Plan] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Results] >> test.py::test[blocks-json_document_type--Plan] [GOOD] >> test.py::test[blocks-json_document_type--Results] >> test.py::test[schema-patchtype--Results] [GOOD] >> test.py::test[schema-remap_desc--Debug] >> test.py::test[pg-order_by_shadow_input_columns-default.txt-Results] [GOOD] >> test.py::test[pg-pg_corr_sort_limit-default.txt-Debug] >> test.py::test[window-current/session_incompat_sort--Debug] [GOOD] >> test.py::test[window-current/session_incompat_sort--Plan] >> CostMetricsPatchMirror3dc::TestPatchMirror3dcRequests2Inflight2BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests1Inflight1BlobSize1000 >> test.py::test[window-current/session_incompat_sort--Plan] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_single_item_sort--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Debug] >> test.py::test[order_by-native_desc_sort--Debug] [SKIPPED] >> test.py::test[order_by-native_desc_sort--Plan] [SKIPPED] >> test.py::test[order_by-native_desc_sort--Results] [SKIPPED] >> test.py::test[order_by-order_by_dynum-default.txt-Debug] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Debug] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-ForceBlocks] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_first_last--Analyze] >> test.py::test[expr-cast_longint-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_longint-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_longint-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort-Debug] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Plan] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] >> test.py::test[pg-join_using_multiple2--Debug] [GOOD] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Debug] [GOOD] >> test.py::test[pg-join_using_multiple2--Plan] >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Plan] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Debug] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Plan] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> test.py::test[pg-join_using_multiple2--Plan] [GOOD] >> test.py::test[pg-join_using_multiple2--Results] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight1BlobSize1000 >> test.py::test[window-empty/aggregations_leadlag--Plan] [GOOD] >> test.py::test[window-empty/aggregations_leadlag--Results] >> KqpScan::ScanRetryRead >> test.py::test[window-current/aggregations--Debug] [GOOD] >> test.py::test[window-current/aggregations--ForceBlocks] >> test.py::test[aggr_factory-bitxor-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-def_value_full_table-default.txt-Debug] |73.9%| [LD] {default-linux-x86_64, 
relwithdebinfo} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |73.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests1Inflight1BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight1BlobSize1000 >> test.py::test[pg-wide_top_sort--Analyze] [GOOD] >> test.py::test[pg-wide_top_sort--Debug] >> test.py::test[pg-pg_types_aggregate1-default.txt-Results] [GOOD] >> test.py::test[pg-pg_types_array_literal-default.txt-Debug] >> test.py::test[aggregate-group_by_session_compact--Debug] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Plan] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Analyze] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Debug] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--ForceBlocks] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Plan] [SKIPPED] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Results] >> test.py::test[pg-tpcds-q30-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_left-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-Debug] >> test.py::test[window-win_by_simple-default.txt-Analyze] [GOOD] >> test.py::test[window-win_by_simple-default.txt-Debug] >> test.py::test[pg-tpcds-q41-default.txt-Analyze] >> test.py::test[window-win_func_first_last--Analyze] [GOOD] >> test.py::test[window-win_func_first_last--Debug] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1BlobSize1000 |73.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[in-in_with_tuple_simple-default.txt-Results] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-ForceBlocks] >> test.py::test[expr-cast_longint-default.txt-Results] [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1BlobSize1000 >> TBtreeIndexTPartLarge::History [GOOD] >> TFlatTableLongTxLarge::LargeDeltaChain >> TKeyValueTest::TestWriteReadPatchRead >> test.py::test[join-join_without_correlation_and_dict_access--Debug] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Plan] >> test.py::test[pg-pg_corr_sort_limit-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_corr_sort_limit-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_corr_sort_limit-default.txt-Results] >> test.py::test[join-join_without_correlation_and_dict_access--Plan] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access--Results] >> test.py::test[action-eval_on_modif_table_fail--Results] [GOOD] >> test.py::test[action-eval_range--Debug] >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> test.py::test[join-premap_common_multiparents_no_premap--Results] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Plan] [GOOD] >> 
test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Results] >> test.py::test[aggr_factory-mode-default.txt-Debug] [GOOD] |73.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[join-grace_join1-map-Results] [SKIPPED] >> test.py::test[epochs-read_modified--Debug] [GOOD] >> test.py::test[epochs-read_modified--Plan] >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Results] >> test.py::test[epochs-read_modified--Plan] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--Debug] >> test.py::test[tpch-q8-default.txt-Results] [GOOD] >> test.py::test[tpch-q9-default.txt-Debug] >> test.py::test[epochs-read_modified--Results] >> test.py::test[blocks-coalesce_complex-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Analyze] >> test.py::test[schema-select_all-row_spec_diff_sort-Results] [GOOD] >> test.py::test[schema-select_all-schema-Debug] >> test.py::test[blocks-json_document_type--Results] [GOOD] >> test.py::test[blocks-pg_tofrom--Debug] >> test.py::test[aggr_factory-count_if-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Results] >> TKeyValueTest::TestIncorrectRequestThenResponseError >> test.py::test[window-current/session_incompat_sort--Results] [GOOD] >> test.py::test[window-full/aggregations--Debug] >> test.py::test[aggr_factory-def_value_full_table-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-def_value_full_table-default.txt-Plan] >> test.py::test[lambda-lambda_with_tie_bad_count_fail--Results] [GOOD] >> test.py::test[aggr_factory-def_value_full_table-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-def_value_full_table-default.txt-Results] >> test.py::test[order_by-order_by_dynum-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_by_dynum-default.txt-Results] >> test.py::test[like-ilike_clause-default.txt-Analyze] |73.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |73.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> test.py::test[schema-remap_desc--Debug] [GOOD] >> test.py::test[schema-remap_desc--Plan] [GOOD] >> test.py::test[schema-remap_desc--Results] |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> test.py::test[insert-multiappend_sorted-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-wide_top_sort--Debug] [GOOD] >> test.py::test[pg-wide_top_sort--ForceBlocks] >> test.py::test[window-win_by_simple-default.txt-Debug] [GOOD] >> test.py::test[window-win_by_simple-default.txt-ForceBlocks] >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> test.py::test[pg-pg_types_array_literal-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_types_array_literal-default.txt-Plan] [GOOD] >> 
test.py::test[pg-pg_types_array_literal-default.txt-Results] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> test.py::test[pg-tpch-q13-default.txt-Results] [GOOD] >> test.py::test[pg-uuid_from_pg-default.txt-Debug] >> test.py::test[pg-order_by_agg_input_columns_prj_and_aggr-default.txt-Results] [GOOD] >> test.py::test[pg-pg_column_case--Analyze] >> test.py::test[pg-select_join_left_const-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_left_const-default.txt-Results] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |74.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> test.py::test[action-eval_range--Debug] [GOOD] >> test.py::test[action-eval_range--Plan] [GOOD] >> test.py::test[action-eval_range--Results] >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Analyze] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Debug] >> test.py::test[aggregate-group_by_session_extended--Debug] >> test.py::test[pg-tpcds-q41-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Debug] >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> test.py::test[aggr_factory-list--Results] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Debug] >> BindQueue::Basic |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> test.py::test[pg-join_using_multiple2--Results] [GOOD] >> test.py::test[pg-name--Debug] >> test.py::test[window-empty/aggregations_leadlag--Results] [GOOD] >> test.py::test[pg-pg_corr_sort_limit-default.txt-Results] [GOOD] >> test.py::test[pg-pg_interval_literal-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestIncrementalKeySet [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] 
sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> test.py::test[window-full/session_compact--Debug] >> test.py::test[window-win_func_first_last--Debug] [GOOD] >> test.py::test[window-win_func_first_last--ForceBlocks] >> test.py::test[aggr_factory-def_value_full_table-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-Debug] >> test.py::test[blocks-pg_tofrom--Debug] [GOOD] >> test.py::test[blocks-pg_tofrom--Plan] [GOOD] >> test.py::test[blocks-pg_tofrom--Results] >> test.py::test[epochs-read_modified--Results] [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Debug] >> test.py::test[schema-select_all-schema-Debug] [GOOD] >> test.py::test[schema-select_all-schema-Plan] [GOOD] >> test.py::test[schema-select_all-schema-Results] >> test.py::test[order_by-order_by_dynum-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2024-11-21T17:44:37.054887Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:44:37.055675Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c81/r3tmp/tmpVeaj1T/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:37.055734Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c81/r3tmp/tmpVeaj1T/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:44:37.056000Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T17:44:37.056060Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:37.056260Z node 1 :BS_NODE DEBUG: 
{NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T17:44:37.056273Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:37.056373Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T17:44:37.056379Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:37.056446Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T17:44:37.056451Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:37.056520Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T17:44:37.056526Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T17:44:37.056624Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2024-11-21T17:44:37.056627Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:44:37.056637Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:44:37.056671Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:37.059040Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:37.059210Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:37.059245Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:37.059248Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:37.067595Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:37.067624Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:44:37.068208Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:37.069006Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:37.069065Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:37.072444Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: 
"SectorMap:/home/runner/.ya/build/build_root/6fwh/001c81/r3tmp/tmpVeaj1T/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:37.072524Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:37.072654Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T17:44:37.072660Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:37.072679Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "8U>\'|\000\317\rw\000E\250\275t\233\001s\025\206\200" } 2024-11-21T17:44:37.072718Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T17:44:37.072729Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:70:2114] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:37.072738Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.005081s 2024-11-21T17:44:37.072791Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T17:44:37.072799Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:37.078037Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.079362Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.079852Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.080545Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.080660Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.080820Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.081050Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.081107Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); 
sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:37.081358Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:37.081372Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:37.081510Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.081684Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:37.081896Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:37.081909Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:37.088847Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:44:37.094653Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:44:37.094988Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:44:37.095108Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:44:37.097516Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:37.097533Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:37.097570Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:44:37.100546Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:44:37.100605Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:44:37.100640Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:44:37.100656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsContro ... 
Id { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 507859270459230832 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:44:37.886316Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.886329Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.886347Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 507859270459230832 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:44:37.886373Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 507859270459230832 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:44:37.886515Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "test_storage" ErasureSpecies: "none" VDiskKind: "Default" Kind: "pool-kind-1" NumGroups: 1 PDiskFilter { Property { Type: ROT } } EncryptionMode: 1 } } } 2024-11-21T17:44:37.912395Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1298:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:44:37.912445Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1298:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:37.912450Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1298:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:37.912454Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1298:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:37.912459Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1298:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:37.912463Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1298:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:37.912467Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1298:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:37.912473Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:1298:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:44:37.912485Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1298:1] Marker# BPG33 2024-11-21T17:44:37.912490Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1298:1] Marker# BPG32 2024-11-21T17:44:37.912495Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1298:2] Marker# BPG33 2024-11-21T17:44:37.912499Z node 1 :BS_PROXY_PUT DEBUG: 
[185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1298:2] Marker# BPG32 2024-11-21T17:44:37.912504Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1298:3] Marker# BPG33 2024-11-21T17:44:37.912507Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1298:3] Marker# BPG32 2024-11-21T17:44:37.912680Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1298:3] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:37.912692Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1298:2] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:37.912701Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1298:1] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:37.916614Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1298:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T17:44:37.916660Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1298:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T17:44:37.916671Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1298:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T17:44:37.916685Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1298:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T17:44:37.916693Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1298:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:44:37.916828Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { VDisks { VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 507859270459230832 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "test_storage" } Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 
VDiskSlotID: 1002 PDiskGuid: 507859270459230832 } } } EncryptionMode: 1 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038082 MainKeyVersion: 0 StoragePoolName: "test_storage" DeviceType: ROT } } InstanceId: "88053cc9-973e3dd-fc894dbd-8c4050f8" AvailDomain: 1 } 2024-11-21T17:44:37.916850Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {VDisks { VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 507859270459230832 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "test_storage" } Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 507859270459230832 } } } EncryptionMode: 1 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038082 MainKeyVersion: 0 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T17:44:37.916887Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000002:1:0:0:0] VSlotId# 1:1000:1002 PDiskGuid# 507859270459230832 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:37.917029Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [82000002:1:0:0:0] VSlotId# 1:1000:1002 PDiskGuid# 507859270459230832 2024-11-21T17:44:37.926244Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1002 PDiskGuid: 507859270459230832 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2024-11-21T17:44:37.926451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1002 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:44:37.927711Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1002 } } 2024-11-21T17:44:37.928431Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.928477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1002 PDiskGuid: 507859270459230832 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:44:37.928773Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:37.928814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1002 PDiskGuid: 507859270459230832 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:44:37.928869Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2024-11-21T17:44:37.929119Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 
18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2024-11-21T17:44:37.929173Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2024-11-21T17:44:37.929194Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 >> test.py::test[like-ilike_clause-default.txt-Analyze] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Debug] >> test.py::test[pg-pg_types_array_literal-default.txt-Results] [GOOD] >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> test.py::test[join-join_without_correlation_and_dict_access--Results] [GOOD] >> test.py::test[join-left_only_semi_and_other-off-Debug] [SKIPPED] >> test.py::test[join-left_only_semi_and_other-off-Plan] [SKIPPED] >> test.py::test[join-left_only_semi_and_other-off-Results] >> test.py::test[join-left_only_semi_and_other-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off-Debug] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_bug7646_subst-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Debug] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_inner_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o-off-Debug] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o-off-Plan] >> test.py::test[join-lookupjoin_semi_1o-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o-off-Debug] [SKIPPED] >> test.py::test[window-current/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-current/aggregations--Plan] [GOOD] >> test.py::test[window-current/aggregations--Results] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring |74.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[pg_duplicated-star_from_crossjoin-default.txt-Plan] [GOOD] >> test.py::test[join-lookupjoin_semi_2o-off-Plan] [SKIPPED] >> test.py::test[join-lookupjoin_semi_2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_empty--Debug] >> test.py::test[aggr_factory-count_if-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--Debug] [GOOD] >> test.py::test[aggregate-group_by_session_only--Plan] >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> 
test.py::test[aggr_factory-linear_histogram-default.txt-Debug] >> test.py::test[aggregate-group_by_session_only--Plan] [GOOD] >> test.py::test[aggregate-group_by_session_only--Results] >> test.py::test[schema-remap_desc--Results] [GOOD] >> test.py::test[schema-user_schema_override--Debug] >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Debug] >> test.py::test[schema-select_all-schema-Results] [GOOD] >> test.py::test[schema-select_all_inferschema2--Debug] |74.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[expr-cast_longint-default.txt-Results] [GOOD] >> test.py::test[pg-pg_column_case--Analyze] [GOOD] >> test.py::test[pg-pg_column_case--Debug] >> test.py::test[pg-uuid_from_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-uuid_from_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-uuid_from_pg-default.txt-Results] >> test.py::test[pg-wide_top_sort--ForceBlocks] [GOOD] >> test.py::test[pg-wide_top_sort--Plan] [GOOD] >> test.py::test[action-eval_range--Results] [GOOD] >> test.py::test[action-eval_skip_take--Debug] >> test.py::test[window-win_by_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_by_simple-default.txt-Plan] [GOOD] >> test.py::test[window-win_by_simple-default.txt-Results] >> test.py::test[pg-wide_top_sort--Results] >> test.py::test[blocks-pg_tofrom--Results] [GOOD] >> test.py::test[blocks-sort_two_mix--Debug] >> test.py::test[tpch-q9-default.txt-Debug] [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> test.py::test[tpch-q9-default.txt-Plan] [GOOD] >> test.py::test[tpch-q9-default.txt-Results] >> test.py::test[agg_phases-min_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-min_null-default.txt-Results] >> test.py::test[blocks-combine_all_count_filter_opt--Debug] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--ForceBlocks] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> test.py::test[pg-pg_interval_literal-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_interval_literal-default.txt-Plan] >> test.py::test[pg-pg_interval_literal-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_interval_literal-default.txt-Results] >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Debug] [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Plan] [GOOD] >> test.py::test[expr-cast_utf8-default.txt-Results] |74.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests2Inflight2BlobSize1000 >> test.py::test[join-lookupjoin_semi_empty--Debug] [GOOD] >> test.py::test[join-lookupjoin_semi_empty--Plan] [GOOD] >> TBlobStorageWardenTest::TestDeleteStoragePool ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2024-11-21T17:44:40.080059Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:44:40.080915Z node 1 :BS_NODE 
DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c70/r3tmp/tmpbdxAre/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:40.080968Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c70/r3tmp/tmpbdxAre/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:44:40.081267Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T17:44:40.081328Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:40.081534Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T17:44:40.081545Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:40.081643Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T17:44:40.081650Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:40.081733Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T17:44:40.081739Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:40.081816Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T17:44:40.081823Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 
2024-11-21T17:44:40.081963Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:44:2074] ControllerId# 72057594037932033 2024-11-21T17:44:40.081967Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:44:40.081981Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:44:40.082034Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:40.085378Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:40.085576Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:44:40.086034Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:40.086068Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T17:44:40.086197Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [2:91:2068] ControllerId# 72057594037932033 2024-11-21T17:44:40.086202Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:44:40.086215Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:44:40.086246Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:40.087032Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:40.087128Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:40.087227Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:40.144862Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:40.144880Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:40.145587Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:40.145604Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 
NumItemsRead# 0 2024-11-21T17:44:40.146139Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:40.146212Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:40.146218Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:40.146431Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:40.146482Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:40.146530Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:40.146536Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:44:40.146553Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:40.146590Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:40.146849Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:40.146889Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:40.146969Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:40.149889Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c70/r3tmp/tmpbdxAre/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 
Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:40.149962Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:40.150111Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T17:44:4 ... NoGroup# false 2024-11-21T17:44:40.767251Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2181038082 2024-11-21T17:44:40.767256Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 2181038082 2024-11-21T17:44:40.767303Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:44:40.767307Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T17:44:40.767319Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2024-11-21T17:44:40.767330Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2181038082 2024-11-21T17:44:40.767398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:29:2059] Cookie# 0 Recipient# [1:439:2377] RecipientRewrite# [1:396:2345] Request# {NodeID: 2 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T17:44:40.767415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 2181038082 } 2024-11-21T17:44:40.767506Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 14828001262275977914 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c70/r3tmp/tmpbdxAre//key.txt" EncryptedGroupKey: "\371\032OL\331\347\021L\326\312v\rN\274L\023\017)l\377\327\231R\276Q\"\333\336\221R\275\202\023O\025\330" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T17:44:40.767520Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 14828001262275977914 } } } 
EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c70/r3tmp/tmpbdxAre//key.txt" EncryptedGroupKey: "\371\032OL\331\347\021L\326\312v\rN\274L\023\017)l\377\327\231R\276Q\"\333\336\221R\275\202\023O\025\330" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T17:44:40.767536Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/6fwh/001c70/r3tmp/tmpbdxAre//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T17:44:40.767666Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T17:44:40.767671Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:44:40.767908Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:599:2104] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:40.767928Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:600:2105] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:40.767945Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:601:2106] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:40.767962Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:602:2107] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:40.767977Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:603:2108] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:40.767996Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:604:2109] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:40.768017Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:605:2110] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:40.768022Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:44:40.768084Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2024-11-21T17:44:40.768278Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:40.768316Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:40.768342Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:40.768353Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true 
ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:40.768407Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:40.768438Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:40.768445Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:40.768449Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T17:44:40.768452Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T17:44:40.768474Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [2:608:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T17:44:40.768479Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T17:44:40.768504Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:599:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 13616671405062297587 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T17:44:40.768786Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T17:44:40.768794Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2024-11-21T17:44:40.768841Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T17:44:40.768862Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T17:44:40.768920Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:609:2508] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T17:44:40.768943Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:40.768948Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:44:40.768956Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2024-11-21T17:44:40.768960Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2024-11-21T17:44:40.768975Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:40.769019Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2024-11-21T17:44:40.769053Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T17:44:40.769062Z node 1 :BS_PROXY_PUT ERROR: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2024-11-21T17:44:40.769068Z node 1 :BS_PROXY_PUT NOTICE: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:44:40.769121Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:599:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> test.py::test[pg-tpcds-q41-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] >> test.py::test[pg-uuid_from_pg-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_locks-default.txt-Debug] >> test.py::test[like-ilike_clause-default.txt-Debug] [GOOD] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi_empty--Results] >> test.py::test[window-win_func_first_last--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last--Plan] [GOOD] >> test.py::test[window-win_func_first_last--Results] |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |74.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] |74.0%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> test.py::test[pg-name--Debug] [GOOD] >> test.py::test[pg-name--Plan] [GOOD] >> test.py::test[pg-name--Results] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Debug] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Results] >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Debug] >> test.py::test[schema-user_schema_override--Debug] [GOOD] >> test.py::test[schema-user_schema_override--Plan] [GOOD] >> test.py::test[schema-user_schema_override--Results] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Plan] [GOOD] >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] >> TBlobStorageWardenTest::TestHttpMonPage |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> test.py::test[pg-pg_interval_literal-default.txt-Results] [GOOD] |74.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> test.py::test[pg-pg_types_orderby--Debug] [SKIPPED] >> test.py::test[pg-pg_types_orderby--Plan] >> test.py::test[window-win_by_simple-default.txt-Results] [GOOD] >> test.py::test[window-win_func_in_lib--Analyze] >> test.py::test[pg-pg_types_orderby--Plan] [SKIPPED] >> test.py::test[pg-pg_types_orderby--Results] [SKIPPED] >> test.py::test[pg-select_agg_gs_rollup-default.txt-Debug] |74.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[join-premap_common_multiparents_no_premap--Results] [GOOD] |74.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[insert-multiappend_sorted-default.txt-ForceBlocks] [GOOD] >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests2Inflight2BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight10BlobSize1000 >> test.py::test[expr-cast_utf8-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_semi_empty--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Debug] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Debug] >> test.py::test[pg-pg_column_case--Debug] [GOOD] >> test.py::test[pg-pg_column_case--ForceBlocks] >> test.py::test[pg-wide_top_sort--Results] [GOOD] >> test.py::test[pg-with_rec_distinct-default.txt-Analyze] >> test.py::test[pg-select_join_left_const-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_left_equi-default.txt-Debug] >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings >> test.py::test[schema-select_all_inferschema2--Debug] [GOOD] >> 
test.py::test[schema-select_all_inferschema2--Plan] >> test.py::test[pg_catalog-pg_locks-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_locks-default.txt-Plan] >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> test.py::test[schema-select_all_inferschema2--Plan] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Plan] [GOOD] >> test.py::test[blocks-combine_all_count_filter_opt--Results] >> test.py::test[schema-select_all_inferschema2--Results] >> test.py::test[pg_catalog-pg_locks-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_locks-default.txt-Results] >> test.py::test[pg-tpcds-q41-default.txt-ForceBlocks] [GOOD] |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |74.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |74.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[pg-pg_types_array_literal-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Plan] >> test.py::test[aggregate-group_by_session_extended--Debug] [GOOD] >> test.py::test[aggregate-group_by_session_extended--Plan] >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q41-default.txt-Results] >> test.py::test[action-eval_skip_take--Debug] [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> test.py::test[action-eval_skip_take--Plan] [GOOD] >> test.py::test[action-eval_skip_take--Results] >> test.py::test[aggr_factory-log_histogram-default.txt-Debug] [GOOD] >> test.py::test[schema-user_schema_override--Results] [GOOD] >> test.py::test[aggr_factory-log_histogram-default.txt-Plan] [GOOD] >> test.py::test[select-append_to_value--Debug] >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> test.py::test[aggregate-group_by_session_extended--Plan] [GOOD] >> test.py::test[aggregate-group_by_session_extended--Results] >> test.py::test[pg-name--Results] [GOOD] >> test.py::test[pg-pg_corr_offset-default.txt-Debug] >> test.py::test[like-ilike_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Plan] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] >> test.py::test[blocks-sort_two_mix--Debug] [GOOD] >> test.py::test[blocks-sort_two_mix--Plan] [GOOD] >> test.py::test[blocks-sort_two_mix--Results] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2024-11-21T17:44:42.196990Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:44:42.197867Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c5f/r3tmp/tmpHVaj9C/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 
PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:42.197939Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c5f/r3tmp/tmpHVaj9C/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:44:42.198258Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T17:44:42.198329Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:42.198543Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T17:44:42.198555Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:42.198674Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T17:44:42.198681Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:42.198758Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T17:44:42.198764Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:42.198845Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T17:44:42.198852Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T17:44:42.199006Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2024-11-21T17:44:42.199011Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:44:42.199027Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy 
GroupId# 4294967295 2024-11-21T17:44:42.199081Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:42.202663Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:42.202957Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:42.203016Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:42.203022Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:42.221822Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:42.221845Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:44:42.222358Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:42.222931Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:42.222994Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:42.225926Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c5f/r3tmp/tmpHVaj9C/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:42.226029Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:42.226199Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T17:44:42.226205Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:42.226225Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\362\234A\026\016\003O\206\206m\366\014\021C,4\315\337Me" } 2024-11-21T17:44:42.226265Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} 
StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T17:44:42.226275Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:70:2114] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:42.226285Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.004404s 2024-11-21T17:44:42.226343Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T17:44:42.226350Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:42.229616Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.230924Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.231455Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.232155Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.232265Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.232398Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.232585Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.232634Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:42.232848Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:42.232861Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:42.233014Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.233179Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:42.233384Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:42.233395Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:42.237129Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:44:42.243150Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:44:42.243522Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:44:42.243664Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# 
NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:44:42.246177Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:42.246195Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:42.246237Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:44:42.249009Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:44:42.249074Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:44:42.249114Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:44:42.249132Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsCo ... atus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 1 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:44:42.764525Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.764548Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 3 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:44:42.764576Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.764589Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } NodeId: 2 PDiskId: 0 VSlotId: 2 PDiskGuid: 1 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:44:42.764609Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } Success: true } 2024-11-21T17:44:42.764626Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 1 } 2024-11-21T17:44:42.764703Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 0 } } 2024-11-21T17:44:42.764797Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.767034Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.767090Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 1 } Success: true } 2024-11-21T17:44:42.767108Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } 2024-11-21T17:44:42.767187Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.767199Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 1 } } 2024-11-21T17:44:42.767282Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.768089Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 
2024-11-21T17:44:42.768114Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.768136Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } Success: true } 2024-11-21T17:44:42.768147Z node 2 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } 2024-11-21T17:44:42.768200Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 2 } } 2024-11-21T17:44:42.768276Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.768533Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.768558Z node 2 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } Success: true } 2024-11-21T17:44:42.768587Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.768596Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 0 VSlotId: 3 } } 2024-11-21T17:44:42.779692Z node 2 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:44:42.779825Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { PDisks { NodeID: 2 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/6fwh/001c5f/r3tmp/tmpGYcK5n/pdisk_1.dat" PDiskGuid: 12692889379126453751 PDiskCategory: 0 EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 12692889379126453751 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-1" } VDisks { VDiskID { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 12692889379126453751 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-2" } Groups { GroupID: 2181038080 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 12692889379126453751 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038080 MainKeyVersion: 0 StoragePoolName: "pool-1" DeviceType: ROT } Groups { GroupID: 2181038081 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 12692889379126453751 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038081 MainKeyVersion: 0 StoragePoolName: "pool-2" DeviceType: ROT } } InstanceId: "576a1352-69d8c85e-d236736-38643c4b" AvailDomain: 1 } 2024-11-21T17:44:42.779867Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 2 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/6fwh/001c5f/r3tmp/tmpGYcK5n/pdisk_1.dat" PDiskGuid: 12692889379126453751 PDiskCategory: 0 
EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 12692889379126453751 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-1" } VDisks { VDiskID { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 12692889379126453751 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "pool-2" } Groups { GroupID: 2181038080 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 12692889379126453751 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038080 MainKeyVersion: 0 StoragePoolName: "pool-1" DeviceType: ROT } Groups { GroupID: 2181038081 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1001 PDiskGuid: 12692889379126453751 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2181038081 MainKeyVersion: 0 StoragePoolName: "pool-2" DeviceType: ROT } } 2024-11-21T17:44:42.779912Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 2 PDiskId# 1000 Path# "/home/runner/.ya/build/build_root/6fwh/001c5f/r3tmp/tmpGYcK5n/pdisk_1.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:44:42.780078Z node 2 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000000:1:0:0:0] VSlotId# 2:1000:1000 PDiskGuid# 12692889379126453751 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:42.780209Z node 2 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [82000000:1:0:0:0] VSlotId# 2:1000:1000 PDiskGuid# 12692889379126453751 2024-11-21T17:44:42.780223Z node 2 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [82000001:1:0:0:0] VSlotId# 2:1000:1001 PDiskGuid# 12692889379126453751 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:42.780339Z node 2 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [82000001:1:0:0:0] VSlotId# 2:1000:1001 PDiskGuid# 12692889379126453751 2024-11-21T17:44:42.829698Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 12692889379126453751 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 12692889379126453751 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2024-11-21T17:44:42.830165Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:44:42.830192Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 
Domain: 0 VDisk: 0 } VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1001 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:44:42.831890Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1000 } } 2024-11-21T17:44:42.831950Z node 2 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 2 PDiskId: 1000 VSlotId: 1001 } } 2024-11-21T17:44:42.833259Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.833323Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.833369Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 12692889379126453751 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:44:42.833734Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 12692889379126453751 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:44:42.833827Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.833842Z node 2 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:42.833871Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 12692889379126453751 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:44:42.833906Z node 2 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 2 PDiskId: 1000 VSlotId: 1001 PDiskGuid: 12692889379126453751 Status: READY OnlyPhantomsRemain: false } } >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests100Inflight10BlobSize1000 >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[window-win_peephole_double_usage-default.txt-Analyze] >> test.py::test[window-current/aggregations--Results] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Analyze] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] Test command err: Disable nodeId# 67 Pick Pick Enable nodeId# 67 Pick Delete nodeId# 14 Delete nodeId# 8 Pick Delete nodeId# 50 Pick Disable nodeId# 29 Delete nodeId# 82 Enable nodeId# 29 Delete nodeId# 67 Add nodeId# 101 Delete nodeId# 100 Pick Pick Add nodeId# 102 Delete nodeId# 75 Delete nodeId# 62 Delete nodeId# 95 Delete nodeId# 36 Add nodeId# 103 Pick Add nodeId# 104 Pick Delete nodeId# 51 Disable nodeId# 47 Delete nodeId# 49 Pick Enable nodeId# 47 Delete nodeId# 81 Delete nodeId# 10 Add nodeId# 105 Add nodeId# 106 Disable nodeId# 2 Add nodeId# 107 Enable nodeId# 2 Pick Delete nodeId# 60 Pick Delete nodeId# 77 Pick Disable nodeId# 70 Delete nodeId# 58 Add 
nodeId# 108 Disable nodeId# 56 Delete nodeId# 79 Add nodeId# 109 Disable nodeId# 94 Delete nodeId# 37 Disable nodeId# 6 Enable nodeId# 94 Enable nodeId# 70 Delete nodeId# 35 Pick Disable nodeId# 64 Pick Disable nodeId# 72 Add nodeId# 110 Pick Enable nodeId# 56 Delete nodeId# 106 Enable nodeId# 72 Disable nodeId# 57 Disable nodeId# 108 Pick Enable nodeId# 64 Enable nodeId# 108 Delete nodeId# 78 Pick Disable nodeId# 64 Disable nodeId# 73 Add nodeId# 111 Pick Disable nodeId# 9 Disable nodeId# 109 Disable nodeId# 105 Add nodeId# 112 Add nodeId# 113 Delete nodeId# 105 Add nodeId# 114 Pick Pick Add nodeId# 115 Delete nodeId# 114 Delete nodeId# 61 Enable nodeId# 73 Enable nodeId# 109 Add nodeId# 116 Pick Add nodeId# 117 Enable nodeId# 9 Enable nodeId# 6 Pick Pick Add nodeId# 118 Delete nodeId# 20 Enable nodeId# 57 Enable nodeId# 64 Add nodeId# 119 Add nodeId# 120 Disable nodeId# 71 Add nodeId# 121 Pick Delete nodeId# 28 Disable nodeId# 59 Pick Enable nodeId# 59 Disable nodeId# 107 Add nodeId# 122 Enable nodeId# 71 Enable nodeId# 107 Delete nodeId# 90 Pick Add nodeId# 123 Add nodeId# 124 Add nodeId# 125 Pick Pick Delete nodeId# 83 Disable nodeId# 43 Delete nodeId# 125 Pick Disable nodeId# 21 Delete nodeId# 11 Delete nodeId# 55 Disable nodeId# 123 Pick Disable nodeId# 63 Disable nodeId# 26 Disable nodeId# 12 Delete nodeId# 111 Disable nodeId# 66 Add nodeId# 126 Enable nodeId# 43 Disable nodeId# 54 Pick Pick Delete nodeId# 30 Disable nodeId# 98 Pick Disable nodeId# 39 Pick Add nodeId# 127 Pick Enable nodeId# 39 Add nodeId# 128 Pick Enable nodeId# 21 Enable nodeId# 98 Disable nodeId# 70 Pick Pick Pick Pick Enable nodeId# 26 Pick Add nodeId# 129 Add nodeId# 130 Add nodeId# 131 Enable nodeId# 12 Add nodeId# 132 Delete nodeId# 96 Delete nodeId# 16 Pick Enable nodeId# 63 Pick Disable nodeId# 98 Disable nodeId# 19 Disable nodeId# 91 Add nodeId# 133 Delete nodeId# 68 Delete nodeId# 43 Delete nodeId# 45 Disable nodeId# 29 Enable nodeId# 54 Disable nodeId# 87 Enable nodeId# 19 Add nodeId# 134 Delete nodeId# 22 Disable nodeId# 40 Delete nodeId# 101 Add nodeId# 135 Pick Disable nodeId# 86 Add nodeId# 136 Delete nodeId# 117 Add nodeId# 137 Disable nodeId# 64 Add nodeId# 138 Disable nodeId# 21 Pick Pick Disable nodeId# 115 Pick Enable nodeId# 66 Add nodeId# 139 Delete nodeId# 56 Disable nodeId# 76 Delete nodeId# 26 Add nodeId# 140 Enable nodeId# 76 Delete nodeId# 122 Pick Disable nodeId# 2 Add nodeId# 141 Enable nodeId# 2 Delete nodeId# 130 Pick Delete nodeId# 91 Pick Enable nodeId# 70 Add nodeId# 142 Delete nodeId# 41 Add nodeId# 143 Delete nodeId# 102 Pick Enable nodeId# 98 Disable nodeId# 23 Add nodeId# 144 Enable nodeId# 64 Enable nodeId# 23 Disable nodeId# 89 Disable nodeId# 98 Pick Disable nodeId# 65 Enable nodeId# 115 Pick Add nodeId# 145 Add nodeId# 146 Pick Delete nodeId# 19 Disable nodeId# 137 Disable nodeId# 38 Enable nodeId# 87 Disable nodeId# 139 Delete nodeId# 7 Pick Disable nodeId# 144 Pick Add nodeId# 147 Add nodeId# 148 Disable nodeId# 146 Delete nodeId# 140 Disable nodeId# 33 Enable nodeId# 139 Disable nodeId# 121 Disable nodeId# 109 Delete nodeId# 141 Add nodeId# 149 Enable nodeId# 98 Enable nodeId# 121 Add nodeId# 150 Disable nodeId# 119 Add nodeId# 151 Enable nodeId# 146 Enable nodeId# 38 Delete nodeId# 108 Disable nodeId# 135 Pick Pick Add nodeId# 152 Delete nodeId# 123 Enable nodeId# 135 Pick Add nodeId# 153 Add nodeId# 154 Pick Add nodeId# 155 Delete nodeId# 103 Add nodeId# 156 Enable nodeId# 89 Delete nodeId# 33 Add nodeId# 157 Enable nodeId# 86 Delete nodeId# 128 Disable nodeId# 146 
Pick Delete nodeId# 70 Disable nodeId# 134 Pick Delete nodeId# 135 Enable nodeId# 21 Delete nodeId# 34 Disable nodeId# 76 Pick Delete nodeId# 52 Disable nodeId# 136 Pick Disable nodeId# 27 Delete nodeId# 48 Enable nodeId# 146 Pick Enable nodeId# 29 Delete nodeId# 157 Pick Enable nodeId# 134 Enable nodeId# 40 Delete nodeId# 13 Pick Add nodeId# 158 Disable nodeId# 86 Pick Pick Disable nodeId# 156 Disable nodeId# 53 Enable nodeId# 136 Enable nodeId# 119 Enable nodeId# 76 Enable nodeId# 156 Delete nodeId# 76 Pick Disable nodeId# 73 Enable nodeId# 73 Add nodeId# 159 Enable nodeId# 137 Disable nodeId# 152 Disable nodeId# 149 Enable nodeId# 27 Disable nodeId# 143 Delete nodeId# 1 Enable nodeId# 86 Add nodeId# 160 Delete nodeId# 59 Add nodeId# 161 Delete nodeId# 38 Delete nodeId# 116 Pick Disable nodeId# 126 Delete nodeId# 4 Disable nodeId# 84 Pick Enable nodeId# 144 Disable nodeId# 159 Pick Delete nodeId# 5 Add nodeId# 162 Disable nodeId# 129 Disable nodeId# 23 Disable nodeId# 146 Enable nodeId# 143 Add nodeId# 163 Add nodeId# 164 Add nodeId# 165 Pick Delete nodeId# 54 Add nodeId# 166 Pick Add nodeId# 167 Add nodeId# 168 Add nodeId# 169 Pick Disable nodeId# 148 Add nodeId# 170 Delete nodeId# 149 Disable nodeId# 98 Disable nodeId# 47 Disable nodeId# 153 Enable nodeId# 148 Enable nodeId# 65 Delete nodeId# 25 Disable nodeId# 104 Enable nodeId# 109 Delete nodeId# 87 Disable nodeId# 119 Pick Disable nodeId# 97 Disable nodeId# 120 Disable nodeId# 32 Disable nodeId# 74 Disable nodeId# 148 Add nodeId# 171 Disable nodeId# 156 Add nodeId# 172 Disable nodeId# 133 Disable nodeId# 29 Pick Disable nodeId# 115 Disable nodeId# 6 Delete nodeId# 170 Disable nodeId# 3 Pick Delete nodeId# 144 Enable nodeId# 74 Enable nodeId# 53 Pick Add nodeId# 173 Delete nodeId# 66 Delete nodeId# 110 Disable nodeId# 18 Pick Disable nodeId# 147 Disable nodeId# 163 Pick Delete nodeId# 129 Pick Disable nodeId# 139 Enable nodeId# 115 Disable nodeId# 65 Delete nodeId# 85 Add nodeId# 174 Enable nodeId# 146 Disable nodeId# 69 Add nodeId# 175 Pick Enable nodeId# 69 Add nodeId# 176 Add nodeId# 177 Add nodeId# 178 Disable nodeId# 158 Enable nodeId# 3 Disable nodeId# 109 Enable nodeId# 148 Pick Pick Add nodeId# 179 Pick Add nodeId# 180 Enable nodeId# 6 Disable nodeId# 17 Disable nodeId# 151 Add nodeId# 181 Pick Delete nodeId# 146 Disable nodeId# 148 Delete nodeId# 169 Enable nodeId# 158 Disable nodeId# 137 Disable nodeId# 131 Delete nodeId# 118 Enable nodeId# 32 Enable nodeId# 147 Pick Add nodeId# 182 Pick Add nodeId# 183 Add nodeId# 184 Pick Add nodeId# 185 Pick Delete nodeId# 177 Disable nodeId# 40 Add nodeId# 186 Pick Disable nodeId# 74 Enable nodeId# 133 Enable nodeId# 40 Delete nodeId# 57 Delete nodeId# 124 Disable nodeId# 89 Add nodeId# 187 Enable nodeId# 98 Add nodeId# 188 Pick Pick Pick Delete nodeId# 12 Delete nodeId# 69 Disable nodeId# 127 Disable nodeId# 187 Add nodeId# 189 Pick Pick Delete nodeId# 2 Enable nodeId# 120 Delete nodeId# 71 Disable nodeId# 154 Enable nodeId# 159 Disable nodeId# 73 Add nodeId# 190 Disable nodeId# 3 Add nodeId# 191 Delete nodeId# 158 Delete nodeId# 138 Disable nodeId# 92 Add nodeId# 192 Delete nodeId# 42 Pick Enable nodeId# 153 Enable nodeId# 3 Pick Delete nodeId# 161 Enable nodeId# 131 Add nodeId# 193 Add nodeId# 194 Pick Add nodeId# 195 Delete nodeId# 97 Pick Delete nodeId# 174 Enable nodeId# 84 Disable nodeId# 179 Enable nodeId# 17 Add nodeId# 196 Disable nodeId# 107 Enable nodeId# 65 Pick Disable nodeId# 131 Pick Disable nodeId# 162 Delete nodeId# 137 Delete nodeId# 160 Disable nodeId# 168 Delete 
nodeId# 194 Delete nodeId# 151 Delete nodeId# 145 Delete nodeId# 46 Enable nodeId# 74 Enable nodeId# 29 Add nodeId# 197 Disable nodeId# 189 Delete nodeId# 17 Disable nodeId# 192 Add nodeId# 198 Enable nodeId# 73 Pick Pick Pick Delete nodeId# 47 Add nodeId# 199 Delete nodeId# 142 Enable nodeId# 168 Delete nodeId# 39 Pick Delete nodeId# 119 Pick Add nodeId# 200 Enable nodeId# 127 Add nodeId# 201 Add nodeId# 202 Disable nodeId# 133 Disable nodeId# 53 Pick Delete nodeId# 65 Add nodeId# 203 Add nodeId# 204 Enable nodeId# 92 Delete nodeId# 89 Add nodeId# 205 Add nodeId# 206 Add nodeId# 207 Delete nodeId# 168 Pick Pick Pick Enable nodeId# 104 Disable nodeId# 3 Delete nodeId# 40 Delete nodeId# 126 Pick Enable nodeId# 133 Disable nodeId# 29 Pick Delete nodeId# 152 Delete nodeId# 98 Disable nodeId# 200 Add nodeId# 208 Delete nodeId# 198 Delete nodeId# 21 Enable nodeId# 148 Add nodeId# 209 Pick Disable nodeId# 72 Enable nodeId# 179 Disable nodeId# 178 Disable nodeId# 181 Disable nodeId# 93 Delete nodeId# 159 Disable nodeId# 63 Delete nodeId# 209 Disable nodeId# 197 Disable nodeId# 182 Add nodeId# 210 Disable nodeId# 153 Disable nodeId# 32 Pick Add nodeId# 211 Add nodeId# 212 Pick Disable nodeId# 201 Pick Add nodeId# 213 Delete nodeId# 162 Add nodeId# 214 Delete nodeId# 155 Delete nodeId# 204 Add nodeId# 215 Disable nodeId# 86 Pick Enable nodeId# 139 Enable nodeId# 197 Delete nodeId# 167 Enable nodeId# 200 Delete nodeId# 183 Disable nodeId# 200 Add nodeId# 216 Pick Disable nodeId# 99 Enable nodeId# 3 Add nodeId# 217 Delete nodeId# 63 Delete nodeId# 24 Add nodeId# 218 Delete nodeId# 206 Add nodeId# 219 Pick Disable nodeId# 80 Delete nodeId# 203 Pick Add nodeId# 220 Pick Add nodeId# 221 Add nodeId# 222 Add nodeId# 223 Pick Enable nodeId# 201 Disable nodeId# 148 Add nodeId# 224 Enable nodeId# 182 Pick Disable nodeId# 201 Delete nodeId# 182 Add nodeId# 225 Pick Enable nodeId# 53 Delete nodeId# 134 Enable nodeId# 23 Enable nodeId# 154 Enable nodeId# 192 Pick Pick Delete nodeId# 207 Enable nodeId# 181 Delete nodeId# 136 Enable nodeId# 32 Disable nodeId# 188 Add nodeId# 226 Pick Enable nodeId# 187 Add nodeId# 227 Add nodeId# 228 Enable nodeId# 148 Delete nodeId# 217 Enable nodeId# 80 Delete nodeId# 223 Disable nodeId# 127 Disable nodeId# 192 Add nodeId# 229 Pick Add nodeId# 230 Enable nodeId# 93 Pick Delete nodeId# 6 Enable nodeId# 18 Delete nodeId# 127 Pick Pick Disable nodeId# 172 Add nodeId# 231 Disable nodeId# 93 Enable nodeId# 29 Disable nodeId# 112 Disable nodeId# 230 Add nodeId# 232 Delete nodeId# 185 Pick Disable nodeId# 229 Enable nodeId# 189 Delete nodeId# 202 Enable nodeId# 99 Pick Delete nodeId# 186 Enable nodeId# 229 Disable nodeId# 143 Delete nodeId# 154 Enable nodeId# 200 Delete nodeId# 133 Pick Enable nodeId# 178 Add nodeId# 233 Add nodeId# 234 Add nodeId# 235 Delete nodeId# 179 Disable nodeId# 212 Disable nodeId# 199 Enable nodeId# 112 Add nodeId# 236 Pick Disable nodeId# 165 Add nodeId# 237 Delete nodeId# 195 Delete nodeId# 31 Pick Enable nodeId# 107 Pick Disable nodeId# 94 Enable nodeId# 131 Delete nodeId# 199 Delete nodeId# 9 Add nodeId# 238 Disable nodeId# 171 Disable nodeId# 187 Pick Delete nodeId# 143 Pick Disable nodeId# 200 Add nodeId# 239 Add nodeId# 240 Delete nodeId# 139 Enable nodeId# 187 Add nodeId# 241 Disable nodeId# 29 Delete nodeId# 175 Delete nodeId# 241 Add nodeId# 242 Delete nodeId# 211 Disable nodeId# 23 Pick Add nodeId# 243 Delete nodeId# 163 Pick Enable nodeId# 29 Add nodeId# 244 Pick Delete nodeId# 109 Add nodeId# 245 Delete nodeId# 234 Disable nodeId# 32 Add nodeId# 
246 Pick Enable nodeId# 94 Enable nodeId# 230 Enable nodeId# 171 Add nodeId# 247 Delete nodeId# 32 Pick Enable nodeId# 93 Pick Delete nodeId# 227 Delete nodeId# 181 Pick Disable nodeId# 115 Delete nodeId# 221 Add nodeId# 248 Disable nodeId# 80 Delete nodeId# 18 Disable nodeId# 148 Delete nodeId# 172 Delete nodeId# 184 Delete nodeId# 197 Pick Delete nodeId# 201 Pick Add nodeId# 249 Pick Disable nodeId# 112 Disable nodeId# 64 Disable nodeId# 214 Disable nodeId# 239 Add nodeId# 250 Enable nodeId# 23 Enable nodeId# 80 Disable nodeId# 247 Enable nodeId# 148 Add nodeId# 251 Add nodeId# 252 Add nodeId# 253 Enable nodeId# 188 Disable nodeId# 132 Add nodeId# 254 Enable nodeId# 115 Pick Delete ... d# 20261 Enable nodeId# 20261 Pick Pick Enable nodeId# 20247 Delete nodeId# 20020 Enable nodeId# 20223 Disable nodeId# 20278 Delete nodeId# 20277 Enable nodeId# 20010 Delete nodeId# 20213 Add nodeId# 20292 Disable nodeId# 20194 Enable nodeId# 20237 Delete nodeId# 20273 Delete nodeId# 20264 Enable nodeId# 20201 Pick Disable nodeId# 20259 Delete nodeId# 20147 Enable nodeId# 20278 Delete nodeId# 20267 Add nodeId# 20293 Enable nodeId# 20194 Disable nodeId# 20237 Pick Pick Disable nodeId# 20228 Delete nodeId# 20259 Enable nodeId# 20228 Delete nodeId# 20281 Disable nodeId# 20279 Pick Delete nodeId# 20058 Pick Delete nodeId# 20107 Add nodeId# 20294 Add nodeId# 20295 Disable nodeId# 20227 Pick Disable nodeId# 20270 Add nodeId# 20296 Add nodeId# 20297 Add nodeId# 20298 Disable nodeId# 20272 Pick Enable nodeId# 20270 Pick Pick Delete nodeId# 20048 Enable nodeId# 20214 Enable nodeId# 20227 Pick Add nodeId# 20299 Delete nodeId# 20067 Disable nodeId# 20187 Pick Add nodeId# 20300 Add nodeId# 20301 Pick Add nodeId# 20302 Disable nodeId# 20119 Add nodeId# 20303 Enable nodeId# 20119 Enable nodeId# 20237 Delete nodeId# 20296 Disable nodeId# 20196 Delete nodeId# 20303 Add nodeId# 20304 Add nodeId# 20305 Pick Disable nodeId# 20231 Disable nodeId# 20302 Enable nodeId# 20302 Disable nodeId# 20171 Disable nodeId# 20291 Disable nodeId# 20293 Pick Add nodeId# 20306 Add nodeId# 20307 Enable nodeId# 20196 Pick Enable nodeId# 20293 Add nodeId# 20308 Delete nodeId# 20288 Disable nodeId# 20176 Delete nodeId# 20255 Disable nodeId# 20196 Delete nodeId# 20282 Add nodeId# 20309 Enable nodeId# 20291 Enable nodeId# 20176 Add nodeId# 20310 Delete nodeId# 20210 Pick Enable nodeId# 20171 Delete nodeId# 20305 Add nodeId# 20311 Delete nodeId# 20155 Add nodeId# 20312 Pick Enable nodeId# 20231 Enable nodeId# 20187 Delete nodeId# 20010 Delete nodeId# 20149 Add nodeId# 20313 Enable nodeId# 20272 Enable nodeId# 20279 Pick Enable nodeId# 20196 Delete nodeId# 20225 Add nodeId# 20314 Delete nodeId# 20289 Disable nodeId# 20251 Disable nodeId# 20268 Disable nodeId# 20271 Enable nodeId# 20268 Delete nodeId# 20276 Add nodeId# 20315 Pick Pick Pick Delete nodeId# 20306 Enable nodeId# 20271 Add nodeId# 20316 Add nodeId# 20317 Add nodeId# 20318 Disable nodeId# 20176 Add nodeId# 20319 Disable nodeId# 20221 Delete nodeId# 20244 Pick Disable nodeId# 20310 Add nodeId# 20320 Pick Delete nodeId# 20261 Enable nodeId# 20176 Add nodeId# 20321 Add nodeId# 20322 Add nodeId# 20323 Delete nodeId# 20249 Pick Delete nodeId# 20322 Pick Add nodeId# 20324 Add nodeId# 20325 Delete nodeId# 20279 Add nodeId# 20326 Add nodeId# 20327 Enable nodeId# 20221 Enable nodeId# 20310 Disable nodeId# 20221 Add nodeId# 20328 Enable nodeId# 20251 Delete nodeId# 20316 Pick Add nodeId# 20329 Pick Disable nodeId# 20327 Disable nodeId# 20319 Delete nodeId# 20187 Disable nodeId# 20270 Delete nodeId# 
20239 Delete nodeId# 20271 Add nodeId# 20330 Add nodeId# 20331 Enable nodeId# 20270 Enable nodeId# 20327 Pick Enable nodeId# 20319 Delete nodeId# 20246 Delete nodeId# 20221 Add nodeId# 20332 Disable nodeId# 20299 Delete nodeId# 20328 Pick Enable nodeId# 20299 Disable nodeId# 20278 Pick Pick Add nodeId# 20333 Enable nodeId# 20278 Delete nodeId# 20169 Add nodeId# 20334 Add nodeId# 20335 Delete nodeId# 20304 Delete nodeId# 20275 Pick Disable nodeId# 20203 Add nodeId# 20336 Add nodeId# 20337 Delete nodeId# 20314 Add nodeId# 20338 Enable nodeId# 20203 Add nodeId# 20339 Disable nodeId# 20222 Enable nodeId# 20222 Delete nodeId# 20194 Delete nodeId# 20263 Delete nodeId# 20330 Delete nodeId# 20329 Pick Add nodeId# 20340 Pick Disable nodeId# 20333 Delete nodeId# 20057 Pick Add nodeId# 20341 Pick Delete nodeId# 20283 Delete nodeId# 20313 Pick Enable nodeId# 20333 Pick Delete nodeId# 20247 Add nodeId# 20342 Pick Delete nodeId# 20327 Disable nodeId# 20176 Pick Disable nodeId# 20292 Pick Disable nodeId# 20237 Delete nodeId# 20287 Enable nodeId# 20176 Add nodeId# 20343 Pick Enable nodeId# 20237 Delete nodeId# 20293 Disable nodeId# 20217 Disable nodeId# 20339 Add nodeId# 20344 Delete nodeId# 20340 Add nodeId# 20345 Disable nodeId# 20228 Pick Pick Enable nodeId# 20228 Delete nodeId# 20005 Delete nodeId# 20176 Pick Disable nodeId# 20317 Pick Delete nodeId# 20294 Enable nodeId# 20317 Disable nodeId# 20338 Disable nodeId# 20274 Pick Enable nodeId# 20339 Enable nodeId# 20338 Enable nodeId# 20274 Disable nodeId# 20300 Add nodeId# 20346 Delete nodeId# 20331 Enable nodeId# 20292 Disable nodeId# 20307 Disable nodeId# 20269 Add nodeId# 20347 Delete nodeId# 20237 Add nodeId# 20348 Disable nodeId# 20326 Pick Pick Delete nodeId# 20292 Enable nodeId# 20217 Disable nodeId# 20196 Disable nodeId# 20311 Pick Add nodeId# 20349 Enable nodeId# 20196 Delete nodeId# 20272 Disable nodeId# 20338 Delete nodeId# 20227 Pick Pick Pick Disable nodeId# 20302 Delete nodeId# 20231 Disable nodeId# 20138 Delete nodeId# 20214 Add nodeId# 20350 Disable nodeId# 20203 Delete nodeId# 20349 Pick Delete nodeId# 20270 Enable nodeId# 20311 Pick Enable nodeId# 20203 Add nodeId# 20351 Delete nodeId# 20295 Enable nodeId# 20326 Add nodeId# 20352 Add nodeId# 20353 Add nodeId# 20354 Pick Pick Enable nodeId# 20138 Pick Disable nodeId# 20317 Delete nodeId# 20334 Add nodeId# 20355 Delete nodeId# 20317 Add nodeId# 20356 Enable nodeId# 20269 Enable nodeId# 20300 Pick Enable nodeId# 20302 Add nodeId# 20357 Pick Add nodeId# 20358 Add nodeId# 20359 Delete nodeId# 20335 Disable nodeId# 20297 Disable nodeId# 20350 Delete nodeId# 20299 Delete nodeId# 20309 Add nodeId# 20360 Pick Pick Enable nodeId# 20307 Enable nodeId# 20297 Enable nodeId# 20350 Pick Delete nodeId# 20138 Pick Pick Delete nodeId# 20222 Delete nodeId# 20251 Disable nodeId# 20307 Pick Disable nodeId# 20332 Enable nodeId# 20338 Delete nodeId# 20326 Enable nodeId# 20332 Pick Pick Add nodeId# 20361 Pick Add nodeId# 20362 Enable nodeId# 20307 Pick Delete nodeId# 20195 Pick Pick Pick Add nodeId# 20363 Pick Disable nodeId# 20308 Add nodeId# 20364 Disable nodeId# 20319 Add nodeId# 20365 Add nodeId# 20366 Pick Pick Enable nodeId# 20308 Pick Pick Pick Delete nodeId# 20196 Add nodeId# 20367 Enable nodeId# 20319 Disable nodeId# 20336 Pick Pick Delete nodeId# 20352 Delete nodeId# 20362 Disable nodeId# 20360 Delete nodeId# 20165 Add nodeId# 20368 Delete nodeId# 20312 Disable nodeId# 20366 Disable nodeId# 20324 Disable nodeId# 20269 Delete nodeId# 20217 Add nodeId# 20369 Add nodeId# 20370 Enable nodeId# 20324 
Disable nodeId# 20245 Add nodeId# 20371 Add nodeId# 20372 Delete nodeId# 20278 Delete nodeId# 20357 Disable nodeId# 20302 Disable nodeId# 20343 Add nodeId# 20373 Pick Pick Disable nodeId# 20318 Delete nodeId# 20324 Pick Delete nodeId# 20345 Enable nodeId# 20318 Add nodeId# 20374 Add nodeId# 20375 Disable nodeId# 20143 Add nodeId# 20376 Delete nodeId# 20119 Pick Disable nodeId# 20297 Add nodeId# 20377 Disable nodeId# 20377 Add nodeId# 20378 Enable nodeId# 20360 Disable nodeId# 20310 Enable nodeId# 20310 Add nodeId# 20379 Enable nodeId# 20302 Pick Enable nodeId# 20143 Add nodeId# 20380 Delete nodeId# 20226 Enable nodeId# 20269 Add nodeId# 20381 Enable nodeId# 20297 Enable nodeId# 20343 Delete nodeId# 20375 Delete nodeId# 20262 Pick Enable nodeId# 20377 Delete nodeId# 20171 Pick Pick Enable nodeId# 20245 Pick Pick Disable nodeId# 20274 Delete nodeId# 20325 Disable nodeId# 20321 Disable nodeId# 20269 Add nodeId# 20382 Disable nodeId# 20338 Enable nodeId# 20366 Add nodeId# 20383 Add nodeId# 20384 Enable nodeId# 20338 Enable nodeId# 20336 Add nodeId# 20385 Pick Add nodeId# 20386 Enable nodeId# 20274 Add nodeId# 20387 Enable nodeId# 20321 Add nodeId# 20388 Add nodeId# 20389 Disable nodeId# 20351 Delete nodeId# 20343 Enable nodeId# 20269 Pick Delete nodeId# 20319 Enable nodeId# 20351 Add nodeId# 20390 Disable nodeId# 20358 Enable nodeId# 20358 Delete nodeId# 20223 Pick Disable nodeId# 20389 Delete nodeId# 20386 Enable nodeId# 20389 Delete nodeId# 20310 Pick Delete nodeId# 20363 Add nodeId# 20391 Disable nodeId# 20364 Delete nodeId# 20346 Disable nodeId# 20269 Enable nodeId# 20364 Pick Disable nodeId# 20378 Delete nodeId# 20384 Disable nodeId# 20361 Delete nodeId# 20372 Delete nodeId# 20366 Delete nodeId# 20201 Add nodeId# 20392 Disable nodeId# 20392 Pick Add nodeId# 20393 Disable nodeId# 20300 Add nodeId# 20394 Pick Enable nodeId# 20378 Pick Add nodeId# 20395 Pick Enable nodeId# 20269 Enable nodeId# 20300 Pick Delete nodeId# 20353 Pick Delete nodeId# 20290 Add nodeId# 20396 Delete nodeId# 20392 Disable nodeId# 20389 Add nodeId# 20397 Add nodeId# 20398 Enable nodeId# 20361 Enable nodeId# 20389 Delete nodeId# 20143 Disable nodeId# 20382 Disable nodeId# 20337 Enable nodeId# 20382 Delete nodeId# 20374 Pick Delete nodeId# 20365 Pick Enable nodeId# 20337 Add nodeId# 20399 Add nodeId# 20400 Disable nodeId# 20332 Add nodeId# 20401 Disable nodeId# 20321 Delete nodeId# 20395 Disable nodeId# 20364 Pick Enable nodeId# 20332 Delete nodeId# 20320 Disable nodeId# 20332 Delete nodeId# 20291 Enable nodeId# 20321 Add nodeId# 20402 Enable nodeId# 20332 Add nodeId# 20403 Delete nodeId# 20238 Add nodeId# 20404 Delete nodeId# 20308 Delete nodeId# 20336 Pick Delete nodeId# 20377 Delete nodeId# 20358 Delete nodeId# 20339 Pick Disable nodeId# 20301 Disable nodeId# 20332 Pick Disable nodeId# 20298 Disable nodeId# 20359 Pick Delete nodeId# 20268 Add nodeId# 20405 Add nodeId# 20406 Disable nodeId# 20342 Add nodeId# 20407 Disable nodeId# 20274 Delete nodeId# 20356 Delete nodeId# 20351 Delete nodeId# 20407 Disable nodeId# 20258 Disable nodeId# 20203 Delete nodeId# 20285 Add nodeId# 20408 Enable nodeId# 20359 Delete nodeId# 20301 Enable nodeId# 20342 Delete nodeId# 20403 Enable nodeId# 20258 Enable nodeId# 20364 Pick Delete nodeId# 20398 Add nodeId# 20409 Add nodeId# 20410 Disable nodeId# 20394 Delete nodeId# 20338 Pick Add nodeId# 20411 Enable nodeId# 20394 Delete nodeId# 20387 Delete nodeId# 20298 Enable nodeId# 20203 Delete nodeId# 20370 Pick Enable nodeId# 20274 Add nodeId# 20412 Delete nodeId# 20399 Pick Add nodeId# 20413 
Disable nodeId# 20404 Delete nodeId# 20337 Enable nodeId# 20404 Pick Add nodeId# 20414 Pick Pick Pick Delete nodeId# 20300 Pick Disable nodeId# 20280 Pick Delete nodeId# 20333 Enable nodeId# 20280 Add nodeId# 20415 Delete nodeId# 20354 Disable nodeId# 20412 Add nodeId# 20416 Pick Pick Pick Enable nodeId# 20412 Delete nodeId# 20390 Pick Disable nodeId# 20401 Disable nodeId# 20406 Delete nodeId# 20416 Pick Disable nodeId# 20297 Add nodeId# 20417 Delete nodeId# 20111 Disable nodeId# 20369 Disable nodeId# 20389 Delete nodeId# 20371 Enable nodeId# 20332 Enable nodeId# 20297 Disable nodeId# 20258 Disable nodeId# 20381 Pick Pick Disable nodeId# 20378 Delete nodeId# 20315 Delete nodeId# 20388 Delete nodeId# 20368 Add nodeId# 20418 Pick Pick Pick Pick Disable nodeId# 20307 Delete nodeId# 20361 Delete nodeId# 20381 Delete nodeId# 20373 Enable nodeId# 20258 Enable nodeId# 20369 Disable nodeId# 20216 Delete nodeId# 20348 Delete nodeId# 20402 Pick Enable nodeId# 20389 Enable nodeId# 20216 Enable nodeId# 20307 Disable nodeId# 20367 Add nodeId# 20419 Disable nodeId# 20396 Disable nodeId# 20410 Add nodeId# 20420 Pick Add nodeId# 20421 Disable nodeId# 20332 Add nodeId# 20422 Add nodeId# 20423 Disable nodeId# 20258 Add nodeId# 20424 Disable nodeId# 20417 Delete nodeId# 20274 Add nodeId# 20425 Add nodeId# 20426 Add nodeId# 20427 Enable nodeId# 20401 Add nodeId# 20428 Pick Add nodeId# 20429 Delete nodeId# 20280 Pick Delete nodeId# 20344 Disable nodeId# 20380 Disable nodeId# 20350 Enable nodeId# 20396 Enable nodeId# 20258 Enable nodeId# 20380 Add nodeId# 20430 Add nodeId# 20431 Enable nodeId# 20350 Enable nodeId# 20378 Delete nodeId# 20420 Delete nodeId# 20401 Pick Add nodeId# 20432 Delete nodeId# 20430 Pick Delete nodeId# 20379 Delete nodeId# 20414 Pick Pick Pick Enable nodeId# 20332 Delete nodeId# 20376 Pick Pick Pick Pick Enable nodeId# 20410 Enable nodeId# 20406 Delete nodeId# 20380 Add nodeId# 20433 Enable nodeId# 20367 Pick Enable nodeId# 20417 Add nodeId# 20434 Pick Pick Disable nodeId# 20385 Disable nodeId# 20410 Pick >> test.py::test[order_by-order_by_dynum_desc-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Debug] [GOOD] >> KqpScan::ScanRetryReadRanges [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Plan] >> test.py::test[window-win_func_in_lib--Analyze] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Debug] >> test.py::test[aggr_factory-linear_histogram-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_in_lib--Debug] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Debug] [GOOD] >> TPDiskRaces::KillOwnerWhileDecommitting [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2024-11-21T17:44:43.214633Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
|74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |74.1%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> test.py::test[aggr_factory-min_by-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_gs_rollup-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_gs_rollup-default.txt-Plan] >> test.py::test[aggr_factory-multi_minmaxby-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Debug] >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> test.py::test[window-full/session_compact--Debug] [GOOD] >> test.py::test[window-full/session_compact--Plan] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Debug] [GOOD] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Plan] [GOOD] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Results] >> test.py::test[pg_catalog-pg_locks-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Debug] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> KqpScan::ScanPg [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] >> test.py::test[pg-with_rec_distinct-default.txt-Analyze] [GOOD] >> test.py::test[pg-with_rec_distinct-default.txt-Debug] >> test.py::test[aggr_factory-min_by-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-min_by-default.txt-Results] >> test.py::test[pg-select_agg_gs_rollup-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_gs_rollup-default.txt-Results] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests2Inflight2BlobSize1000 |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2024-11-21T17:44:41.642072Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:44:41.642830Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c65/r3tmp/tmpuZsTbE/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 
VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:41.642886Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c65/r3tmp/tmpuZsTbE/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:44:41.643135Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T17:44:41.643188Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:41.643361Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T17:44:41.643371Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:41.643454Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T17:44:41.643458Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:41.643507Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T17:44:41.643510Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:41.643555Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T17:44:41.643559Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T17:44:41.643653Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2024-11-21T17:44:41.643655Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:44:41.643664Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:44:41.643697Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:41.646587Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:41.646825Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:41.646872Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:41.646876Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:41.654964Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:41.654984Z node 1 :BS_NODE DEBUG: 
{NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:44:41.655539Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:41.656153Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:41.656198Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:41.668408Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c65/r3tmp/tmpuZsTbE/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:41.668555Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:41.668751Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T17:44:41.668758Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:41.668779Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\3149\340\267\320\037\031\345\247(Po\226\324\261Hc\257\010\245" } 2024-11-21T17:44:41.668824Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T17:44:41.668836Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:70:2114] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:41.668847Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.013827s 2024-11-21T17:44:41.668928Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T17:44:41.668933Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:41.687047Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 
2024-11-21T17:44:41.688522Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:41.689135Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:41.689834Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:41.689949Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:41.690097Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:41.690306Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:41.690360Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:41.690590Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:41.690602Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:41.690729Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:41.690916Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:41.691152Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:41.691164Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:41.695156Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:44:41.707694Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:44:41.708074Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:44:41.708205Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:44:41.710912Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:41.710932Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:41.710976Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:44:41.715024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:44:41.715096Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:44:41.715134Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:44:41.715153Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr: ... 
false NoGroup# false 2024-11-21T17:44:42.938735Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2181038082 2024-11-21T17:44:42.938741Z node 3 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 2181038082 2024-11-21T17:44:42.938793Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:44:42.938797Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T17:44:42.938813Z node 3 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2181038082 2024-11-21T17:44:42.938829Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2024-11-21T17:44:42.938900Z node 2 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [3:29:2059] Cookie# 0 Recipient# [2:404:2342] RecipientRewrite# [2:362:2311] Request# {NodeID: 3 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T17:44:42.938918Z node 2 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 3 GroupIDs: 2181038082 } 2024-11-21T17:44:42.939010Z node 3 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 3 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 2681896850772158367 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c65/r3tmp/tmpGH60Mx//key.txt" EncryptedGroupKey: "|\366\242-2\177D\001\371\017q\203\311\266VA\230\007\206\211g\3419gZ\376`k\205a\231\362\346\265\355\004" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T17:44:42.939030Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 2 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 2681896850772158367 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c65/r3tmp/tmpGH60Mx//key.txt" EncryptedGroupKey: "|\366\242-2\177D\001\371\017q\203\311\266VA\230\007\206\211g\3419gZ\376`k\205a\231\362\346\265\355\004" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T17:44:42.939049Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/6fwh/001c65/r3tmp/tmpGH60Mx//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T17:44:42.939234Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T17:44:42.939242Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:44:42.939504Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:597:2104] targetNodeId# 2 Marker# DSP01 2024-11-21T17:44:42.939526Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:598:2105] targetNodeId# 
2 Marker# DSP01 2024-11-21T17:44:42.939548Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:599:2106] targetNodeId# 2 Marker# DSP01 2024-11-21T17:44:42.939571Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:600:2107] targetNodeId# 2 Marker# DSP01 2024-11-21T17:44:42.939593Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:601:2108] targetNodeId# 2 Marker# DSP01 2024-11-21T17:44:42.939612Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:602:2109] targetNodeId# 2 Marker# DSP01 2024-11-21T17:44:42.939630Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:603:2110] targetNodeId# 2 Marker# DSP01 2024-11-21T17:44:42.939634Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:44:42.939694Z node 3 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2024-11-21T17:44:42.939920Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:42.939975Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:42.939986Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:42.940010Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:42.940021Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:42.940076Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:42.940113Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 
WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:42.940119Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T17:44:42.940124Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T17:44:42.940145Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [3:606:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T17:44:42.940152Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2024-11-21T17:44:42.940180Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:597:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 2593412838754662637 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T17:44:42.940513Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2024-11-21T17:44:42.940526Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2024-11-21T17:44:42.940582Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T17:44:42.940610Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T17:44:42.940662Z node 2 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [2:607:2506] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T17:44:42.940687Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:42.940693Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:44:42.940700Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2024-11-21T17:44:42.940705Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2024-11-21T17:44:42.940721Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:582:2496] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:42.940763Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2024-11-21T17:44:42.940802Z node 2 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T17:44:42.940814Z node 2 :BS_PROXY_PUT ERROR: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2024-11-21T17:44:42.940820Z node 2 :BS_PROXY_PUT NOTICE: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:44:42.940882Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:597:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> test.py::test[window-full/session_compact--Plan] [GOOD] >> test.py::test[window-full/session_compact--Results] >> test.py::test[tpch-q9-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt--Debug] >> TDatabaseResolverTests::Greenplum_MasterNode >> test.py::test[pg-select_join_left_equi-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_left_equi-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_left_equi-default.txt-Results] >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2024-11-21T17:44:43.010957Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} 
Bootstrap 2024-11-21T17:44:43.011788Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c58/r3tmp/tmpDNH0bT/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:43.011845Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c58/r3tmp/tmpDNH0bT/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:44:43.012124Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T17:44:43.012184Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.012386Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T17:44:43.012399Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.012507Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T17:44:43.012513Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.012585Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T17:44:43.012591Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.012666Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T17:44:43.012672Z node 1 :BS_NODE DEBUG: 
{NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T17:44:43.012806Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2024-11-21T17:44:43.012810Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:44:43.012823Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:44:43.012866Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:43.015920Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:43.016199Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:43.016279Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.016285Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:43.021822Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.021843Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:44:43.022422Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:43.023148Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:43.023206Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.027233Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c58/r3tmp/tmpDNH0bT/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:43.027322Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:43.027471Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true 
NodeListObtained# false PendingEvents.size# 2 2024-11-21T17:44:43.027479Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:43.027500Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: ":\353\217{_J\370B\242\007\356c9Z\3000t\301\345\010" } 2024-11-21T17:44:43.027538Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2024-11-21T17:44:43.027549Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 2146435075 Sender# [1:70:2114] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:43.027558Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.005681s 2024-11-21T17:44:43.027611Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2024-11-21T17:44:43.027617Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:255} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2024-11-21T17:44:43.032571Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.033955Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.034562Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.035283Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.035390Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.035565Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.035894Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.035970Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:43.036323Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:43.036340Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:43.036514Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:810} Handle(TEvStatusUpdate) 2024-11-21T17:44:43.036768Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:43.037078Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:43.037105Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T17:44:43.040756Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 
2024-11-21T17:44:43.046497Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:44:43.046770Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:44:43.046861Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:44:43.049283Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:43.049299Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:44:43.049335Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:44:43.051755Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:44:43.051801Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:44:43.051831Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:44:43.051848Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsControll ... DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:43.314846Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:2] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:43.314850Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:51:2095] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:1] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:43.315530Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2024-11-21T17:44:43.315579Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T17:44:43.315594Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T17:44:43.315609Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T17:44:43.315615Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# 
true Marker# BPP21 2024-11-21T17:44:43.315677Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK08@propose_group_key.cpp:96} TTxProposeGroupKey Complete 2024-11-21T17:44:43.315724Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:335:2344] Cookie# 0 Recipient# [1:335:2344] RecipientRewrite# [1:335:2344] Request# {NodeID: 1 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T17:44:43.315737Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2181038082 } 2024-11-21T17:44:43.315796Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 11832342462848643093 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c58/r3tmp/tmpDNH0bT//key.txt" EncryptedGroupKey: "k7\017\0261?\2661\250o\307o\314~\210Ms\365>\177\242k\002\001\332!@\215\373s\376\255\221[\030~" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T17:44:43.315809Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 11832342462848643093 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c58/r3tmp/tmpDNH0bT//key.txt" EncryptedGroupKey: "k7\017\0261?\2661\250o\307o\314~\210Ms\365>\177\242k\002\001\332!@\215\373s\376\255\221[\030~" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T17:44:43.316001Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:44:43.316007Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:44:43.316242Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:520:2496] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.316263Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:521:2497] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.316275Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:522:2498] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.316286Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:523:2499] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.316298Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:524:2500] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.316308Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:525:2501] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.316320Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:526:2502] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.316325Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:44:43.316437Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true 
ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.316453Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.316471Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.316491Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.316509Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.316521Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.316527Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.316530Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T17:44:43.316533Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T17:44:43.316549Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] bootstrap ActorId# [1:529:2503] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T17:44:43.316554Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T17:44:43.316593Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 10018880713299432865 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T17:44:43.316885Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Handle 
TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T17:44:43.316892Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2024-11-21T17:44:43.316943Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2024-11-21T17:44:43.317226Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2024-11-21T17:44:43.317310Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] bootstrap ActorId# [1:531:2505] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T17:44:43.317316Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T17:44:43.317337Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 6986878145041102553 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T17:44:43.317500Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T17:44:43.317507Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2024-11-21T17:44:43.317558Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [1:532:2506] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T17:44:43.317562Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T17:44:43.317578Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 7508207834809229985 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T17:44:43.317715Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T17:44:43.317720Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2024-11-21T17:44:35.965931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:44:35.971212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:44:35.971376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:44:35.971432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:35.971824Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:35.971847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010d8/r3tmp/tmp3XcNkO/pdisk_1.dat 2024-11-21T17:44:36.109201Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:36.224868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:44:36.314140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:36.314176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:36.315720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:36.315746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:36.327325Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:44:36.327492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:36.327651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:36.679851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:44:37.317971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1328:2797], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:37.318001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1339:2802], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:37.318013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:37.319925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:44:37.826270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1342:2805], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:44:38.085661Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x6w657mkenz887pvcr850, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc3NjVkMTEtMjYyM2NlOWMtOGJiZTA4NjgtMzY0ODRiOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2024-11-21T17:44:38.420913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7x6wza5hfz99f2zf1zy5f6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg2YTFiN2ItNzg4MTBmYjQtMTNjNDRiNjAtMWJhNTMxZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1545:2928] -> [2:1501:2402] -- EvScanData from [2:1550:2409]: pass 2024-11-21T17:44:38.708112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7x6wza5hfz99f2zf1zy5f6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg2YTFiN2ItNzg4MTBmYjQtMTNjNDRiNjAtMWJhNTMxZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2024-11-21T17:44:38.710472Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T17:44:40.866958Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:44:40.870354Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:44:40.870431Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:40.870519Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:44:40.870831Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:40.870879Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010d8/r3tmp/tmpCCuFlB/pdisk_1.dat 2024-11-21T17:44:40.953345Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:41.063993Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:44:41.162429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:41.162466Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:41.165238Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:41.165267Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:41.177266Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T17:44:41.177465Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:41.177578Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:41.531790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:44:42.021526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1327:2799], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:42.021562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1338:2804], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:42.021612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:42.022369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:44:42.583899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1341:2807], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:44:42.737362Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x70s5cayhr6196x8j9k6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTFkNGYxZWItOTI2MjBkZTItNDMwZjJhNWMtY2UyZTE0M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 4 2024-11-21T17:44:43.090925Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7x71fzc3x990ddr63zhenw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjFjZjAzOTMtOWI0ZDMyNmYtZjAzNWM1MjctZWFhNGZiMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [3:1544:2930] -> [4:1498:2402] -- EvScanData from [4:1548:2409]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2} 2024-11-21T17:44:43.095258Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> test.py::test[pg-pg_column_case--ForceBlocks] [GOOD] >> test.py::test[pg-pg_column_case--Plan] [GOOD] >> test.py::test[pg-pg_column_case--Results] >> test.py::test[blocks-sort_two_mix--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Debug] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1000BlobSize1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2024-11-21T17:44:43.077898Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:44:43.078543Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c50/r3tmp/tmpu0el3Z/pdisk_map" 
PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:43.078590Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c50/r3tmp/tmpu0el3Z/pdisk_map" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:44:43.078821Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T17:44:43.078869Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.079024Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2024-11-21T17:44:43.079033Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.079099Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2024-11-21T17:44:43.079103Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.079171Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2024-11-21T17:44:43.079175Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:44:43.079224Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [2000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2024-11-21T17:44:43.079228Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T17:44:43.079327Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [1:44:2074] ControllerId# 72057594037932033 2024-11-21T17:44:43.079330Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} 
SendRegisterNode 2024-11-21T17:44:43.079339Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:44:43.079374Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:43.082075Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:43.082229Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:44:43.082628Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:43.082652Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 33554432 2024-11-21T17:44:43.082756Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 1 PipeClientId# [2:91:2068] ControllerId# 72057594037932033 2024-11-21T17:44:43.082760Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:44:43.082771Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:44:43.082795Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:44:43.083299Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:44:43.083361Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:43.083425Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:44:43.088817Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.088850Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:43.088950Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.088958Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:44:43.089585Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:43.089670Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 
StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.089677Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:44:43.090362Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:43.090493Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.090547Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.090553Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:44:43.090570Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:44:43.090671Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:43.090701Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:44:43.090712Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:44:43.091127Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:43.094527Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "SectorMap:/home/runner/.ya/build/build_root/6fwh/001c50/r3tmp/tmpu0el3Z/pdisk_map" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { 
GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 33554432 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 1 } 2024-11-21T17:44:43.094607Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:44:43.094769Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T17:44:4 ... oxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.449891Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.449906Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.449936Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.449941Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T17:44:43.449946Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T17:44:43.449953Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2024-11-21T17:44:43.450092Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:593:2505] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T17:44:43.450124Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:44:43.450131Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:44:43.450140Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2024-11-21T17:44:43.450145Z node 1 
:BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2024-11-21T17:44:43.450169Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:44:43.458250Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T17:44:43.458303Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2024-11-21T17:44:43.458314Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:44:43.458418Z node 2 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 2181038082 EnableProxyMock# false NoGroup# false 2024-11-21T17:44:43.458429Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2181038082 2024-11-21T17:44:43.458435Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:260} RequestGroupConfig GroupId# 2181038082 2024-11-21T17:44:43.458506Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:44:43.458512Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T17:44:43.458539Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2181038082 2024-11-21T17:44:43.458554Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2024-11-21T17:44:43.458633Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:29:2059] Cookie# 0 Recipient# [1:439:2377] RecipientRewrite# [1:396:2345] Request# {NodeID: 2 GroupIDs: 2181038082 } StopGivingGroups# false 2024-11-21T17:44:43.458656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 2181038082 } 2024-11-21T17:44:43.458771Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:551} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 13220171991348794340 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c50/r3tmp/tmpu0el3Z//key.txt" EncryptedGroupKey: 
"\343[\326\335\027,\203?k\350\236\026R>\313z\244\364S\207\326\311\3333m\331\335\376\353\266\227\354\247\002\356\314" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } } 2024-11-21T17:44:43.458794Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2181038082 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1002 PDiskGuid: 13220171991348794340 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/6fwh/001c50/r3tmp/tmpu0el3Z//key.txt" EncryptedGroupKey: "\343[\326\335\027,\203?k\350\236\026R>\313z\244\364S\207\326\311\3333m\331\335\376\353\266\227\354\247\002\356\314" GroupKeyNonce: 2181038082 MainKeyVersion: 1 StoragePoolName: "test_storage" DeviceType: ROT } } 2024-11-21T17:44:43.458817Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/6fwh/001c50/r3tmp/tmpu0el3Z//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T17:44:43.459013Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2181038082 2024-11-21T17:44:43.459027Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T17:44:43.459032Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:44:43.459302Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:597:2104] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.459324Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:598:2105] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.459341Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:599:2106] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.459360Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:600:2107] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.459374Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:601:2108] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.459394Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:602:2109] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.459412Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:603:2110] targetNodeId# 1 Marker# DSP01 2024-11-21T17:44:43.459417Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:44:43.459723Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.459746Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} 
Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.459778Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.459815Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.459825Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.459833Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.459884Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T17:44:43.459889Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T17:44:43.459893Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T17:44:43.459926Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:597:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Analyze] Test command err: 2024-11-21T17:44:43.791084Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2024-11-21T17:44:32.292644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:44:32.301231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:44:32.301491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:44:32.301559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:32.302113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:32.302133Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010ef/r3tmp/tmpK3b3z2/pdisk_1.dat 2024-11-21T17:44:32.510546Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:32.665576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:44:32.779604Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:44:32.783267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:32.783313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:32.783676Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:44:32.783819Z node 2 :TX_PROXY DEBUG: actor# [2:193:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:44:32.784096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:32.784108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:32.784776Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2024-11-21T17:44:32.796154Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:44:32.796361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:32.796453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:33.185918Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2024-11-21T17:44:33.185944Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:44:33.186181Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1162:2706] 2024-11-21T17:44:33.227685Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { 
Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:44:33.227997Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:44:33.228012Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:44:33.228091Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:44:33.228135Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:44:33.228149Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T17:44:33.228268Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:44:33.228890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:44:33.232919Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:44:33.232942Z node 1 :TX_PROXY DEBUG: Actor# [1:1162:2706] txid# 281474976715657 SEND to# [1:1071:2648] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:44:33.350291Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1215:2356] 2024-11-21T17:44:33.350390Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:44:33.375336Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:44:33.375382Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:44:33.375543Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:44:33.375552Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:44:33.375559Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:44:33.375609Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:44:33.393965Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:44:33.394154Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:44:33.394186Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1238:2370] 2024-11-21T17:44:33.394192Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:44:33.394196Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:44:33.394202Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:44:33.394973Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:44:33.395004Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:44:33.395038Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:44:33.395045Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:44:33.395055Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:44:33.395061Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:44:33.476701Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1196:2736], serverId# [2:1243:2372], sessionId# [0:0:0] 2024-11-21T17:44:33.476823Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:44:33.476880Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:44:33.476917Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:44:33.477734Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:44:33.492693Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:44:33.492929Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:44:33.992354Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1272:2758], serverId# [2:1274:2381], sessionId# [0:0:0] 2024-11-21T17:44:33.994207Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 994 RawX2: 4294969882 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:44:33.994222Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:44:33.994272Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:44:33.994279Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:44:33.994287Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:44:33.994341Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:44:33.994372Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:44:33.994441Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:44:33.994452Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:44:33.994529Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 
2024-11-21T17:44:33.994593Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:44:33.995099Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:44:33.995105Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:44:34.002150Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:44:34.002706Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:44:34.002718Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:44:34.002727Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:44:34.002842Z node ... ode 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd7x6xwk0p2j19hqvkhffhqa. SessionId : ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll inputs 2024-11-21T17:44:39.706209Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd7x6xwk0p2j19hqvkhffhqa. SessionId : ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2024-11-21T17:44:39.706212Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd7x6xwk0p2j19hqvkhffhqa. SessionId : ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2024-11-21T17:44:39.706215Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd7x6xwk0p2j19hqvkhffhqa. SessionId : ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. ProcessOutputsState.Inflight: 0 2024-11-21T17:44:39.706218Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd7x6xwk0p2j19hqvkhffhqa. SessionId : ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2024-11-21T17:44:39.706221Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd7x6xwk0p2j19hqvkhffhqa. SessionId : ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:44:39.706224Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. Tasks execution finished 2024-11-21T17:44:39.706228Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jd7x6xwk0p2j19hqvkhffhqa. 
SessionId : ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T17:44:39.706237Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2024-11-21T17:44:39.706250Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:44:39.706277Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T17:44:39.706311Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd7x6xwk0p2j19hqvkhffhqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1572:2943], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 375 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 126 FinishTimeMs: 1732211079706 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 6 BuildCpuTimeUs: 120 HostName: "ghrun-2rqap2spfm" NodeId: 3 StartTimeMs: 1732211079705 } MaxMemoryUsage: 1048576 } 2024-11-21T17:44:39.706317Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jd7x6xwk0p2j19hqvkhffhqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1572:2943] 2024-11-21T17:44:39.706335Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd7x6xwk0p2j19hqvkhffhqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:44:39.706341Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd7x6xwk0p2j19hqvkhffhqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T17:44:39.706347Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd7x6xwk0p2j19hqvkhffhqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWFlNzFkOWMtYjJlNjkxYTAtOGVjMmY5MjgtMjc2NzlkYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000375s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T17:44:39.706567Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T17:44:39.706585Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Handle TEvProposeTransaction 2024-11-21T17:44:39.706590Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] TxId# 0 ProcessProposeTransaction 2024-11-21T17:44:39.706609Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1574:2944] SnapshotReq marker# P0 2024-11-21T17:44:39.707299Z node 3 :TX_PROXY DEBUG: Actor# [3:1576:2944] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2024-11-21T17:44:39.707353Z node 3 :TX_PROXY DEBUG: Actor# [3:1576:2944] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2024-11-21T17:44:39.707378Z node 3 :TX_PROXY DEBUG: Actor# [3:1574:2944] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2024-11-21T17:44:40.994614Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:44:40.994723Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:44:40.994733Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:40.994791Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:44:40.994844Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:44:40.994869Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010ef/r3tmp/tmpEQXZvj/pdisk_1.dat 2024-11-21T17:44:41.098248Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:41.212599Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:44:41.309258Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:41.309297Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:41.314762Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:41.314803Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:41.326292Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2024-11-21T17:44:41.326508Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:41.326643Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:41.656592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:44:42.144544Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1329:2798], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:42.144571Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1340:2803], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:42.144634Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:44:42.145846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:44:42.647037Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1343:2806], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:44:42.810617Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x70x0a85958016vehbknh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MWZiNTU2N2QtODJmMjc0Y2MtY2QzODAwY2EtZjNmNmY0MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:44:43.146347Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7x71jf5n1gs2gp7dgbkdrd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjkxNDRiNDEtYzJmYmY5ZmYtZjBhNmFkOTQtOGFiZTQ5YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:44:43.272286Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7x71jf5n1gs2gp7dgbkdrd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YjkxNDRiNDEtYzJmYmY5ZmYtZjBhNmFkOTQtOGFiZTQ5YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:44:43.273130Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> test.py::test[blocks-combine_all_count_filter_opt--Results] [GOOD] >> test.py::test[case-case_opt_cond-default.txt-Analyze] >> test.py::test[pg-tpcds-q41-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Analyze] >> test.py::test[window-win_peephole_double_usage-default.txt-Analyze] [GOOD] >> test.py::test[window-win_peephole_double_usage-default.txt-Debug] >> test.py::test[schema-select_all_inferschema2--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op--Debug] >> test.py::test[window-full/aggregations--Debug] [GOOD] >> test.py::test[window-full/aggregations--Plan] [GOOD] >> test.py::test[window-full/aggregations--Results] >> test.py::test[action-eval_skip_take--Results] [GOOD] >> test.py::test[action-eval_variant-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2024-11-21T17:44:43.979771Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
>> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests2Inflight2BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight10BlobSize1000 >> BootstrapperTest::LoneBootstrapper >> TTabletPipeTest::TestConsumerSidePipeReset >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> test.py::test[window-current/ansi_current_mixed--Analyze] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Debug] >> test.py::test[select-append_to_value--Debug] [GOOD] >> test.py::test[select-append_to_value--Plan] >> test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[select-append_to_value--Plan] [GOOD] >> test.py::test[aggregate-percentile_and_variance--Debug] >> test.py::test[pg-select_agg_gs_rollup-default.txt-Results] [GOOD] >> test.py::test[expr-exapnd_with_singular_types-default.txt-Results] [GOOD] >> test.py::test[expr-int_literals_negative_typed-default.txt-Debug] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Results] >> test.py::test[pg-pg_corr_offset-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_corr_offset-default.txt-Plan] >> test.py::test[select-append_to_value--Results] >> test.py::test[agg_phases-min_null-default.txt-Results] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-Analyze] >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession >> test.py::test[pg-pg_corr_offset-default.txt-Plan] [GOOD] >> test.py::test[pg-pg_corr_offset-default.txt-Results] >> test.py::test[aggr_factory-log_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi_tuple-default.txt-Debug] >> test.py::test[aggregate-having_distinct_expr--Debug] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Plan] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Results] >> test.py::test[join-mapjoin_early_rewrite--Debug] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Plan] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Results] >> TTabletPipeTest::TestSendWithoutWaitOpen >> BootstrapperTest::RestartUnavailableTablet >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> test.py::test[order_by-order_by_mul_columns-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_mul_columns-default.txt-Plan] [GOOD] >> TResourceBroker::TestCounters >> test.py::test[pg-with_rec_distinct-default.txt-Debug] [GOOD] >> test.py::test[pg-with_rec_distinct-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] >> test.py::test[window-win_func_in_lib--Debug] [GOOD] >> test.py::test[window-win_func_in_lib--ForceBlocks] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[solomon-Basic-default.txt-Debug] [SKIPPED] >> test.py::test[solomon-Basic-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-Basic-default.txt-Results] [SKIPPED] >> test.py::test[solomon-BrokenJsonResponse--Debug] [SKIPPED] >> test.py::test[solomon-BrokenJsonResponse--Plan] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests100Inflight10BlobSize1000 >> test.py::test[solomon-BrokenJsonResponse--Plan] [SKIPPED] >> test.py::test[solomon-BrokenJsonResponse--Results] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Debug] 
[SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Plan] [SKIPPED] >> test.py::test[solomon-DownsamplingValidSettings-default.txt-Results] [SKIPPED] >> test.py::test[solomon-Subquery-default.txt-Debug] [SKIPPED] >> test.py::test[pg-select_join_left_equi-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_qstar-default.txt-Debug] >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> test.py::test[aggr_factory-linear_histogram-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Debug] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1000BlobSize1000 >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> test.py::test[window-full/session_compact--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--Debug] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> TResourceBroker::TestChangeTaskType [GOOD] |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> BootstrapperTest::MultipleBootstrappers [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> test.py::test[pg-pg_column_case--Results] [GOOD] >> test.py::test[pg-pg_types_literal_with_length-default.txt-Analyze] >> TargetDiscoverer::SystemObjects >> test.py::test[type_v3-decimal_yt--Debug] [GOOD] >> test.py::test[type_v3-decimal_yt--Plan] [GOOD] >> test.py::test[type_v3-decimal_yt--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:99:2133] ... blocking block result NO_GROUP for [1:100:2133] ... blocking block result NO_GROUP for [1:101:2133] ... 
blocking block result NO_GROUP for [1:102:2133] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] Test command err: 2024-11-21T17:44:47.362116Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T17:44:47.362735Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [1:97:2132]) priority=5 resources={200, 200} 2024-11-21T17:44:47.362743Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T17:44:47.362749Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {200, 200} for task task-1 (1 by [1:97:2132]) from queue queue_compaction0 2024-11-21T17:44:47.362752Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [1:97:2132]) to queue queue_compaction0 2024-11-21T17:44:47.362764Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T17:44:47.362770Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-2 (2 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T17:44:47.362773Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362777Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-2 (2 by [1:97:2132]) from queue queue_compaction1 2024-11-21T17:44:47.362779Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362782Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T17:44:47.362787Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-3 (3 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T17:44:47.362789Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362792Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-3 (3 by [1:97:2132]) from queue queue_compaction1 2024-11-21T17:44:47.362795Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362799Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 400.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T17:44:47.362804Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-4 (4 by [1:97:2132]) priority=5 resources={100, 100} 2024-11-21T17:44:47.362807Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362810Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {100, 100} for task task-4 (4 by [1:97:2132]) from queue queue_compaction1 2024-11-21T17:44:47.362812Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362815Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 600.000000 (insert task task-4 (4 by [1:97:2132])) 2024-11-21T17:44:47.362819Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-5 (5 by [1:97:2132]) priority=5 resources={250, 250} 2024-11-21T17:44:47.362822Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by 
[1:97:2132]) to queue queue_compaction0 2024-11-21T17:44:47.362825Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T17:44:47.362830Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-6 (6 by [1:97:2132]) priority=5 resources={250, 250} 2024-11-21T17:44:47.362832Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362835Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T17:44:47.362838Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction1 blocked by an earlier queue 2024-11-21T17:44:47.362842Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction1 task task-7 (7 by [1:97:2132]) priority=5 resources={150, 150} 2024-11-21T17:44:47.362852Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362855Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T17:44:47.362857Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction1 blocked by an earlier queue 2024-11-21T17:44:47.362881Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {200, 200}) 2024-11-21T17:44:47.362886Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 0.000000 (remove task task-1 (1 by [1:97:2132])) 2024-11-21T17:44:47.362889Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-5 (5 by [1:97:2132]) 2024-11-21T17:44:47.362891Z node 1 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction1 blocked by an earlier queue 2024-11-21T17:44:47.362895Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-2 (2 by [1:97:2132]) (release resources {100, 100}) 2024-11-21T17:44:47.362898Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 600.000000 to 400.000000 (remove task task-2 (2 by [1:97:2132])) 2024-11-21T17:44:47.362901Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {250, 250} for task task-5 (5 by [1:97:2132]) from queue queue_compaction0 2024-11-21T17:44:47.362903Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-5 (5 by [1:97:2132]) to queue queue_compaction0 2024-11-21T17:44:47.362906Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 475.000000 (insert task task-5 (5 by [1:97:2132])) 2024-11-21T17:44:47.362909Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-6 (6 by [1:97:2132]) 2024-11-21T17:44:47.362923Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-3 (3 by [1:97:2132]) (release resources {100, 100}) 2024-11-21T17:44:47.362926Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 200.000000 (remove task task-3 (3 by [1:97:2132])) 2024-11-21T17:44:47.362929Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-6 (6 by [1:97:2132]) 2024-11-21T17:44:47.362932Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-4 (4 by [1:97:2132]) (release resources {100, 100}) 2024-11-21T17:44:47.362935Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 0.000000 (remove task task-4 (4 by [1:97:2132])) 2024-11-21T17:44:47.362937Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {250, 250} for task task-6 (6 by [1:97:2132]) from queue 
queue_compaction1 2024-11-21T17:44:47.362940Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-6 (6 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362943Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 425.000000 (insert task task-6 (6 by [1:97:2132])) 2024-11-21T17:44:47.362946Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-7 (7 by [1:97:2132]) 2024-11-21T17:44:47.362959Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-5 (5 by [1:97:2132]) (release resources {250, 250}) 2024-11-21T17:44:47.362963Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 475.000000 to 0.000000 (remove task task-5 (5 by [1:97:2132])) 2024-11-21T17:44:47.362965Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {150, 150} for task task-7 (7 by [1:97:2132]) from queue queue_compaction1 2024-11-21T17:44:47.362968Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-7 (7 by [1:97:2132]) to queue queue_compaction1 2024-11-21T17:44:47.362971Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 425.000000 to 680.000000 (insert task task-7 (7 by [1:97:2132])) 2024-11-21T17:44:47.362976Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-6 (6 by [1:97:2132]) (release resources {250, 250}) 2024-11-21T17:44:47.362979Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 680.000000 to 255.000000 (remove task task-6 (6 by [1:97:2132])) 2024-11-21T17:44:47.362994Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-7 (7 by [1:97:2132]) (release resources {150, 150}) 2024-11-21T17:44:47.362998Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 255.000000 to 0.000000 (remove task task-7 (7 by [1:97:2132])) 2024-11-21T17:44:47.363002Z node 1 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1000 (1000 by [1:97:2132]) priority=5 resources={500, 500} 2024-11-21T17:44:47.363005Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1000 (1000 by [1:97:2132]) to queue queue_compaction0 2024-11-21T17:44:47.363007Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {500, 500} for task task-1000 (1000 by [1:97:2132]) from queue queue_compaction0 2024-11-21T17:44:47.363078Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1000 (1000 by [1:97:2132]) to queue queue_compaction0 2024-11-21T17:44:47.363082Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 900.000000 (insert task task-1000 (1000 by [1:97:2132])) 2024-11-21T17:44:47.363087Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-1 (1 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363090Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363093Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363096Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-2 (2 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363099Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363101Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363105Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-3 (3 by 
[1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363108Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363110Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363114Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-4 (4 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363116Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-4 (4 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363119Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363122Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-5 (5 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363125Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-5 (5 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363127Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363131Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-6 (6 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363133Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-6 (6 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363136Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363140Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-7 (7 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363143Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-7 (7 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363146Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363150Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-8 (8 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363152Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-8 (8 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363155Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363159Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-9 (9 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363162Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-9 (9 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363165Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363169Z node 1 :RESOURCE_BROKER DEBUG: Submitted new unknown task task-10 (10 by [1:97:2132]) priority=5 resources={1, 1} 2024-11-21T17:44:47.363172Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task task-10 (10 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363175Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-1 (1 by [1:97:2132]) 2024-11-21T17:44:47.363178Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1000 (1000 by [1:97:2132]) (release resources {500, 500}) 2024-11-21T17:44:47.363182Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 1500.000000 2024-11-21T17:44:47.363185Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-1 (1 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.363188Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by 
[1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363191Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 0.000000 to 2.000000 (insert task task-1 (1 by [1:97:2132])) 2024-11-21T17:44:47.363194Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-2 (2 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.363197Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-2 (2 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363199Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 2.000000 to 4.000000 (insert task task-2 (2 by [1:97:2132])) 2024-11-21T17:44:47.363202Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-3 (3 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.363205Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.363207Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 4.000000 to 6.000000 (insert task task-3 (3 by [1:97:2132])) 2024-11-21T17:44:47.363210Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-4 (4 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.364206Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-4 (4 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.364209Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 6.000000 to 8.000000 (insert task task-4 (4 by [1:97:2132])) 2024-11-21T17:44:47.364213Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-5 (5 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.364215Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-5 (5 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.364219Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 8.000000 to 10.000000 (insert task task-5 (5 by [1:97:2132])) 2024-11-21T17:44:47.364222Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-6 (6 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.364237Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-6 (6 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.364240Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 10.000000 to 12.000000 (insert task task-6 (6 by [1:97:2132])) 2024-11-21T17:44:47.364243Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-7 (7 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.364246Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-7 (7 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.364250Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 12.000000 to 14.000000 (insert task task-7 (7 by [1:97:2132])) 2024-11-21T17:44:47.364252Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-8 (8 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.364255Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-8 (8 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.364258Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 14.000000 to 16.000000 (insert task task-8 (8 by [1:97:2132])) 2024-11-21T17:44:47.364261Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-9 (9 by [1:97:2132]) from queue queue_default 
2024-11-21T17:44:47.364264Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-9 (9 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.364267Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 16.000000 to 18.000000 (insert task task-9 (9 by [1:97:2132])) 2024-11-21T17:44:47.364270Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {1, 1} for task task-10 (10 by [1:97:2132]) from queue queue_default 2024-11-21T17:44:47.364273Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-10 (10 by [1:97:2132]) to queue queue_default 2024-11-21T17:44:47.364277Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_default from 18.000000 to 20.000000 (insert task task-10 (10 by [1:97:2132])) 2024-11-21T17:44:47.364287Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364290Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_default from 0.000000 to 20.000000 2024-11-21T17:44:47.364293Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-2 (2 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364296Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-3 (3 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364300Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-4 (4 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364303Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-5 (5 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364306Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-6 (6 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364310Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-7 (7 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364313Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-8 (8 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364316Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-9 (9 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:47.364320Z node 1 :RESOURCE_BROKER DEBUG: Finish task task-10 (10 by [1:97:2132]) (release resources {1, 1}) 2024-11-21T17:44:48.009138Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T17:44:48.009212Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-1 (1 by [2:97:2132]) priority=5 resources={400, 400} 2024-11-21T17:44:48.009218Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T17:44:48.009224Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-1 (1 by [2:97:2132]) from queue queue_compaction0 2024-11-21T17:44:48.009228Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-1 (1 by [2:97:2132]) to queue queue_compaction0 2024-11-21T17:44:48.009238Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:97:2132])) 2024-11-21T17:44:48.009245Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-2 (2 by [2:97:2132]) priority=5 resources={400, 400} 2024-11-21T17:44:48.009247Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-2 (2 by [2:97:2132]) to queue queue_compaction0 2024-11-21T17:44:48.009251Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T17:44:48.009256Z node 2 :RESOURCE_BROKER DEBUG: Submitted new compaction0 task task-3 (3 by [2:97:2132]) priority=5 resources={400, 400} 
2024-11-21T17:44:48.009258Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction0 2024-11-21T17:44:48.009261Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) 2024-11-21T17:44:48.009271Z node 2 :RESOURCE_BROKER DEBUG: Update task task-3 (3 by [2:97:2132]) (priority=5 type=compaction1 resources={400, 400} resubmit=0) 2024-11-21T17:44:48.009274Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T17:44:48.009277Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-3 (3 by [2:97:2132]) 2024-11-21T17:44:48.009279Z node 2 :RESOURCE_BROKER DEBUG: Skip queue queue_compaction0 blocked by an earlier queue 2024-11-21T17:44:48.009284Z node 2 :RESOURCE_BROKER DEBUG: Finish task task-1 (1 by [2:97:2132]) (release resources {400, 400}) 2024-11-21T17:44:48.009289Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 80.000000 (remove task task-1 (1 by [2:97:2132])) 2024-11-21T17:44:48.009292Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_compaction0 from 0.000000 to 80.000000 2024-11-21T17:44:48.009295Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {400, 400} for task task-3 (3 by [2:97:2132]) from queue queue_compaction1 2024-11-21T17:44:48.009298Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task task-3 (3 by [2:97:2132]) to queue queue_compaction1 2024-11-21T17:44:48.009301Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 800.000000 (insert task task-3 (3 by [2:97:2132])) 2024-11-21T17:44:48.009304Z node 2 :RESOURCE_BROKER DEBUG: Not enough resources to start task task-2 (2 by [2:97:2132]) >> test.py::test[aggr_factory-min_by-default.txt-Results] [GOOD] >> test.py::test[case-case_opt_cond-default.txt-Analyze] [GOOD] >> test.py::test[case-case_opt_cond-default.txt-Debug] >> test.py::test[pg_catalog-pg_namespace_pg_syntax-default.txt-Results] [GOOD] >> test.py::test[pragma-file-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2024-11-21T17:44:45.244159Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:45.244179Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:45.244193Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:45.244343Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T17:44:45.244349Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 7090319362426798975 2024-11-21T17:44:45.244363Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T17:44:45.244367Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16229357239031646724 2024-11-21T17:44:45.244378Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T17:44:45.244381Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 11763690323019591983 2024-11-21T17:44:45.245041Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T17:44:45.245049Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T17:44:45.245062Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T17:44:45.245067Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T17:44:45.245071Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T17:44:45.245103Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T17:44:45.245107Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.126260s 2024-11-21T17:44:45.245144Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T17:44:45.245148Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.186178s 2024-11-21T17:44:45.506198Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:45.506364Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T17:44:45.506453Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T17:44:45.506457Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T17:44:45.577977Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:45.578141Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T17:44:45.578227Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T17:44:45.578231Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2024-11-21T17:44:46.231954Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2024-11-21T17:44:46.231974Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2024-11-21T17:44:46.232081Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T17:44:46.232088Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:46.232099Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T17:44:46.232104Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:46.232628Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T17:44:46.232673Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T17:44:46.232925Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T17:44:46.232930Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T17:44:46.232950Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T17:44:46.232954Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2024-11-21T17:44:46.851010Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2024-11-21T17:44:46.851030Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2024-11-21T17:44:46.851106Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T17:44:46.851114Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:46.851154Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T17:44:46.851158Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:46.851722Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T17:44:46.851826Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2024-11-21T17:44:46.852060Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T17:44:46.852066Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12604849125939852480 2024-11-21T17:44:46.852078Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T17:44:46.852081Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 5312993052334781800 2024-11-21T17:44:46.852196Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T17:44:46.852238Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T17:44:46.852243Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2024-11-21T17:44:46.852248Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2024-11-21T17:44:46.852265Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2024-11-21T17:44:46.852268Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2024-11-21T17:44:47.455230Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2024-11-21T17:44:47.455251Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:47.455295Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2024-11-21T17:44:47.455302Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:47.455691Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] 2024-11-21T17:44:47.455706Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:266:2094] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2024-11-21T17:44:47.455901Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T17:44:47.455907Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 2024-11-21T17:44:47.455964Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T17:44:47.455968Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2024-11-21T17:44:47.456087Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2024-11-21T17:44:47.456092Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2024-11-21T17:44:47.456199Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2024-11-21T17:44:47.456203Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2024-11-21T17:44:47.456213Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T17:44:47.456217Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T17:44:47.456314Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T17:44:47.456320Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.167359s 2024-11-21T17:44:47.456706Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2024-11-21T17:44:47.456716Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:47.461556Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:387:2094] 2024-11-21T17:44:47.472166Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T17:44:47.472178Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T17:44:47.544306Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T17:44:47.544484Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:387:2094] 2024-11-21T17:44:47.544574Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T17:44:47.544578Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 2 <-> 0 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2024-11-21T17:44:34.223099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:44:34.223220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:44:34.223248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:44:34.223358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:44:34.223398Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:44:34.223423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010e5/r3tmp/tmpCbP0MR/pdisk_1.dat 2024-11-21T17:44:34.458669Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:34.618210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:44:34.706304Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:44:34.706643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:34.706659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:34.707154Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:44:34.707254Z node 2 :TX_PROXY DEBUG: actor# [2:193:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:44:34.707846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:34.707879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:34.708288Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2024-11-21T17:44:34.719786Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:44:34.719935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:34.720069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:35.106035Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2024-11-21T17:44:35.106057Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:44:35.106096Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1164:2709] 2024-11-21T17:44:35.115701Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:44:35.116455Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:44:35.116479Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] 
txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:44:35.116587Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:44:35.116639Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:44:35.116654Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T17:44:35.117227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:44:35.117381Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:44:35.118428Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:44:35.118443Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2709] txid# 281474976715657 SEND to# [1:1072:2650] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:44:35.229148Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1243:2768] 2024-11-21T17:44:35.229218Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:44:35.259491Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1245:2769] 2024-11-21T17:44:35.259540Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:44:35.265801Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:44:35.266031Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:44:35.266174Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:44:35.266183Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:44:35.266190Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:44:35.266229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:44:35.273711Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:44:35.273781Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:44:35.273808Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:1332:2816] 2024-11-21T17:44:35.273813Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:44:35.273817Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:44:35.273822Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:44:35.274644Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:44:35.274670Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 
read sets to remove in 72075186224037889 2024-11-21T17:44:35.274756Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:44:35.274763Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:44:35.274771Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:44:35.274777Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:44:35.274815Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:1261:2778], serverId# [1:1303:2798], sessionId# [0:0:0] 2024-11-21T17:44:35.274956Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:44:35.275001Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:44:35.275019Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:44:35.275480Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1248:2771] 2024-11-21T17:44:35.275532Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:44:35.276865Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:44:35.276928Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:44:35.277119Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2024-11-21T17:44:35.277128Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2024-11-21T17:44:35.277135Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2024-11-21T17:44:35.277170Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:44:35.277178Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2024-11-21T17:44:35.277193Z node 1 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:44:35.277205Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [1:1347:2826] 2024-11-21T17:44:35.277209Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2024-11-21T17:44:35.277213Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2024-11-21T17:44:35.277217Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2024-11-21T17:44:35.277747Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2024-11-21T17:44:35.277763Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2024-11-21T17:44:35.278022Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [1:1266:2783], serverId# [1:1325:2813], sessionId# [0:0:0] 2024-11-21T17:44:35.278125Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2024-11-21T17:44:35.278132Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:44:35.278139Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2024-11-21T17:44:35.278145Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 
2024-11-21T17:44:35.278341Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2024-11-21T17:44:35.278384Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:44:35.278398Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037894 2024-11-21T17:44:35.278965Z node 1 :TX_DATASHA ... baseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1891:3117], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2024-11-21T17:44:48.170450Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1891:3117], 2024-11-21T17:44:48.170486Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [5:1569:2936], seqNo: 1, nRows: 1 2024-11-21T17:44:48.170496Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:44:48.170499Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll inputs 2024-11-21T17:44:48.170502Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2024-11-21T17:44:48.170506Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2024-11-21T17:44:48.170509Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. ProcessOutputsState.Inflight: 0 2024-11-21T17:44:48.170512Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2024-11-21T17:44:48.170515Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:44:48.170519Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:44:48.170544Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1891:3117], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 54 Tasks { TaskId: 1 CpuTimeUs: 28 FinishTimeMs: 1732211088170 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 3 BuildCpuTimeUs: 25 HostName: "ghrun-2rqap2spfm" NodeId: 5 StartTimeMs: 1732211088170 } MaxMemoryUsage: 1048576 } 2024-11-21T17:44:48.170551Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1891:3117], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2024-11-21T17:44:48.170615Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1892:3117] 2024-11-21T17:44:48.170624Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2024-11-21T17:44:48.170630Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2024-11-21T17:44:48.170633Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2024-11-21T17:44:48.170649Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:44:48.170653Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. 
Database : . }. Poll inputs 2024-11-21T17:44:48.170656Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2024-11-21T17:44:48.170660Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2024-11-21T17:44:48.170662Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. ProcessOutputsState.Inflight: 0 2024-11-21T17:44:48.170665Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2024-11-21T17:44:48.170669Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:44:48.170673Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished 2024-11-21T17:44:48.170676Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1891:3117], TxId: 281474976715667, task: 1. Ctx: { SessionId : ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==. TraceId : 01jd7x750q6t0dcf04x2r8erwg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T17:44:48.170684Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2024-11-21T17:44:48.170696Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:44:48.170720Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T17:44:48.170757Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1891:3117], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 214 Tasks { TaskId: 1 CpuTimeUs: 29 FinishTimeMs: 1732211088170 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 4 BuildCpuTimeUs: 25 HostName: "ghrun-2rqap2spfm" NodeId: 5 StartTimeMs: 1732211088170 } MaxMemoryUsage: 1048576 } 2024-11-21T17:44:48.170763Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1891:3117] 2024-11-21T17:44:48.170777Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:44:48.170782Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T17:44:48.170788Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1888:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd7x750q6t0dcf04x2r8erwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OGRiMjE5ZC1hYTM5ZDlmZS02OTQ1NDJkNS05NTc5NzFjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000214s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... 
response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 140 >> test.py::test[action-eval_variant-default.txt-Debug] [GOOD] >> test.py::test[action-eval_variant-default.txt-Plan] [GOOD] >> test.py::test[window-win_peephole_double_usage-default.txt-Debug] [GOOD] >> test.py::test[window-win_peephole_double_usage-default.txt-ForceBlocks] >> test.py::test[like-like_clause-default.txt-Analyze] [GOOD] >> test.py::test[like-like_clause-default.txt-Debug] >> test.py::test[action-eval_variant-default.txt-Results] >> test.py::test[pg-tpcds-q78-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Debug] >> BootstrapperTest::UnavailableStateStorage [GOOD] |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> test.py::test[select-append_to_value--Results] [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> CostMetricsPutBlock4Plus2::TestPut4Plus2BlockRequests10000Inflight1000BlobSize1000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize2000000 >> test.py::test[select-boolean_where--Debug] >> test.py::test[aggregate-avg_and_sum_float--Debug] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Plan] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Results] >> test.py::test[expr-int_literals_negative_typed-default.txt-Debug] [GOOD] >> test.py::test[expr-int_literals_negative_typed-default.txt-Plan] [GOOD] >> test.py::test[expr-int_literals_negative_typed-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Debug] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_star-off-Plan] [SKIPPED] >> test.py::test[order_by-order_by_mul_columns-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf--Debug] >> test.py::test[join-mapjoin_early_rewrite_star-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Debug] [SKIPPED] >> test.py::test[schema-select_all_inferschema_op--Debug] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Plan] [SKIPPED] >> test.py::test[schema-select_all_inferschema_op--Plan] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt-off-Results] [SKIPPED] >> test.py::test[schema-select_all_inferschema_op--Results] >> test.py::test[join-mapjoin_with_anonymous-off-Debug] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off-Plan] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-Analyze] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-Debug] >> test.py::test[join-mapjoin_with_anonymous-off-Plan] [SKIPPED] >> test.py::test[join-mapjoin_with_anonymous-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-Debug] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-Plan] [SKIPPED] >> test.py::test[join-mapjoin_with_empty_struct-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_big_primary--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... 
waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2024-11-21T17:44:49.372929Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2024-11-21T17:44:49.373308Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2024-11-21T17:44:49.373314Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.170556s 2024-11-21T17:44:49.500354Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... waiting for multiple state storage lookup attempts (done) |74.1%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[aggregate-having_distinct_expr--Results] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Debug] >> test.py::test[pg-select_join_qstar-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_qstar-default.txt-Plan] >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> test.py::test[pg-select_join_qstar-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_qstar-default.txt-Results] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests1Inflight1BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize2000000 >> test.py::test[blocks-top_sort_two_asc--Debug] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Plan] |74.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[pg-select_agg_gs_rollup-default.txt-Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Plan] [GOOD] >> test.py::test[aggr_factory-multi_tuple-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-multi_tuple-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-multi_tuple-default.txt-Results] >> test.py::test[blocks-top_sort_two_asc--Results] >> TargetDiscoverer::SystemObjects [GOOD] >> test.py::test[pg-pg_corr_offset-default.txt-Results] [GOOD] >> test.py::test[pg-pg_like--Debug] >> test.py::test[pg-with_rec_distinct-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-with_rec_distinct-default.txt-Plan] [GOOD] >> test.py::test[pg-with_rec_distinct-default.txt-Results] >> test.py::test[type_v3-decimal_yt--Results] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Debug] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight1BlobSize2000000 >> test.py::test[window-full/aggregations--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Debug] >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> LocalTableWriter::ConsistentWrite >> test.py::test[pragma-file-default.txt-Debug] [GOOD] >> test.py::test[pragma-file-default.txt-Plan] >> test.py::test[pragma-file-default.txt-Plan] [GOOD] >> test.py::test[pragma-file-default.txt-Results] >> test.py::test[action-eval_variant-default.txt-Results] [GOOD] >> test.py::test[action-insert_after_eval--Debug] |74.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[solomon-Subquery-default.txt-Debug] [SKIPPED] >> test.py::test[window-current/ansi_current_mixed--Debug] [GOOD] >> test.py::test[window-current/ansi_current_mixed--ForceBlocks] |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb |74.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> test.py::test[window-win_func_in_lib--ForceBlocks] [GOOD] >> test.py::test[window-win_func_in_lib--Plan] [GOOD] >> test.py::test[window-win_func_in_lib--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2024-11-21T17:44:50.163127Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439789983742606266:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:50.163163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019d3/r3tmp/tmpZy1siV/pdisk_1.dat 2024-11-21T17:44:50.701522Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:50.704432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:50.704451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:50.708495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25946 TServer::EnableGrpc on GrpcPort 30070, node 1 2024-11-21T17:44:51.548730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:51.548741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:51.548743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:51.548777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25946 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:44:52.164351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:52.172708Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:44:52.179455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:44:52.279917Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T17:44:52.280675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:44:52.356319Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732211092213, tx_id: 1 } } } 2024-11-21T17:44:52.356333Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T17:44:52.365048Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211092311, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732211092325, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T17:44:52.365059Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T17:44:52.628255Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211092311, tx_id: 281474976710658 } } } 2024-11-21T17:44:52.628266Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2024-11-21T17:44:52.628270Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |74.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> test.py::test[aggregate-percentile_and_variance--Debug] [GOOD] >> test.py::test[aggregate-percentile_and_variance--Plan] [GOOD] >> test.py::test[aggregate-percentile_and_variance--Results] >> test.py::test[pg-pg_types_literal_with_length-default.txt-Analyze] [GOOD] >> test.py::test[pg-pg_types_literal_with_length-default.txt-Debug] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> test.py::test[case-case_opt_cond-default.txt-Debug] [GOOD] >> test.py::test[case-case_opt_cond-default.txt-ForceBlocks] |74.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[aggr_factory-min_by-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Debug] [GOOD] >> test.py::test[like-like_clause-default.txt-ForceBlocks] >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> test.py::test[window-win_peephole_double_usage-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> test.py::test[pg-tpcds-q78-default.txt-Debug] [GOOD] >> 
test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] >> test.py::test[pragma-file-default.txt-Results] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Debug] >> test.py::test[window-win_peephole_double_usage-default.txt-Plan] [GOOD] >> test.py::test[window-win_peephole_double_usage-default.txt-Results] >> test.py::test[expr-int_literals_negative_typed-default.txt-Results] [GOOD] >> test.py::test[expr-list_from_range_overflow-default.txt-Debug] >> LocalTableWriter::ConsistentWrite [GOOD] >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Debug] >> test.py::test[pg-with_rec_distinct-default.txt-Results] [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> test.py::test[pg_catalog-pg_auth_members-default.txt-Analyze] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> test.py::test[pg-select_join_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_right3-default.txt-Debug] >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Debug] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight1BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize2000000 >> test.py::test[pg-pg_types_literal_with_length-default.txt-Debug] [GOOD] >> test.py::test[pg-pg_types_literal_with_length-default.txt-ForceBlocks] |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |74.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2024-11-21T17:44:55.661475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790003221335735:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:55.741566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bf9/r3tmp/tmpdZsiFU/pdisk_1.dat 2024-11-21T17:44:55.901675Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:55.979839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:55.980070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:55.989064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64914 TServer::EnableGrpc on GrpcPort 1381, node 1 2024-11-21T17:44:56.284299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:56.284316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:56.284318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:56.284357Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:64914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:44:56.375074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:56.380206Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:44:56.382822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211096483 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2024-11-21T17:44:56.449213Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handshake: worker# [1:7439790007516303444:2282] 2024-11-21T17:44:56.449284Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:44:56.449380Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T17:44:56.449519Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 48b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:44:56.451417Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2024-11-21T17:44:56.451451Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2024-11-21T17:44:56.451496Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790007516303537:2341] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T17:44:56.451566Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:56.451588Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790007516303537:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 
48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2024-11-21T17:44:56.455195Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790007516303537:2341] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T17:44:56.455209Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:56.455218Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2024-11-21T17:44:56.455337Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 4 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:44:56.455435Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 5 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:44:56.455509Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 7 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:44:56.455591Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2024-11-21T17:44:56.455604Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2024-11-21T17:44:56.455628Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790007516303537:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2024-11-21T17:44:56.458956Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790007516303537:2341] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T17:44:56.458984Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:56.458994Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2024-11-21T17:44:56.459294Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 9 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:44:56.459334Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2024-11-21T17:44:56.459371Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790007516303537:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2024-11-21T17:44:56.460695Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790007516303537:2341] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T17:44:56.460711Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:56.460719Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2024-11-21T17:44:56.460821Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790007516303534:2341] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 11 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } >> test.py::test[select-boolean_where--Debug] [GOOD] >> LocalTableWriter::WriteTable >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> test.py::test[select-boolean_where--Plan] [GOOD] >> test.py::test[select-boolean_where--Results] |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Debug] >> LocalTableWriter::SupportedTypes >> test.py::test[order_by-order_by_udf--Debug] [GOOD] >> test.py::test[order_by-order_by_udf--Plan] [GOOD] >> test.py::test[order_by-order_by_udf--Results] >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> TargetDiscoverer::IndexedTable |74.2%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> test.py::test[pg-pg_like--Debug] [GOOD] >> test.py::test[pg-pg_like--Plan] [GOOD] >> test.py::test[pg-pg_like--Results] >> test.py::test[case-case_opt_cond-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_opt_cond-default.txt-Plan] |74.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[type_v3-mixed_with_columns--Debug] [GOOD] |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |74.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> test.py::test[type_v3-mixed_with_columns--Plan] [GOOD] >> test.py::test[type_v3-mixed_with_columns--Results] >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Debug] >> test.py::test[window-win_peephole_double_usage-default.txt-Results] [GOOD] >> test.py::test[case-case_opt_cond-default.txt-Plan] [GOOD] >> test.py::test[case-case_opt_cond-default.txt-Results] |74.2%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[join-mergejoin_big_primary--Debug] [GOOD] >> test.py::test[join-mergejoin_big_primary--Plan] [GOOD] >> test.py::test[window-win_func_in_lib--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Analyze] >> LocalTableWriter::WriteTable [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> test.py::test[join-mergejoin_big_primary--Results] |74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> test.py::test[pg-tpcds-q78-default.txt-ForceBlocks] [GOOD] >> CostMetricsGetBlock4Plus2::TestGet4Plus2BlockRequests10000Inflight1000BlobSize1000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize2000000 |74.2%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |74.2%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> TargetDiscoverer::InvalidCredentials >> test.py::test[pg-tpcds-q78-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q78-default.txt-Results] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests2Inflight2BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize2000000 >> test.py::test[like-like_clause-default.txt-ForceBlocks] [GOOD] >> test.py::test[like-like_clause-default.txt-Plan] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] >> test.py::test[expr-list_from_range_overflow-default.txt-Debug] [GOOD] >> test.py::test[expr-list_from_range_overflow-default.txt-Plan] [GOOD] >> test.py::test[expr-list_from_range_overflow-default.txt-Results] >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> test.py::test[pg_catalog-pg_auth_members-default.txt-Analyze] [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> test.py::test[pg-pg_types_literal_with_length-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-pg_types_literal_with_length-default.txt-Plan] >> TargetDiscoverer::IndexedTable [GOOD] >> LocalTableWriter::SupportedTypes [GOOD] >> test.py::test[pg-pg_types_literal_with_length-default.txt-Plan] [GOOD] |74.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |74.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TargetDiscoverer::Basic >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Debug] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Plan] [GOOD] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] >> test.py::test[action-insert_after_eval--Debug] [GOOD] >> test.py::test[action-insert_after_eval--Plan] [GOOD] >> test.py::test[action-insert_after_eval--Results] >> test.py::test[aggr_factory-top_by-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Debug] >> test.py::test[window-win_by_all_aggregate--Debug] [GOOD] >> test.py::test[window-win_by_all_aggregate--Plan] |74.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative >> test.py::test[window-win_by_all_aggregate--Plan] [GOOD] >> test.py::test[pg-pg_like--Results] [GOOD] >> test.py::test[pg-reflection-default.txt-Debug] >> test.py::test[pg-select_join_right3-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_right3-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_right3-default.txt-Results] >> test.py::test[window-win_by_all_aggregate--Results] >> 
test.py::test[window-current/ansi_current_mixed--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Plan] >> TargetDiscoverer::Dirs >> TargetDiscoverer::InvalidCredentials [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2024-11-21T17:44:57.694926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790014662910007:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:57.694985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b19/r3tmp/tmpQxXyfQ/pdisk_1.dat 2024-11-21T17:44:57.756570Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23576 2024-11-21T17:44:57.796351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:57.796434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:57.800700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8111, node 1 2024-11-21T17:44:57.884472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:57.884487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:57.884489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:57.884528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:44:57.989029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:58.078975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211098184 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "uint32_value" Type: "... (TRUNCATED) 2024-11-21T17:44:58.171275Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handshake: worker# [1:7439790018957877723:2280] 2024-11-21T17:44:58.171346Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:44:58.171390Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T17:44:58.171812Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 5 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 7 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 44b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 9 Data: 66b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 11 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 12 Data: 49b CreateTime: 1970-01-01T00:00:00Z 
},{ Offset: 13 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 14 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 15 Data: 58b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 16 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 17 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 18 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 19 Data: 76b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 20 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 21 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 22 Data: 61b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 23 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 24 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 25 Data: 46b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 26 Data: 47b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 27 Data: 50b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 28 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 29 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 30 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 31 Data: 64b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:44:58.172409Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2024-11-21T17:44:58.172469Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790018957877827:2342] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T17:44:58.172475Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:58.172682Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790018957877827:2342] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange 
Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2024-11-21T17:44:58.220517Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790018957877827:2342] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T17:44:58.220554Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:58.220569Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790018957877824:2342] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2024-11-21T17:44:57.849149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790013117542432:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0018ca/r3tmp/tmptAqVdw/pdisk_1.dat 2024-11-21T17:44:57.881756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:44:57.903780Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17124 2024-11-21T17:44:57.942382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:57.942416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:57.947389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23893, node 1 2024-11-21T17:44:57.972409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:57.972421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:57.972422Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:57.972453Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:44:58.073326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:58.082677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:44:58.090506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:44:58.295790Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732211098121, tx_id: 1 } } } 2024-11-21T17:44:58.295808Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T17:44:58.297156Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211098282, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T17:44:58.297171Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T17:44:58.384814Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211098282, tx_id: 281474976715658 } } } 2024-11-21T17:44:58.384830Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2024-11-21T17:44:58.384836Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2024-11-21T17:44:58.384847Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2024-11-21T17:44:57.560572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790015297180464:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:57.560616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b47/r3tmp/tmpTBrzFN/pdisk_1.dat 2024-11-21T17:44:57.649310Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:57.653062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:57.653083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:57.654479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12783 TServer::EnableGrpc on GrpcPort 10067, node 1 2024-11-21T17:44:57.792424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:57.792436Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:57.792438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:57.792474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12783 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:44:57.923690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:57.926490Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:44:57.989159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211098051 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2024-11-21T17:44:58.012351Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handshake: worker# [1:7439790015297180886:2279] 2024-11-21T17:44:58.012408Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:44:58.012440Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T17:44:58.012535Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 36b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:44:58.012565Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2024-11-21T17:44:58.012592Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790019592148274:2338] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T17:44:58.012596Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:58.012607Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790019592148274:2338] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T17:44:58.013873Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439790019592148274:2338] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T17:44:58.013888Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:44:58.013895Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439790019592148271:2338] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> TConsoleTests::TestRestartConsoleAndPools >> TargetDiscoverer::Basic [GOOD] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-ForceBlocks] >> test.py::test[window-current/ansi_current_mixed--Plan] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] >> test.py::test[produce-discard_reduce_lambda-default.txt-Debug] [GOOD] >> test.py::test[order_by-order_by_udf--Results] [GOOD] >> test.py::test[select-boolean_where--Results] [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests1Inflight1BlobSize2000000 [GOOD] >> TargetDiscoverer::Negative [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> test.py::test[select-deep_udf_call--Debug] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize2000000 |74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2024-11-21T17:44:49.970509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:44:49.970547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:44:49.970552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:44:49.970558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:44:49.970583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:44:49.970591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:44:49.970605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:44:49.970703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:44:50.029417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:44:50.029445Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:50.040917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:44:50.041328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:44:50.041346Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:44:50.043678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:44:50.043762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:44:50.043821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:50.043897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:44:50.045062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:44:50.045339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:44:50.045346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:44:50.045356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:44:50.045362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:44:50.045367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:44:50.045380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.148922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:44:50.149033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.149104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:44:50.149146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:44:50.149154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.151664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:50.151701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:44:50.151770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.151781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:44:50.151786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T17:44:50.151791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:44:50.156992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.157036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:44:50.157046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:44:50.160930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.160958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.160966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:44:50.160994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:44:50.161694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:44:50.162641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:44:50.162726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:44:50.162945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:50.162952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:44:50.162957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:50.389852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:50.389916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:44:50.389929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:44:50.390228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:44:50.390235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:44:50.390272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:44:50.390284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:44:50.409209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046578944 2024-11-21T17:44:50.409235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:44:50.409288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:44:50.409292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:44:50.409375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:50.409384Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:44:50.409399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:44:50.409403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:44:50.409409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:44:50.409416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:44:50.409420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:44:50.409424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:44:50.409435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:44:50.409441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:44:50.409444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:44:50.410447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:44:50.410458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:44:50.410461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:44:50.410465Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:44:50.410469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:44:50.410479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:44:50.410482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T1 ... 
2024-11-21T17:44:58.638262Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOB_DEPOT_EVENTS has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638265Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BLOB_DEPOT_EVENTS has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638268Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BLOB_DEPOT_EVENTS has been changed from 0 to 10 2024-11-21T17:44:58.638272Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DS_LOAD_TEST has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638275Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DS_LOAD_TEST has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638278Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DS_LOAD_TEST has been changed from 0 to 10 2024-11-21T17:44:58.638282Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_PROVIDER has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638285Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_PROVIDER has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638288Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_PROVIDER has been changed from 0 to 10 2024-11-21T17:44:58.638291Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_INITIALIZER has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638294Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_INITIALIZER has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638297Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_INITIALIZER has been changed from 0 to 10 2024-11-21T17:44:58.638299Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638302Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638305Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2024-11-21T17:44:58.638308Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638311Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638314Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2024-11-21T17:44:58.638318Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638322Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638326Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2024-11-21T17:44:58.638330Z node 11 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638333Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638336Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2024-11-21T17:44:58.638340Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638343Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638346Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2024-11-21T17:44:58.638348Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638352Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638355Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2024-11-21T17:44:58.638359Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638362Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638365Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2024-11-21T17:44:58.638369Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638372Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638375Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2024-11-21T17:44:58.638378Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638381Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638384Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2024-11-21T17:44:58.638388Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638391Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638394Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2024-11-21T17:44:58.638398Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638401Z node 11 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638405Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2024-11-21T17:44:58.638411Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638414Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638417Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2024-11-21T17:44:58.638420Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638423Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638425Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2024-11-21T17:44:58.638429Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638432Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638435Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2024-11-21T17:44:58.638439Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638442Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638445Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2024-11-21T17:44:58.638448Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638451Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638455Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2024-11-21T17:44:58.638460Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638463Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638466Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2024-11-21T17:44:58.638470Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638473Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the 
component DATA_INTEGRITY has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638476Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2024-11-21T17:44:58.638480Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638483Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638486Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2024-11-21T17:44:58.638492Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2024-11-21T17:44:58.638494Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2024-11-21T17:44:58.638497Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2024-11-21T17:44:58.638535Z node 11 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } >> test.py::test[produce-discard_reduce_lambda-default.txt-Plan] >> test.py::test[produce-discard_reduce_lambda-default.txt-Plan] [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Results] >> test.py::test[case-case_opt_cond-default.txt-Results] [GOOD] >> test.py::test[coalesce-coalesce--Analyze] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize2000000 [GOOD] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize2000000 >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> 
TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2024-11-21T17:44:58.834666Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790018159300969:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:58.835141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0018ba/r3tmp/tmpgUBYlY/pdisk_1.dat 2024-11-21T17:44:58.899630Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24203 TServer::EnableGrpc on GrpcPort 6417, node 1 2024-11-21T17:44:58.936478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:58.936512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:58.940670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:44:58.940937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:58.940939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:58.940941Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:58.940975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:44:59.016800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:44:59.032409Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:44:59.077117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:44:59.324284Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_types/credentials/login/login.cpp:192: Invalid user } } } 2024-11-21T17:44:59.324302Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_types/credentials/login/login.cpp:192: Invalid user } >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[limit-empty_read_after_limit-default.txt-Analyze] >> test.py::test[type_v3-mixed_with_columns--Results] [GOOD] >> test.py::test[type_v3-split--Debug] [SKIPPED] >> test.py::test[type_v3-split--Plan] [SKIPPED] >> TConsoleTests::TestGetUnknownTenantStatus >> test.py::test[type_v3-split--Results] [SKIPPED] >> test.py::test[type_v3-uuid--Debug] >> TargetDiscoverer::Dirs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: 2024-11-21T17:44:35.471997Z node 1 :BS_PROXY_GET ERROR: [47ad982f08e135f5] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[72057594037927937:2:1:2:1:5:0] DEADLINE Size# 0 RequestedSize# 5} ErrorReason# "status# DEADLINE from# [0:1:0:0:0]"} Marker# BPG29 2024-11-21T17:44:35.472026Z node 1 :BS_VDISK_PATCH ERROR: VDISK[0:_:0:0:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [72057594037927937:2:1:2:1:5:0] PatchedBlobId# [72057594037927937:2:1:2:4:5:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [0:1:0:0:0] Marker# BSVSP01 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:143:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:146:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:145:2167] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:149:2057] recipient: [5:145:2167] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:148:2168] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:218:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2172] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:154:2057] recipient: [7:150:2172] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2173] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:151:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:153:2174] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:157:2057] recipient: [8:153:2174] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:156:2175] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:156:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:156:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:153:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:156:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:155:2176] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:159:2057] recipient: [10:155:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:158:2177] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:228:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:154:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:158:2057] recipient: [11:156:2176] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:160:2057] recipient: [11:156:2176] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:159:2177] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:229:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:157:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:160:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:161:2057] recipient: [12:159:2179] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:163:2057] recipient: [12:159:2179] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:162:2180] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:215:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! 
Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:161:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:163:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:165:2057] recipient: [13:164:2183] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:167:2057] recipient: [13:164:2183] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:166:2184] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:219:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:166:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:169:2057] recipient: [14:168:2188] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:170:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:172:2057] recipient: [14:168:2188] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:171:2189] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:241:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:166:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:168:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:170:2057] recipient: [15:169:2188] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:172:2057] recipient: [15:169:2188] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! 
new actor is[15:171:2189] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:241:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2024-11-21T17:44:59.097948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790021848105098:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:59.098451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0018b2/r3tmp/tmpksKeUJ/pdisk_1.dat 2024-11-21T17:44:59.177140Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:59.193194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:59.193231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:59.194293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5155 TServer::EnableGrpc on GrpcPort 30489, node 1 2024-11-21T17:44:59.208515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:59.208530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:59.208532Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:59.208568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:44:59.246865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:44:59.249581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:44:59.250349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:59.318987Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732211099297, tx_id: 1 } } } 2024-11-21T17:44:59.318999Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T17:44:59.319937Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211099353, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T17:44:59.319942Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T17:44:59.428043Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211099353, tx_id: 281474976715658 } } } 2024-11-21T17:44:59.428057Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2024-11-21T17:44:59.428062Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2024-11-21T17:44:59.273624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790023140215993:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:59.273727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001893/r3tmp/tmpQN3Dip/pdisk_1.dat 2024-11-21T17:44:59.346225Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:59.364476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:59.364502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:59.368182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18570 TServer::EnableGrpc on GrpcPort 32222, node 1 2024-11-21T17:44:59.459151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:59.459165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:59.459171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T17:44:59.459231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:44:59.483912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:59.486240Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:44:59.490664Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2024-11-21T17:44:59.490697Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } |74.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[blocks-top_sort_two_mix--Debug] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Plan] [GOOD] >> test.py::test[blocks-top_sort_two_mix--Results] >> test.py::test[expr-list_from_range_overflow-default.txt-Results] [GOOD] >> test.py::test[expr-pickle-default.txt-Debug] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> test.py::test[pg-tpcds-q78-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2024-11-21T17:44:59.308448Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790020054541267:2071];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:44:59.308698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0018b6/r3tmp/tmpFk5JKj/pdisk_1.dat 2024-11-21T17:44:59.377948Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29684 2024-11-21T17:44:59.407682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:44:59.407724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:44:59.412031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1120, node 1 2024-11-21T17:44:59.460243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:44:59.460254Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:44:59.460255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:44:59.460289Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:44:59.497178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:44:59.512487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:44:59.545148Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732211099542, tx_id: 1 } } } 2024-11-21T17:44:59.545162Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T17:44:59.546266Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732211099556, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T17:44:59.546276Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T17:44:59.546882Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211099570, tx_id: 281474976715659 } }] } } 2024-11-21T17:44:59.546890Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2024-11-21T17:44:59.656562Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211099570, tx_id: 281474976715659 } } } 2024-11-21T17:44:59.656576Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2024-11-21T17:44:59.656581Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table |74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Analyze] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Debug] >> test.py::test[schema-user_schema_empty_table_ranges-default.txt-Results] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Debug] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> test.py::test[window-full/aggregations_leadlag_compact--Debug] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Plan] >> test.py::test[join-mergejoin_big_primary--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Debug] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight1BlobSize2000000 [GOOD] >> 
CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight1BlobSize2000000 >> test.py::test[pg-reflection-default.txt-Debug] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Debug] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Plan] [GOOD] >> test.py::test[aggregate-count_distinct_with_filter--Results] >> test.py::test[window-full/aggregations_leadlag_compact--Plan] [GOOD] >> test.py::test[window-full/aggregations_leadlag_compact--Results] >> test.py::test[pg-select_join_right3-default.txt-Results] [GOOD] >> test.py::test[pg-reflection-default.txt-Plan] [GOOD] >> test.py::test[pg-reflection-default.txt-Results] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> test.py::test[aggr_factory-multi_tuple-default.txt-Results] [GOOD] >> TConsoleTests::TestCreateTenant >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Debug] >> TConsoleConfigTests::TestModifyConfigItem >> test.py::test[coalesce-coalesce--Analyze] [GOOD] >> test.py::test[coalesce-coalesce--Debug] >> test.py::test[action-insert_after_eval--Results] [GOOD] >> test.py::test[action-parallel_for-default.txt-Debug] |74.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[window-win_peephole_double_usage-default.txt-Results] [GOOD] |74.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[pg_catalog-pg_auth_members-default.txt-Analyze] [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> test.py::test[limit-empty_read_after_limit-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Debug] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Plan] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Results] >> test.py::test[select-deep_udf_call--Debug] [GOOD] >> test.py::test[select-deep_udf_call--Plan] [GOOD] >> test.py::test[select-deep_udf_call--Results] >> CostMetricsPutHugeMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize2000000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize1000 >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> 
TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> test.py::test[aggregate-native_desc_group_compact_by--Debug] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Plan] >> test.py::test[aggregate-native_desc_group_compact_by--Plan] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Results] |74.3%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TTabletResolver::NodeProblem >> TTabletPipeTest::TestPipeConnectToHint >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock >> KqpScanArrowInChanels::AllTypesColumns |74.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[pg-pg_types_literal_with_length-default.txt-Plan] [GOOD] >> KqpScanArrowFormat::SingleKey >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> test.py::test[type_v3-uuid--Debug] [GOOD] >> test.py::test[type_v3-uuid--Plan] >> KqpScanArrowFormat::AllTypesColumns >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight1BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1BlobSize1000 >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> test.py::test[type_v3-uuid--Plan] [GOOD] >> test.py::test[type_v3-uuid--Results] >> test.py::test[expr-pickle-default.txt-Debug] [GOOD] >> test.py::test[expr-pickle-default.txt-Plan] [GOOD] >> test.py::test[expr-pickle-default.txt-Results] >> TTabletResolver::NodeProblem [GOOD] >> test.py::test[produce-discard_reduce_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-native_desc_reduce_with_presort--Debug] >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> test.py::test[blocks-top_sort_two_mix--Results] [GOOD] >> test.py::test[case-case_many_val--Debug] [SKIPPED] >> test.py::test[case-case_many_val--Plan] [SKIPPED] >> test.py::test[case-case_many_val--Results] [SKIPPED] >> test.py::test[coalesce-coalesce_few_opt--Debug] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-ForceBlocks] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-Results] |74.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[order_by-order_by_udf--Results] [GOOD] |74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2024-11-21T17:45:03.637081Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 
entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637125Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:201:2134] CurrentLeaderTablet: [1:202:2135] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:45:03.637131Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T17:45:03.637138Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2024-11-21T17:45:03.637166Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637190Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:207:2138] CurrentLeaderTablet: [1:208:2139] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:45:03.637194Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T17:45:03.637199Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2138] 2024-11-21T17:45:03.637358Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637365Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2024-11-21T17:45:03.637386Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637391Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2138] 2024-11-21T17:45:03.637411Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2024-11-21T17:45:03.637417Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:201:2134] by NodeId 2024-11-21T17:45:03.637423Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637454Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:217:2092] CurrentLeaderTablet: [2:218:2093] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:45:03.637458Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T17:45:03.637463Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2024-11-21T17:45:03.637489Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: 
[1:207:2138] by NodeId 2024-11-21T17:45:03.637493Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637516Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:45:03.637521Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T17:45:03.637526Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T17:45:03.637701Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2024-11-21T17:45:03.637710Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637715Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2024-11-21T17:45:03.637738Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637742Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T17:45:03.637763Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2024-11-21T17:45:03.637768Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:217:2092] by NodeId 2024-11-21T17:45:03.637774Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637796Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:235:2092] CurrentLeaderTablet: [3:236:2093] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:45:03.637800Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T17:45:03.637805Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2024-11-21T17:45:03.637835Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637839Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T17:45:03.637863Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2024-11-21T17:45:03.637870Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637875Z node 1 
:TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2024-11-21T17:45:03.637898Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:223:2094] by NodeId 2024-11-21T17:45:03.637904Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:45:03.637929Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:45:03.637935Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T17:45:03.637940Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> test.py::test[window-current/ansi_current_mixed--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Analyze] >> test.py::test[pg-reflection-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg-default.txt-Debug] >> test.py::test[aggregate-count_distinct_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Debug] >> test.py::test[select-deep_udf_call--Results] [GOOD] >> test.py::test[select-dict_lookup-default.txt-Debug] |74.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |74.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |74.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> test.py::test[pg-tpcds-q85-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Debug] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TStorageTenantTest::Empty [GOOD] >> test.py::test[coalesce-coalesce--Debug] [GOOD] >> test.py::test[coalesce-coalesce--ForceBlocks] |74.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> test.py::test[select-corr_name_in_select_seq-default.txt-Debug] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Plan] >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> test.py::test[select-corr_name_in_select_seq-default.txt-Plan] [GOOD] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |74.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[limit-empty_read_after_limit-default.txt-Analyze] [GOOD] >> test.py::test[aggregate-subquery_aggregation--Results] [GOOD] >> test.py::test[aggregate-yql-18511--Debug] >> test.py::test[expr-pickle-default.txt-Results] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Debug] >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> test.py::test[join-mergejoin_big_primary_unique--Debug] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Plan] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--Results] |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] >> test.py::test[aggregate-yql-18511--Debug] [SKIPPED] >> test.py::test[aggregate-yql-18511--Plan] [SKIPPED] >> test.py::test[aggregate-yql-18511--Results] [SKIPPED] >> test.py::test[ansi_idents-escaping-default.txt-Debug] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription |74.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |74.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> test.py::test[window-win_func_lead_lag_worm_with_part--Debug] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |74.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |74.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> test.py::test[type_v3-uuid--Results] [GOOD] >> test.py::test[udf-automap_null--Debug] >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> test.py::test[pg-select_agg-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg-default.txt-Results] >> test.py::test[action-parallel_for-default.txt-Debug] [GOOD] >> test.py::test[action-parallel_for-default.txt-Plan] [GOOD] >> test.py::test[action-parallel_for-default.txt-Results] |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> test.py::test[window-win_func_aggr_with_qualified_all--Analyze] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight1BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize2000000 >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-udaf-default.txt-Results] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> test.py::test[select-dict_lookup-default.txt-Debug] [GOOD] >> test.py::test[select-dict_lookup-default.txt-Plan] [GOOD] |74.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[pg-select_join_right3-default.txt-Results] [GOOD] >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> test.py::test[select-dict_lookup-default.txt-Results] >> test.py::test[coalesce-coalesce--ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce--Plan] [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification >> YdbSdkSessionsPool::PeriodicTask10 
>> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize1000 >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> test.py::test[window-full/aggregations_leadlag_compact--Results] [GOOD] >> test.py::test[window-full/session_incompat_sort--Debug] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests2Inflight2BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize2000000 |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> YdbSdkSessionsPool::StressTestAsync10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 17132, MsgBus: 27658 2024-11-21T17:45:03.984516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790039386223525:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:03.984739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016d4/r3tmp/tmpjGbQ5Y/pdisk_1.dat 2024-11-21T17:45:04.107083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:04.107108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:04.110102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:04.113470Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17132, node 1 2024-11-21T17:45:04.123416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:04.123428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:04.123429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:04.123450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27658 TClient is connected to server localhost:27658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:45:04.213350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.218700Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:04.231645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.298754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.356884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:04.368698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.453456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790043681192368:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.453488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.482971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.489383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.499615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.515537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.527589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.589137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.614218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790043681192884:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.614246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.614344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790043681192889:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.615081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:04.617618Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:04.617687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790043681192891:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:04.796734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 864000000000 2024-11-21T17:45:04.876658Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211104918, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 16976, MsgBus: 6686 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016d4/r3tmp/tmp37BJCI/pdisk_1.dat 2024-11-21T17:45:05.070095Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:05.070530Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16976, node 2 2024-11-21T17:45:05.079901Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:05.079915Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:05.079917Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:05.079956Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6686 TClient is connected to server localhost:6686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:05.159422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:05.159454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:05.160705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:05.161844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.163252Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:05.202302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:05.258597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.273711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.297073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.397505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790047321120754:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:05.397560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:05.402844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.421555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.435652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.446483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.460448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.472573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.501004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790047321121257:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:05.501043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:05.501236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790047321121262:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:05.501861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:05.504600Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:05.504650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790047321121264:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:05.788115Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211105828, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 25750, MsgBus: 26388 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016d4/r3tmp/tmpCbWANE/pdisk_1.dat 2024-11-21T17:45:06.068333Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:06.068588Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25750, node 3 2024-11-21T17:45:06.080590Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:06.080600Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:06.080601Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:06.080630Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26388 TClient is connected to server localhost:26388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:45:06.145882Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:06.145912Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:06.146288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.147365Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:06.148522Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:06.151122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:45:06.170331Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.230637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:06.238062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:06.348639Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790050659378708:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:06.348658Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:06.353260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.373642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.391270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.408509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.419401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.438339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.457137Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790050659379221:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:06.457167Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:06.460394Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790050659379226:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:06.461250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:06.463876Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:06.463943Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790050659379228:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:06.673030Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211106717, txId: 281474976715671] shutting down 2024-11-21T17:45:06.702771Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211106745, txId: 281474976715673] shutting down |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> test.py::test[coalesce-coalesce_few_opt--Debug] [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Plan] [GOOD] >> test.py::test[coalesce-coalesce_few_opt--Results] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Debug] [GOOD] >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> test.py::test[pg-tpcds-q85-default.txt-Debug] [GOOD] |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Plan] [GOOD] >> YdbSdkSessionsPool::StressTestSync1 >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] >> YdbSdkSessionsPool::RunSmallPlan >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Debug] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Plan] [GOOD] >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Results] >> test.py::test[aggregate-native_desc_group_compact_by--Results] [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> test.py::test[aggregate-table_row_aggregation-default.txt-Debug] >> test.py::test[ansi_idents-escaping-default.txt-Debug] [GOOD] >> YdbSdkSessionsPool::StressTestSync10 >> test.py::test[ansi_idents-escaping-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-escaping-default.txt-Results] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10Inflight10BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize1000 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests10Inflight10BlobSize2000000 [GOOD] >> CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize2000000 >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Debug] 
[GOOD] >> test.py::test[aggregate-group_by_gs_duo--Plan] >> test.py::test[join-mergejoin_big_primary_unique--Results] [GOOD] >> test.py::test[join-mergejoin_force_align1-off-Debug] >> test.py::test[pg-select_agg-default.txt-Results] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Debug] >> test.py::test[aggregate-group_by_gs_duo--Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Results] >> test.py::test[join-mergejoin_force_align1-off-Debug] [SKIPPED] >> test.py::test[join-mergejoin_force_align1-off-Plan] [SKIPPED] >> test.py::test[join-mergejoin_force_align1-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_one_sorted--Debug] >> test.py::test[select-corr_name_in_select_seq-default.txt-Results] [GOOD] >> test.py::test[select-id_xor-default.txt-Debug] >> YdbSdkSessionsPool::WaitQueue10 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> test.py::test[udf-automap_null--Debug] [GOOD] >> test.py::test[udf-automap_null--Plan] [GOOD] >> test.py::test[udf-automap_null--Results] >> test.py::test[agg_phases_agg_apply-sum_null-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Analyze] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoKind >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests100Inflight10BlobSize1000 [GOOD] >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1000BlobSize1000 |74.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAutoSplit >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> test.py::test[window-win_func_lead_lag_worm_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Plan] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 3991, MsgBus: 8228 2024-11-21T17:45:04.350618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790045091427808:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:04.350703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017f8/r3tmp/tmplYgnNy/pdisk_1.dat 2024-11-21T17:45:04.419957Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3991, node 1 2024-11-21T17:45:04.434507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:04.434519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:04.434520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:04.434552Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:04.446513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:04.446556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:04.447651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8228 TClient is connected to server localhost:8228 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:04.499881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.504885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:04.513212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.585448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.632964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.648926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.744342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790045091429190:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.744458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.750740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.758230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.764566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.771906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.779314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.793401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.801923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790045091429691:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.801944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.801945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790045091429696:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.802456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:04.805955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790045091429698:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:45:05.000300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 864000000000 2024-11-21T17:45:05.075789Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211105121, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 14792, MsgBus: 30001 2024-11-21T17:45:05.316446Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790047315151220:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:05.318683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017f8/r3tmp/tmpwIszUG/pdisk_1.dat 2024-11-21T17:45:05.341598Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14792, node 2 2024-11-21T17:45:05.361410Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:05.361426Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:05.361428Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:05.361462Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30001 2024-11-21T17:45:05.406020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:05.406053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:05.407047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T17:45:05.444682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.446449Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:05.569481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.587630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:05.612640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.625178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.770918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2 ... 0 2024-11-21T17:45:07.105200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:07.137436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:07.174723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:07.192334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:07.272408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790057600306012:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.272456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.276749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.288211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.299598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.313952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.320244Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.327733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.357476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790057600306514:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.357518Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.357766Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790057600306519:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.358492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:07.360688Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:07.360765Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790057600306524:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:08.130404Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211107690, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 29066, MsgBus: 11243 2024-11-21T17:45:08.904471Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790060472428071:2095];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:08.905980Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017f8/r3tmp/tmpXH3PZN/pdisk_1.dat 2024-11-21T17:45:08.930685Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29066, node 4 2024-11-21T17:45:08.963805Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:08.963830Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:08.963832Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:08.963879Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:08.976496Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:08.976531Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:08.984523Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11243 TClient is connected to server localhost:11243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:09.201171Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:09.215325Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:09.242104Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:09.308827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:09.335558Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:09.405115Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439790064767396853:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:09.405219Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:09.407842Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.423826Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.435799Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.458210Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.468239Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.485514Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.502298Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439790064767397356:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:09.502327Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:09.502408Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439790064767397361:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:09.503351Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:09.505750Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439790064767397363:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:10.157374Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211109804, txId: 281474976715671] shutting down |74.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[coalesce-coalesce--Plan] [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 26758, MsgBus: 26286 2024-11-21T17:45:04.119796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790045698491676:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:04.119812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001823/r3tmp/tmpbRBt1o/pdisk_1.dat 2024-11-21T17:45:04.223633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:04.223655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:04.224936Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:04.228646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26758, node 1 2024-11-21T17:45:04.241113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:04.241120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:04.241122Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:04.241166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26286 TClient is connected to server localhost:26286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:04.292986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:04.297536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.358662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:04.383042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:04.395488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.627441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790045698493219:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.631860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.637393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.656067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.671917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.685585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.695922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.710236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:04.766903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790045698493738:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.766922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.766990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790045698493743:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:04.767646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:04.771263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790045698493745:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:04.958409Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211105002, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 3715, MsgBus: 6649 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001823/r3tmp/tmpsigu3i/pdisk_1.dat 2024-11-21T17:45:05.297141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:05.298310Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3715, node 2 2024-11-21T17:45:05.306670Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:05.306688Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:05.306690Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:05.306725Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6649 2024-11-21T17:45:05.383871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:05.383895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:05.384956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:45:05.405401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:05.408703Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:05.419490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.444168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:05.469896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.482667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:05.660490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790048863515997:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:05.660519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:05.664744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchem ... eTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:06.716840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:06.731495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:06.738898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.968504Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790052243892394:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:06.968521Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:06.971292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:06.987373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.002739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.019555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.031179Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.050004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:07.066067Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790056538860183:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.066083Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.066203Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790056538860188:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:07.066845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:07.069160Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:07.069223Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790056538860190:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:07.417274Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211107361, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 20000, MsgBus: 18077 2024-11-21T17:45:08.112907Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790061964486934:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:08.115662Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001823/r3tmp/tmp8ApLsy/pdisk_1.dat 2024-11-21T17:45:08.143017Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20000, node 4 2024-11-21T17:45:08.154083Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:08.154094Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:08.154095Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:08.154120Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18077 TClient is connected to server localhost:18077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:08.236445Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:08.236474Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:08.236836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.237254Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:08.239159Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:08.240933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.256950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:08.287259Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:08.301304Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:08.467856Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439790061964488321:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:08.467872Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:08.470258Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.483843Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.499100Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.511685Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.524646Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.542126Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:08.557166Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439790061964488824:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:08.557185Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:08.557389Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439790061964488829:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:08.558001Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:08.569537Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439790061964488831:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:09.200411Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211108894, txId: 281474976715671] shutting down >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestValidation >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateSharedTenant >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] >> YdbSdkSessionsPool::Get1Session >> DSProxyCounters::PutGeneratedSubrequestBytes >> test.py::test[action-parallel_for-default.txt-Results] [GOOD] >> test.py::test[action-pending_arg_fail--Debug] [SKIPPED] >> test.py::test[action-pending_arg_fail--Plan] >> DSProxyCounters::PutGeneratedSubrequestBytes [GOOD] >> TDSProxyGetTest::TestBlock42GetSpecific3 >> test.py::test[action-pending_arg_fail--Plan] [SKIPPED] >> test.py::test[action-pending_arg_fail--Results] >> test.py::test[select-dict_lookup-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Debug] >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates >> YdbSdkSessionsPool::CustomPlan >> test.py::test[coalesce-coalesce_few_opt--Results] [GOOD] >> test.py::test[column_group-hint_anon-perusage-Debug] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Debug] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Plan] [SKIPPED] >> test.py::test[column_group-hint_anon_groups-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Debug] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Plan] [SKIPPED] >> test.py::test[column_group-hint_dup_def_fail--Results] [SKIPPED] >> test.py::test[column_group-length-perusage-Debug] [SKIPPED] >> test.py::test[column_group-length-perusage-Plan] [SKIPPED] >> test.py::test[column_group-length-perusage-Results] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Debug] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Plan] [SKIPPED] >> test.py::test[column_group-min_group-default.txt-Results] [SKIPPED] >> test.py::test[column_order-insert--Debug] >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> test.py::test[expr-to_hashed_dict_dict_key-default.txt-Results] [GOOD] >> test.py::test[expr-tzdate_result-default.txt-Debug] >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> test.py::test[aggr_factory-udaf-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Debug] >> TDSProxyGetTest::TestBlock42GetSpecific3 [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_ErasureMirror3dc >> DSProxyCounters::MultiPutGeneratedSubrequestBytes [GOOD] >> TDSProxyGetTest::TestBlock42GetSpecific2 >> TDSProxyPatchTest::NaiveErrorOnGet_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestBlock42MaxPartCountOnHandoff [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block [GOOD] >> TDSProxyGetTest::TestBlock42GetSpecific2 [GOOD] >> 
TDSProxyPatchTest::SecuredOk_ErasureNone >> TYardTest::TestLogWriteCutEqualRandomWait [GOOD] >> TYardTest::TestLogWriteCutUnequal >> TBlobStorageProxySequenceTest::TestGivenBlock42Put6PartsOnOneVDiskWhenDiscoverThenRecoverFirst >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> TDSProxyPatchTest::SecuredOk_ErasureNone [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGet_Erasure4Plus2Block >> TDSProxyPatchTest::NaiveErrorOnGet_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod [GOOD] >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> YdbSdkSessionsPool::WaitQueue1 >> TBlobStorageProxySequenceTest::TestGivenBlock42Put6PartsOnOneVDiskWhenDiscoverThenRecoverFirst [GOOD] >> TDSProxyGetTest::TestMirror32GetIntervalsWipedAllOk >> test.py::test[ansi_idents-escaping-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-Debug] >> YdbSdkSessionsPool::RunSmallPlan [GOOD] |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus1Block [GOOD] >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag |74.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcWith3x3MinLatencyMod [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Debug] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Plan] >> test.py::test[aggregate-table_row_aggregation-default.txt-Plan] [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestListTenants >> test.py::test[pg-select_agg_gs_grouping-default.txt-Plan] [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Results] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] >> test.py::test[produce-native_desc_reduce_with_presort--Debug] [GOOD] >> test.py::test[produce-native_desc_reduce_with_presort--Plan] [GOOD] >> test.py::test[produce-native_desc_reduce_with_presort--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2024-11-21T17:45:12.538233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790079045947886:2152];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:12.538530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020c2/r3tmp/tmptUgn9j/pdisk_1.dat 2024-11-21T17:45:13.059591Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:13.065002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:13.065018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:13.072962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8598, node 1 2024-11-21T17:45:13.185353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:13.185367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:13.185369Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:13.185402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:13.754687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:13.778982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:13.779427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:13.792339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:13.792426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:13.792431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:45:13.793620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:13.793623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:45:13.794287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:13.795270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211113843, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:13.795275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:45:13.795355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:45:13.795876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:13.795924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:13.795933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:45:13.795943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:45:13.795952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:45:13.795962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:45:13.797224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:45:13.797434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:45:13.797438Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 waiting... 2024-11-21T17:45:13.804011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T17:45:13.804128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> test.py::test[udf-automap_null--Results] [GOOD] >> test.py::test[udf-complex_return_type--Debug] >> test.py::test[window-full/session_incompat_sort--Debug] [GOOD] >> test.py::test[window-full/session_incompat_sort--Plan] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] >> test.py::test[select-id_xor-default.txt-Debug] [GOOD] >> test.py::test[select-id_xor-default.txt-Plan] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> test.py::test[select-id_xor-default.txt-Plan] [GOOD] >> test.py::test[select-id_xor-default.txt-Results] >> TBlobStorageProxySequenceTest::TestGivenBlock42GroupGenerationGreaterThanVDiskGenerations [GOOD] >> TDSProxyGetTest::TestBlock42WipedOneDiskAndErrorDurringGet [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureMirror3dc >> test.py::test[aggr_factory-avg_if-default.txt-Analyze] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CostMetricsPutMirror3dc::TestPutMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] Test command err: RandomSeed# 15270197646955972877 2024-11-21T17:44:30.550482Z 8 00h00m30.010000s :BS_PROXY_GET ERROR: [02472f50fba2cd3e] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:1:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# 
[82000000:1:2:1:0]"} Marker# BPG29 2024-11-21T17:44:30.550525Z 5 00h00m30.010000s :BS_PROXY_GET ERROR: [59190601d459780c] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[1:1:1:10:2:1000:0] DEADLINE Size# 0 RequestedSize# 1000} ErrorReason# "status# DEADLINE from# [82000000:1:1:1:0]"} Marker# BPG29 2024-11-21T17:44:30.550546Z 8 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:2:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:1:1000:0] PatchedBlobId# [1:1:2:10:1:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:2:1:0] Marker# BSVSP01 2024-11-21T17:44:30.550567Z 5 00h00m30.010000s :BS_VDISK_PATCH ERROR: VDISK[82000000:_:1:1:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [1:1:1:10:2:1000:0] PatchedBlobId# [1:1:2:10:4098:1000:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [82000000:1:1:1:0] Marker# BSVSP01 >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_1_2_VdiskErrors >> TSubscriberTest::StrongNotificationAfterCommit >> TDSProxyGetTest::TestBlock42GetBlobCrcCheck >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_1_2_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe >> test.py::test[pg-tpcds-q85-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] >> test.py::test[join-mergejoin_force_one_sorted--Debug] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted--Plan] [GOOD] >> test.py::test[join-mergejoin_force_one_sorted--Results] >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TYardTest::TestLogWriteCutUnequal [GOOD] >> TYardTest::TestLogMultipleWriteRead >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_list_key--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_list_key--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_list_key--Results] >> test.py::test[aggr_factory-udaf_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-Debug] >> test.py::test[aggregate-group_by_hop_list_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Plan] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> TDSProxyGetTest::TestBlock42GetBlobCrcCheck [GOOD] >> TDSProxyPatchTest::SecuredErrorOnPut_Erasure4Plus2Block >> test.py::test[aggregate-group_by_hop_only--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Results] [SKIPPED] >> test.py::test[aggregate-group_by_tz_date--Debug] >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> 
CostMetricsGetHugeMirror3dc::TestGetMirror3dcRequests100Inflight10BlobSize2000000 [GOOD] >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 >> TDSProxyPatchTest::SecuredErrorOnPut_Erasure4Plus2Block [GOOD] >> TDSProxyPatchTest::MovedOk_ErasureMirror3dc >> test.py::test[action-pending_arg_fail--Results] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2024-11-21T17:45:19.794867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:19.796628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:19.796651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:19.796659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:19.796670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T17:45:19.796700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T17:45:19.796923Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:19.796934Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T17:45:19.796940Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:19.796985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:19.796992Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T17:45:19.796997Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Update to strong state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:19.797011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:19.797017Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T17:45:19.797021Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> test.py::test[ansi_idents-inplace_yql-default.txt-Debug] [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-Plan] [GOOD] >> TYardTest::TestLogMultipleWriteRead [GOOD] >> TYardTest::TestLogWriteLsnConsistency >> test.py::test[window-win_func_lead_lag_worm_with_part--Results] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Analyze] >> TDSProxyPatchTest::MovedOk_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors >> test.py::test[select-id_xor-default.txt-Results] [GOOD] >> test.py::test[select-opt_list_access-default.txt-Debug] >> test.py::test[ansi_idents-inplace_yql-default.txt-Results] >> TSubscriberTest::SyncPartial >> test.py::test[expr-tzdate_result-default.txt-Debug] [GOOD] >> test.py::test[expr-tzdate_result-default.txt-Plan] >> TYardTest::TestLogWriteLsnConsistency [GOOD] >> TYardTest::TestLotsOfTinyAsyncLogLatency >> test.py::test[window-win_func_aggr_with_qualified_all--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Plan] >> test.py::test[expr-tzdate_result-default.txt-Plan] [GOOD] >> test.py::test[expr-tzdate_result-default.txt-Results] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2024-11-21T17:45:17.402074Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790098067330414:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:17.402357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020c1/r3tmp/tmpuhiOr4/pdisk_1.dat 2024-11-21T17:45:18.082014Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:18.085415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:18.085443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:18.090239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6597, node 1 2024-11-21T17:45:18.236635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:18.236646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:18.236647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:18.236685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62928 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:18.369857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.380185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:18.380212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.392797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:18.393274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:18.393279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:45:18.400454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:18.400470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:45:18.401882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:18.408626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.423751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211118463, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:18.423774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:45:18.423837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:45:18.427848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:18.427903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:18.427916Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:45:18.427930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:45:18.427939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:45:18.427957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:45:18.428682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:45:18.428702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:45:18.428706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:18.428721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Debug] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Plan] [GOOD] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] >> test.py::test[window-win_func_aggr_with_qualified_all--Plan] [GOOD] >> YdbSdkSessionsPool::WaitQueue1 [GOOD] >> TSubscriberTest::Sync >> test.py::test[window-win_func_aggr_with_qualified_all--Results] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> test.py::test[udf-complex_return_type--Debug] [GOOD] >> test.py::test[udf-complex_return_type--Plan] [GOOD] >> test.py::test[udf-complex_return_type--Results] |74.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |74.4%| [TA] {RESULT} 
$(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |74.4%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TYardTest::TestLotsOfTinyAsyncLogLatency [GOOD] >> TYardTest::TestLogLatency >> TSubscriberTest::SyncPartial [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> test.py::test[pg-select_agg_gs_grouping-default.txt-Results] [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe [GOOD] >> test.py::test[pg-select_common_type_intersect-default.txt-Debug] >> TSubscriberTest::NotifyUpdate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_1_1_0_VdiskErrors [GOOD] Test command err: 2024-11-21T17:45:20.912668Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:81:2127] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T17:45:20.912727Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T17:45:20.912733Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T17:45:20.912739Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T17:45:20.912744Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T17:45:20.912749Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T17:45:20.912753Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T17:45:20.915753Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T17:45:20.915791Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T17:45:20.915797Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T17:45:20.915832Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2024-11-21T17:45:20.915838Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T17:45:20.915842Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T17:45:20.915880Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult 
Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2024-11-21T17:45:20.915924Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T17:45:20.916016Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2024-11-21T17:45:20.916033Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T17:45:20.916041Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TSubscriberTest::NotifyDelete >> TSubscriberTest::Sync [GOOD] >> test.py::test[pg-tpcds-q85-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Analyze] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2024-11-21T17:45:18.880077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790103209719480:2064];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:18.880188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020aa/r3tmp/tmpULavg4/pdisk_1.dat 2024-11-21T17:45:19.262893Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13731, node 1 2024-11-21T17:45:19.306826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:19.306845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:19.307519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:19.307532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:19.307535Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:19.307579Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:19.313546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:19.364641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:19.365809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:19.365821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:19.367165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:19.367218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:19.367222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:45:19.372727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:19.372748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:45:19.376423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:19.380325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:19.380746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211119429, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:19.380754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:45:19.380841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:45:19.384782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:19.384865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:19.384884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:45:19.384906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2024-11-21T17:45:19.384920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:45:19.384946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:45:19.385828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:45:19.385846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:45:19.385851Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:19.385874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 |74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2024-11-21T17:45:20.991848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:20.992301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:20.992328Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:20.992335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:20.992346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T17:45:20.992378Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T17:45:20.992395Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:20.992406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T17:45:20.992414Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:20.992450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T17:45:20.992473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:33:2064], cookie# 1 2024-11-21T17:45:20.992482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2064], cookie# 1 2024-11-21T17:45:20.992488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 1 2024-11-21T17:45:20.992501Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T17:45:20.992506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T17:45:20.992517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 1 2024-11-21T17:45:20.992523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T17:45:20.992529Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T17:45:20.992535Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:20.992541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:34:2064], cookie# 1 2024-11-21T17:45:20.992557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2024-11-21T17:45:20.992562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:35:2064], cookie# 1 2024-11-21T17:45:20.992567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2024-11-21T17:45:20.992579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 2 2024-11-21T17:45:20.992592Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 2 2024-11-21T17:45:20.992596Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T17:45:20.992602Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2064], cookie# 2 2024-11-21T17:45:20.992608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 2 2024-11-21T17:45:20.992618Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 
Partial: 0 }: sender# [1:9:2056], cookie# 2 2024-11-21T17:45:20.992623Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:34:2064], cookie# 2 2024-11-21T17:45:20.992628Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:32:2064][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:45:20.992634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T17:45:20.992640Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:20.992647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:35:2064], cookie# 2 2024-11-21T17:45:20.992651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Unexpected sync response: sender# [1:35:2064], cookie# 2 2024-11-21T17:45:20.992660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 3 2024-11-21T17:45:20.992672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 3 2024-11-21T17:45:20.992676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T17:45:20.992681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:34:2064], cookie# 3 2024-11-21T17:45:20.992685Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:32:2064][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:45:20.992691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 3 2024-11-21T17:45:20.992702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2064], cookie# 3 2024-11-21T17:45:20.992706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Unexpected sync response: sender# [1:35:2064], cookie# 3 2024-11-21T17:45:20.992712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T17:45:20.992717Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> test.py::test[ansi_idents-inplace_yql-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Debug] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Stripe [GOOD] Test command err: 2024-11-21T17:45:19.625995Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [2:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T17:45:19.626055Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626062Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626066Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626069Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626074Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626077Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626081Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626085Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626089Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626093Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626096Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626100Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626103Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626106Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626110Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626113Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626116Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 
7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626119Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.626124Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:45:19.626137Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T17:45:19.626142Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T17:45:19.626147Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T17:45:19.626150Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T17:45:19.626155Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T17:45:19.626158Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T17:45:19.626162Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T17:45:19.626165Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T17:45:19.626169Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T17:45:19.626172Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T17:45:19.626177Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T17:45:19.626181Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T17:45:19.629048Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:45:19.629092Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T17:45:19.629099Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2024-11-21T17:45:19.629104Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2024-11-21T17:45:19.629108Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] 
restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2024-11-21T17:45:19.629111Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2024-11-21T17:45:19.629114Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2024-11-21T17:45:19.629117Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629120Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629123Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629127Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629131Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629133Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629136Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629139Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629142Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629145Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629148Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629151Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629156Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:45:19.629169Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T17:45:19.629173Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T17:45:19.629204Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T17:45:19.629228Z node 2 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T17:45:19.629236Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T17:45:19.629242Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T17:45:19.629253Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2024-11-21T17:45:19.629283Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2024-11-21T17:45:19.629288Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T17:45:19.629292Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629297Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629301Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629304Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629307Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629310Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2024-11-21T17:45:19.629313Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629316Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629319Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629322Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629325Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629328Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:19.629331Z node 2 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:45:19.629335Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T17:45:19.629338Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T17:45:19.629365Z node 2 :BS_PROXY_PUT INFO: [69a94228033ea6a6] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2024-11-21T17:45:19.629369Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2024-11-21T17:45:19.629372Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629375Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629378Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629381Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629395Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2024-11-21T17:45:19.629400Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2024-11-21T17:45:19.629403Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 error Marker# BPG50 2024-11-21T17:45:19.629406Z node 2 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2024-11-21T17:45:19.629416Z node 2 :BS_PROXY_PUT ERROR: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T17:45:19.629422Z node 2 :BS_PROXY_PUT NOTICE: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TSubscriberTest::NotifyUpdate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2024-11-21T17:45:04.060764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:04.060793Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:04.060798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:04.060802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:04.060817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:04.060820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:04.060828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:04.061124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:04.075431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:04.075457Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:04.093515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:04.093925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:04.093946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:45:04.096142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:04.096415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:04.096485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.096572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:04.097684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:04.097940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:04.097948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:04.097958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:04.097964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:04.097969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:04.097982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.143768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:45:04.143856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.143924Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:45:04.143969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:45:04.143978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.144746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.144776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:45:04.144831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.144845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:45:04.144850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:04.144856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:04.145263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.145276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:45:04.145281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:04.145591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.145601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.145606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:04.145624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:04.146174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:04.146493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:04.146563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:45:04.146738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.146745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:45:04.146749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.320386Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.320439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:04.320450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:04.320527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:04.320534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:04.320567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:04.320576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:04.321101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:04.321112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:04.321160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:04.321165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:04.321244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.321252Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:04.321262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:04.321266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:04.321271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:04.321276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:04.321281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:04.321284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:04.321292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:04.321298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:04.321302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:04.321621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:04.321632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:04.321636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:04.321641Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:04.321645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:04.321656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:04.321659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T1 ... : {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:45:21.222590Z node 23 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:45:21.222644Z node 23 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:45:21.222865Z node 23 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:45:21.222882Z node 23 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:45:21.222933Z node 23 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:45:21.223093Z node 23 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001db7/r3tmp/tmpaMyb1q/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:45:21.223156Z node 23 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 23:1 Path# /home/runner/.ya/build/build_root/6fwh/001db7/r3tmp/tmpaMyb1q/pdisk_1.dat 2024-11-21T17:45:21.234457Z node 23 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:45:21.234552Z node 23 :CONFIGS_DISPATCHER DEBUG: TConfigsDispatcher Bootstrap 2024-11-21T17:45:21.234619Z node 23 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:45:21.234630Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:45:21.234662Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:45:21.234698Z node 23 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:45:21.234727Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:377:2335], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.234736Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.234823Z node 23 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:45:21.234831Z node 23 :LOCAL DEBUG: 
TLocalNodeRegistrar::Bootstrap 2024-11-21T17:45:21.234835Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:45:21.234851Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[23:384:2339] 2024-11-21T17:45:21.234897Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:379:2333], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.234901Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.235429Z node 23 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:45:21.235437Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [23:377:2335] 2024-11-21T17:45:21.235565Z node 23 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[23:384:2339] 2024-11-21T17:45:21.235589Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:45:21.235597Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:45:21.235600Z node 23 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:45:21.238287Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:410:2346], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.238306Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.250819Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.250863Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:21.273174Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } } RawConsoleConfig { } } 2024-11-21T17:45:21.273204Z node 23 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:45:21.273251Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: AllowEditYamlInUiItem 2024-11-21T17:45:21.273266Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:410:2346]: Config { } ItemKinds: 75 Local: true 2024-11-21T17:45:21.273293Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T17:45:21.273304Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } } ItemKinds: 26 Local: true 2024-11-21T17:45:21.273328Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:45:21.273334Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:377:2335]: Config { } ItemKinds: 10 Local: true 2024-11-21T17:45:21.273338Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:45:21.273344Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:379:2333]: Config { } 
ItemKinds: 10 Local: true 2024-11-21T17:45:21.274451Z node 23 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: 2024-11-21T17:45:21.274485Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:377:2335], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:45:21.274491Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:45:21.274510Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:45:21.274513Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:45:21.274524Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:410:2346], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:45:21.274528Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:45:21.274536Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:379:2333], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:45:21.274540Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:45:21.291190Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableExternalHive: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } } } } 2024-11-21T17:45:21.291223Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:45:21.291270Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T17:45:21.291288Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableExternalHive: false } } ItemKinds: 26 Local: true 2024-11-21T17:45:21.291367Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:45:21.291372Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:45:21.304728Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 1 Generation: 1 
} Items { Kind: 26 Id: 2 Generation: 1 } } } } 2024-11-21T17:45:21.304749Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:45:21.304780Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T17:45:21.304797Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableExternalHive: false EnableDataShardVolatileTransactions: false } } ItemKinds: 26 Local: true 2024-11-21T17:45:21.304832Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:45:21.304837Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:45:21.319030Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [23:380:2334], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { FeatureFlags { EnableVolatileTransactionArbiters: false } Version { Items { Kind: 26 Id: 3 Generation: 1 } } } AffectedKinds: 26 RawConsoleConfig { FeatureFlags { EnableVolatileTransactionArbiters: false } Version { Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 1 Generation: 1 } Items { Kind: 26 Id: 2 Generation: 1 } Items { Kind: 26 Id: 3 Generation: 1 } } } } 2024-11-21T17:45:21.319050Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:45:21.319075Z node 23 :CONFIGS_DISPATCHER TRACE: Sending for kinds: FeatureFlagsItem 2024-11-21T17:45:21.319090Z node 23 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [23:434:2372]: Config { FeatureFlags { EnableVolatileTransactionArbiters: false } } ItemKinds: 26 Local: true 2024-11-21T17:45:21.319118Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [23:434:2372], Recipient [23:376:2334]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:45:21.319123Z node 23 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse >> test.py::test[join-mergejoin_force_one_sorted--Results] [GOOD] >> test.py::test[join-mergejoin_left_null_column--Debug] >> TSubscriberTest::NotifyDelete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] Test command err: 2024-11-21T17:45:03.643248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:03.643280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:03.643286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:03.643291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:03.643306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T17:45:03.643310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:03.643320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:03.643391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:03.646501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:03.646525Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:03.648409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:03.648584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:03.648604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:45:03.649517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:03.649613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:03.649689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.649779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:03.650309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:03.650592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:03.650602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:03.650615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:03.650622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:03.650627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:03.650644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.691213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:45:03.691292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.691354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:45:03.691398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:45:03.691406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 
2024-11-21T17:45:03.692674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.692710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:45:03.692762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.692774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:45:03.692779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:03.692784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:03.694187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.694204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:45:03.694208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:03.694600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.694618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.694623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:03.694641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:03.695160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:03.695568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:03.695629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:45:03.695835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.695843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:45:03.695848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.868313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.868381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:03.868394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:03.868480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:03.868488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:03.868527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:03.868539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:03.869108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:03.869120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:03.869169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:03.869175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:03.869255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.869263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:03.869272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:03.869275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:03.869281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:03.869285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:03.869289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:03.869292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:03.869300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:03.869304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:03.869306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:03.869596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:03.869611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:03.869616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:03.869621Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:03.869625Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:03.869639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:03.869643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T1 ... istered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:21.539842Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:21.539895Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 94489282736 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:21.539908Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:21.539978Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:21.539988Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:21.540030Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:21.540040Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:21.540800Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:21.540817Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:21.540884Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:21.540890Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [22:244:2234], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:21.540911Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:21.540919Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:21.540933Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:21.540938Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:21.540945Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:21.540951Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:21.540956Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:21.540960Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:21.540971Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, 
LocalPathId: 1] was 2 2024-11-21T17:45:21.540991Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:21.540995Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:21.541185Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:21.541194Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:21.541197Z node 22 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:21.541200Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:21.541204Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:21.541217Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:21.541220Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [22:95:2130] 2024-11-21T17:45:21.541924Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 1 2024-11-21T17:45:21.542003Z node 22 :TX_PROXY DEBUG: actor# [22:292:2274] Bootstrap 2024-11-21T17:45:21.542972Z node 22 :TX_PROXY DEBUG: actor# [22:292:2274] Become StateWork (SchemeCache [22:298:2279]) 2024-11-21T17:45:21.543207Z node 22 :TX_PROXY DEBUG: actor# [22:292:2274] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:45:21.543609Z node 22 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:45:21.544966Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:45:21.546316Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:45:21.546613Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:45:21.546714Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:45:21.547609Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:45:21.547620Z node 22 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:45:21.547660Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:45:21.549399Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:45:21.549424Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:45:21.549447Z node 22 :BS_CONTROLLER DEBUG: 
{BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:45:21.549465Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:45:21.549474Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:45:21.549481Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:45:21.571413Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:45:21.571477Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:45:21.585209Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:45:21.585264Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:45:21.585282Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:45:21.585296Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:45:21.585325Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:45:21.585337Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:45:21.585343Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:45:21.585352Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:45:21.596246Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:45:21.596294Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:45:21.596479Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:45:21.596486Z node 22 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:45:21.596530Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:45:21.596683Z node 22 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001dc9/r3tmp/tmpdzTGrD/pdisk_1.dat" } } } 
Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:45:21.596740Z node 22 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 22:1 Path# /home/runner/.ya/build/build_root/6fwh/001dc9/r3tmp/tmpdzTGrD/pdisk_1.dat 2024-11-21T17:45:21.607773Z node 22 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:45:21.607861Z node 22 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:45:21.607870Z node 22 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:45:21.607901Z node 22 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:45:21.607920Z node 22 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:45:21.608008Z node 22 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:45:21.608519Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:45:21.608529Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:45:21.608552Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[22:385:2341] 2024-11-21T17:45:21.608566Z node 22 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:45:21.608571Z node 22 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [22:380:2338] 2024-11-21T17:45:21.608708Z node 22 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[22:385:2341] 2024-11-21T17:45:21.608721Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:45:21.608730Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:45:21.608733Z node 22 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2024-11-21T17:45:21.267821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:21.268211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T17:45:21.269657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2024-11-21T17:45:21.269683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2024-11-21T17:45:21.269697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:34:2065] 2024-11-21T17:45:21.269706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2024-11-21T17:45:21.269725Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.269764Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2065] 2024-11-21T17:45:21.269772Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.269802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T17:45:21.269831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2065], cookie# 1 2024-11-21T17:45:21.269839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2024-11-21T17:45:21.269845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2024-11-21T17:45:21.269854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2024-11-21T17:45:21.269859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T17:45:21.269864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T17:45:21.269881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:34:2065], cookie# 1 2024-11-21T17:45:21.269888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:45:21.269893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:35:2065], cookie# 1 2024-11-21T17:45:21.269899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:45:21.269906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2065], cookie# 1 2024-11-21T17:45:21.269910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Unexpected sync response: sender# [1:36:2065], cookie# 1 >> TYardTest::TestLogLatency [GOOD] >> TYardTest::TestMultiYardLogLatency >> test.py::test[udf-complex_return_type--Results] [GOOD] >> test.py::test[udf-named_args--Debug] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> test.py::test[action-subquery_orderby2-default.txt-Debug] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Plan] [GOOD] >> test.py::test[action-subquery_orderby2-default.txt-Results] >> 
test.py::test[window-win_func_on_cloned_source-default.txt-Analyze] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Debug] |74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> test.py::test[aggr_factory-avg_if-default.txt-Debug] [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2024-11-21T17:45:21.534229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:21.534671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:21.534691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:21.534700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:21.534713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T17:45:21.534743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T17:45:21.534760Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.534771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T17:45:21.534777Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.534888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T17:45:21.534898Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:33:2064] 2024-11-21T17:45:21.534907Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Update to strong state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::InvalidNotification >> test.py::test[expr-tzdate_result-default.txt-Results] [GOOD] >> test.py::test[expr-untag-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2024-11-21T17:45:21.682120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:21.682385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T17:45:21.682399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2024-11-21T17:45:21.682403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2024-11-21T17:45:21.682410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:34:2065] 2024-11-21T17:45:21.682415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2024-11-21T17:45:21.682423Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.682445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2065] 2024-11-21T17:45:21.682450Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.682492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2024-11-21T17:45:21.682499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2024-11-21T17:45:21.682506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2024-11-21T17:45:21.682514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:34:2065] 2024-11-21T17:45:21.682519Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Path was updated to new version: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 
18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.682523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:35:2065] 2024-11-21T17:45:21.682526Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:21.682531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2065] 2024-11-21T17:45:21.682534Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } >> test.py::test[column_order-insert--Debug] [GOOD] >> test.py::test[column_order-insert--Plan] [GOOD] >> test.py::test[column_order-insert--Results] >> test.py::test[pg-select_common_type_intersect-default.txt-Debug] [GOOD] >> test.py::test[pg-select_common_type_intersect-default.txt-Plan] >> test.py::test[select-dict_lookup_by_key_with_def-default.txt-Results] [GOOD] >> test.py::test[select-optional_in_job--Debug] >> test.py::test[pg-select_common_type_intersect-default.txt-Plan] [GOOD] >> test.py::test[pg-select_common_type_intersect-default.txt-Results] >> TSubscriberTest::ReconnectOnFailure >> TSubscriberTest::InvalidNotification [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Debug] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Plan] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Analyze] >> test.py::test[aggregate-group_by_tz_date--Debug] [GOOD] >> test.py::test[aggregate-group_by_tz_date--Plan] [GOOD] >> test.py::test[aggregate-group_by_tz_date--Results] >> test.py::test[action-subquery_orderby2-default.txt-Results] [GOOD] >> test.py::test[agg_apply-avg_numeric_opt-default.txt-Debug] |74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> TSubscriberCombinationsTest::MigratedPathRecreation >> test.py::test[aggregate-agg_phases_table1-default.txt-Debug] [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica >> test.py::test[aggregate-agg_phases_table1-default.txt-Plan] [GOOD] >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] >> TSubscriberTest::ReconnectOnFailure [GOOD] |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |74.5%| [LD] {RESULT} 
$(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |74.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2024-11-21T17:45:22.638990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:22.639416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:22.639436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:22.639442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:22.639452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T17:45:22.639481Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T17:45:22.639494Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:22.639505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T17:45:22.639511Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:22.639552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:31:2063] 2024-11-21T17:45:22.639557Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:32:2064][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:31:2063] >> test.py::test[ansi_idents-order_by-default.txt-Debug] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |74.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> TYardTest::TestMultiYardLogLatency [GOOD] >> TYardTest::TestMultiYardFirstRecordToKeep >> test.py::test[udf-named_args--Debug] [GOOD] >> test.py::test[udf-named_args--Plan] [GOOD] >> test.py::test[udf-named_args--Results] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock [GOOD] >> TPDiskRaces::OwnerRecreationRaces |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> 
test.py::test[ansi_idents-basic_columns-default.txt-Debug] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2024-11-21T17:45:23.066002Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:23.066483Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:23.066508Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:23.066532Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T17:45:23.066558Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T17:45:23.066571Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:32:2064][path] Set up state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.066585Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:23.066600Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T17:45:23.066608Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.066686Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T17:45:23.066693Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.066699Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T17:45:23.066704Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there 
are 0 elements } 2024-11-21T17:45:23.066710Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T17:45:23.066715Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.077012Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:43:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:23.077049Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T17:45:23.077064Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.077098Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:44:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:23.077105Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:23.077116Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T17:45:23.077122Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.077134Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T17:45:23.077139Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.077247Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:43:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T17:45:23.077257Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:33:2064] 2024-11-21T17:45:23.077266Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:32:2064][path] Update to strong state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> test.py::test[window-win_func_on_cloned_source-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] |74.5%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> test.py::test[expr-untag-default.txt-Debug] [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TYardTest::TestMultiYardFirstRecordToKeep [GOOD] >> TYardTest::TestMultiYardLogMultipleWriteRead >> test.py::test[expr-untag-default.txt-Plan] [GOOD] >> test.py::test[expr-untag-default.txt-Results] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> TSubscriberTest::Boot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2024-11-21T17:45:23.333162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:23.333586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2024-11-21T17:45:23.333616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2024-11-21T17:45:23.333625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2024-11-21T17:45:23.333638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:34:2065] 2024-11-21T17:45:23.333647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:35:2065] 2024-11-21T17:45:23.333662Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.333701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:36:2065] 2024-11-21T17:45:23.333712Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.333737Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T17:45:23.333753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2065], cookie# 1 2024-11-21T17:45:23.333763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2024-11-21T17:45:23.333770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2024-11-21T17:45:23.333779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2024-11-21T17:45:23.333785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T17:45:23.333791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T17:45:23.333800Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:34:2065], cookie# 1 2024-11-21T17:45:23.333807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:45:23.333814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:35:2065], cookie# 1 2024-11-21T17:45:23.333821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:45:23.333828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2065], cookie# 1 2024-11-21T17:45:23.333832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Unexpected sync response: sender# [1:36:2065], cookie# 1 >> test.py::test[window-lagging/aggregations--Debug] >> test.py::test[udf-named_args--Results] [GOOD] >> test.py::test[pg-select_common_type_intersect-default.txt-Results] [GOOD] >> test.py::test[produce-native_desc_reduce_with_presort--Results] [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> test.py::test[produce-process_streaming-default.txt-Debug] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> test.py::test[select-opt_list_access-default.txt-Debug] [GOOD] >> test.py::test[select-opt_list_access-default.txt-Plan] [GOOD] >> test.py::test[select-opt_list_access-default.txt-Results] >> TYardTest::TestMultiYardLogMultipleWriteRead [GOOD] >> TYardTest::TestLogOverwriteRestarts >> test.py::test[aggregate-group_by_cube_join_count--Debug] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Plan] [GOOD] >> test.py::test[udf-udf--Debug] >> test.py::test[pg-select_intersect-default.txt-Debug] >> YdbSdkSessionsPool::StressTestSync1 [GOOD] |74.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |74.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |74.6%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> test.py::test[aggregate-group_by_cube_join_count--Results] >> 
test.py::test[pg-tpch-q18-default.txt-Analyze] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-Debug] >> test.py::test[aggregate-group_by_tz_date--Results] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Debug] >> test.py::test[pg-tpch-q18-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2024-11-21T17:45:23.344248Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T17:45:23.344269Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T17:45:23.344286Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T17:45:23.344290Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T17:45:23.344294Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:33:2065] 2024-11-21T17:45:23.344296Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2024-11-21T17:45:23.344317Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:33:2065] 2024-11-21T17:45:23.344319Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2024-11-21T17:45:23.344328Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:23.344364Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:39:2067] 2024-11-21T17:45:23.344367Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2024-11-21T17:45:23.344387Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:39:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:23.344399Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:40:2067] 2024-11-21T17:45:23.344401Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2024-11-21T17:45:23.344404Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:40:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:23.344414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2067] 2024-11-21T17:45:23.344416Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2024-11-21T17:45:23.344419Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:41:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:23.344426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:23.344432Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:39:2067] 2024-11-21T17:45:23.344436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:23.344439Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:40:2067] 2024-11-21T17:45:23.344442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:23.344445Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2067] 2024-11-21T17:45:23.344451Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:36:2067] 2024-11-21T17:45:23.344468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:37:2067] 2024-11-21T17:45:23.344477Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Set up state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.344481Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2067] 2024-11-21T17:45:23.344485Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2067][/root/db/dir_inside] Ignore empty state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2024-11-21T17:45:23.344517Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:32:2064], cookie# 0, event size# 118 2024-11-21T17:45:23.344521Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2024-11-21T17:45:23.345092Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T17:45:23.345124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2024-11-21T17:45:23.345131Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:39:2067] 2024-11-21T17:45:23.345137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:36:2067] 2024-11-21T17:45:23.345144Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: 
[main][1:35:2067][/root/db/dir_inside] Update to strong state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2024-11-21T17:45:23.345175Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:33:2065], cookie# 0, event size# 117 2024-11-21T17:45:23.345178Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2024-11-21T17:45:23.345182Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T17:45:23.345189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2024-11-21T17:45:23.345193Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:40:2067] 2024-11-21T17:45:23.345199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:37:2067] 2024-11-21T17:45:23.345204Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Path was updated to new version: owner# [1:34:2066], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.756279Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:23.756358Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2024-11-21T17:45:23.756365Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2024-11-21T17:45:23.756370Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2024-11-21T17:45:23.756376Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:33:2064] 2024-11-21T17:45:23.756386Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:34:2064] 2024-11-21T17:45:23.756392Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:32:2064][path] Set up state: owner# [3:31:2063], 
state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:23.756397Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2064] 2024-11-21T17:45:23.756401Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:32:2064][path] Ignore empty state: owner# [3:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |74.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/pytest >> test.py::test[aggr_factory-avg_if-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-avg_numeric_opt-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-avg_numeric_opt-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-avg_numeric_opt-default.txt-Results] >> test.py::test[column_order-insert--Results] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all--Results] [GOOD] >> test.py::test[window-win_func_percent_rank-default.txt-Debug] >> test.py::test[window-win_func_first_last_with_part--Analyze] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Debug] >> ColumnShardTiers::TTLUsage >> test.py::test[bigdate-bitcast_datetime64-default.txt-Debug] >> test.py::test[column_order-values-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] Test command err: 2024-11-21T17:45:12.237284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790078599885130:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:12.237330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020f6/r3tmp/tmpe4PF8l/pdisk_1.dat 2024-11-21T17:45:12.914991Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:12.953883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:12.953909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:12.965351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21341, node 1 2024-11-21T17:45:13.228717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:13.228728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:13.228730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:13.228764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:13.700314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:13.705145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:13.705680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:13.712624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:13.712679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:13.712683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:45:13.713550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:13.713554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:45:13.720432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:13.721223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:13.728948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211113773, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:13.728963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:45:13.729035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:45:13.736657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:13.736718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:13.736729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:45:13.736743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2024-11-21T17:45:13.736752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:45:13.736767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:45:13.738151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:45:13.738161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:45:13.738165Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:13.738178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:45:17.234886Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790078599885130:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:17.234910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[aggregate-agg_phases_table1-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Debug] >> test.py::test[expr-untag-default.txt-Results] [GOOD] >> test.py::test[file-second_pass_parse_file_fail--Debug] [SKIPPED] >> test.py::test[file-second_pass_parse_file_fail--Plan] [SKIPPED] >> test.py::test[file-second_pass_parse_file_fail--Results] >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> test.py::test[select-optional_in_job--Debug] [GOOD] >> test.py::test[select-optional_in_job--Plan] [GOOD] >> test.py::test[select-optional_in_job--Results] >> TContinuousBackupTests::TakeIncrementalBackup >> TContinuousBackupTests::Basic |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[join-mergejoin_left_null_column--Debug] [GOOD] >> test.py::test[join-mergejoin_left_null_column--Plan] |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> test.py::test[join-mergejoin_left_null_column--Plan] [GOOD] >> test.py::test[join-mergejoin_left_null_column--Results] >> ColumnShardTiers::TieringUsage >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-Debug] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-Plan] [GOOD] >> test.py::test[ansi_idents-string_escaping-default.txt-Results] >> test.py::test[pg-select_intersect-default.txt-Debug] [GOOD] >> TContinuousBackupTests::Basic [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Plan] >> test.py::test[pg-select_intersect-default.txt-Plan] [GOOD] >> test.py::test[pg-select_intersect-default.txt-Results] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> 
TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> test.py::test[window-win_func_on_cloned_source-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig >> test.py::test[produce-process_streaming-default.txt-Debug] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Plan] [GOOD] >> test.py::test[produce-process_streaming-default.txt-Results] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Debug] [GOOD] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Plan] [GOOD] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:25.536481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:25.536511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:25.536515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:25.536518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:25.536523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:25.536526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:25.536532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:25.536608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:25.547042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:25.547063Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:25.556377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:25.556967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:25.556992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:25.558505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:25.558681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:25.558752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.558797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T17:45:25.560128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:25.560352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:25.560360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:25.560391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:25.560398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:25.560404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:25.560416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.561823Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:25.647731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:25.648053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.648107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:25.648145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:25.648152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.652823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.652859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:25.652914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.652928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:25.652933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:25.652938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:25.660795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.660826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:25.660832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 
2024-11-21T17:45:25.664932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.664953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.664960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:25.664966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:25.666835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:25.668580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:25.668652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:25.668916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.668945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:25.668956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:25.669009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:25.669017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:25.669043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:25.669055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:25.672814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:25.672824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:25.672862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:25.672867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:25.673389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:45:25.673397Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:25.673409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:25.673412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:25.673417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:25.673421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:25.673426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:25.673430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:25.673443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:25.673448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:25.673451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:25.674869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:25.674880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:25.674884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:25.674888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:25.674892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:25.674905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
594046678944, LocalPathId: 2], version: 6 2024-11-21T17:45:26.031125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:45:26.031134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T17:45:26.031172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:45:26.031178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:45:26.031181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:45:26.031184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T17:45:26.031589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:45:26.031907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:45:26.031922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:45:26.031933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:45:26.031963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:45:26.031967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:45:26.043332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 197 } } 2024-11-21T17:45:26.043346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T17:45:26.043365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 197 } } 2024-11-21T17:45:26.043379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 197 } } FAKE_COORDINATOR: Erasing txId 104 2024-11-21T17:45:26.043592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 
2024-11-21T17:45:26.043598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T17:45:26.043608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:45:26.043613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:45:26.043619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:45:26.043628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:26.043632Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:45:26.043636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:45:26.043642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T17:45:26.044673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:45:26.044735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:45:26.044782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:45:26.044787Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:45:26.044797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2024-11-21T17:45:26.044800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T17:45:26.044805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T17:45:26.044815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 104 2024-11-21T17:45:26.044820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T17:45:26.044826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:45:26.044832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:45:26.044854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:45:26.044858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T17:45:26.044862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2024-11-21T17:45:26.044866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:45:26.044870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T17:45:26.044872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T17:45:26.044880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:45:26.044925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:45:26.044929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:45:26.044938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:45:26.044943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:45:26.044948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:45:26.046927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:45:26.046936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:728:2631] 2024-11-21T17:45:26.047002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2024-11-21T17:45:26.047079Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:45:26.047114Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 42us result status StatusPathDoesNotExist 2024-11-21T17:45:26.047155Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:45:26.047209Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, 
at schemeshard: 72057594046678944 2024-11-21T17:45:26.047221Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 14us result status StatusPathDoesNotExist 2024-11-21T17:45:26.047236Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[select-opt_list_access-default.txt-Results] [GOOD] >> TPQCDTest::TestDiscoverClusters >> test.py::test[select-refselect--Debug] [SKIPPED] >> test.py::test[select-refselect--Plan] [SKIPPED] >> test.py::test[select-refselect--Results] [SKIPPED] >> test.py::test[select-result_size_limit--Debug] [SKIPPED] >> test.py::test[udf-udf--Debug] [GOOD] >> test.py::test[udf-udf--Plan] [GOOD] >> test.py::test[udf-udf--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/unittest >> CostMetricsGetMirror3dc::TestGetMirror3dcRequests10000Inflight1000BlobSize1000 [GOOD] Test command err: RandomSeed# 9837510320353275786 2024-11-21T17:44:30.667300Z 1 00h00m00.010512s :BS_LOCALRECOVERY CRIT: VDISK[82000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# false EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 
TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {[SyncerState 12][HugeBlobEntryPoint 1]} ReadLogReplies# {}} reason# Entry point for Syncer check failed, ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 19 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } CanLoadFrom { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 6 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 4 } StoresReadableBy { Application: "ydb" LowerLimit { Year: 0 Major: 0 Minor: 0 Hotfix: 0 } UpperLimit { Year: 1000 Major: 1000 Minor: 1000 Hotfix: 1000 } ComponentId: 6 } } status# ERROR;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:44:30.669791Z 1 00h00m30.000512s :BS_PROXY_PUT ERROR: [fe7bfe57e6413de9] Result# TEvPutResult {Id# [1:1:1:1:3:4:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> test.py::test[select-result_size_limit--Plan] [SKIPPED] >> test.py::test[select-result_size_limit--Results] [SKIPPED] >> test.py::test[column_order-values-default.txt-Debug] [GOOD] >> test.py::test[column_order-values-default.txt-Plan] [GOOD] >> test.py::test[column_order-values-default.txt-Results] >> test.py::test[select-scalar_subquery_with_star-default.txt-Debug] >> TPQCDTest::TestPrioritizeLocalDatacenter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:25.258853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:25.258880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:25.258886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:25.258891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:25.258897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:25.258901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:25.258910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:25.258988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:25.270354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:25.270377Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:25.273061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:25.273860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:25.273893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:25.275154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:25.275364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:25.275445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.275500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:25.276325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:25.276594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:25.276604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:25.276643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:25.276651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:25.276657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:25.276669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.277947Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:25.294361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:25.294453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.294514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:25.294556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:25.294564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.295250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.295275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:25.295317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.295328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:25.295333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:25.295338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:25.295849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.295859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:25.295863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:25.296250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.296260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.296265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:25.296271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:25.296853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:25.297222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:25.297268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:25.297441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.297463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:25.297471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:25.297519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:25.297525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:25.297552Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:25.297565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:25.297950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:25.297957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:25.297995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:25.298004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:25.298081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:25.298088Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:25.298099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:25.298103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:25.298108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:25.298113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:25.298118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:25.298122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:25.298132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:25.298137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:25.298141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:25.298418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:25.298429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:25.298434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:25.298439Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:25.298443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:25.298455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.926153Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.926158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:45:25.926164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2024-11-21T17:45:25.926291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.928364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.928441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T17:45:25.928449Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2024-11-21T17:45:25.928461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2024-11-21T17:45:25.928466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2024-11-21T17:45:25.928472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2024-11-21T17:45:25.928483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 103 2024-11-21T17:45:25.928489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2024-11-21T17:45:25.928496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:45:25.928501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:45:25.928511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:45:25.928516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T17:45:25.928519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T17:45:25.928531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:45:25.928535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T17:45:25.928539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T17:45:25.928546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:45:25.928550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2024-11-21T17:45:25.928557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2024-11-21T17:45:25.928566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:45:25.929125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:45:25.929134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:746:2625] TestWaitNotification: OK eventTxId 103 2024-11-21T17:45:25.929234Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:45:25.929282Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 54us result status StatusSuccess 2024-11-21T17:45:25.930712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:25.930805Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:45:25.930833Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 32us result status StatusSuccess 2024-11-21T17:45:25.930928Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:25.931067Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:45:25.931085Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 19us result status StatusSuccess 2024-11-21T17:45:25.931149Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK 
} } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TDSProxyGetTest::TestMirror32GetIntervalsWipedAllOk [GOOD] >> TDSProxyPatchTest::NaiveOk_ErasureMirror3dc >> test.py::test[window-win_func_percent_rank-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_percent_rank-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_percent_rank-default.txt-Results] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> test.py::test[agg_apply-avg_numeric_opt-default.txt-Results] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Debug] >> TDSProxyPatchTest::NaiveOk_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_0_0_VdiskErrors >> test.py::test[file-second_pass_parse_file_fail--Results] [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_0_0_VdiskErrors [GOOD] >> YdbSdkSessionsPool::StressTestSync10 [GOOD] >> test.py::test[flatten_by-flatten_columns-default.txt-Debug] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> test.py::test[window-win_func_first_last_with_part--Debug] [GOOD] >> test.py::test[window-win_func_first_last_with_part--ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> TPartBtreeIndexIteration::NoNodes_Groups >> test.py::test[ansi_idents-string_escaping-default.txt-Results] [GOOD] >> test.py::test[bigdate-output_timestamp64-default.txt-Debug] >> test.py::test[bigdate-bitcast_datetime64-default.txt-Results] [GOOD] >> test.py::test[bigdate-common_type-default.txt-Debug] >> test.py::test[pg-tpch-q18-default.txt-Debug] [GOOD] >> 
test.py::test[pg-tpch-q18-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD] Test command err: 2024-11-21T17:45:27.842932Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] bootstrap ActorId# [3:73:2119] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T17:45:27.843212Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843217Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843220Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843223Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843226Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843228Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843231Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843234Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843237Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843239Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843242Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843245Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843248Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843250Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843253Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843256Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843259Z node 3 :BS_PROXY_PUT DEBUG: 
[69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843261Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:45:27.843266Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:45:27.843276Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2024-11-21T17:45:27.843281Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2024-11-21T17:45:27.843285Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2024-11-21T17:45:27.843288Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2024-11-21T17:45:27.843292Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2024-11-21T17:45:27.843294Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2024-11-21T17:45:27.843299Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2024-11-21T17:45:27.843302Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2024-11-21T17:45:27.843305Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2024-11-21T17:45:27.843308Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2024-11-21T17:45:27.843313Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2024-11-21T17:45:27.843316Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2024-11-21T17:45:27.888111Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:45:27.888182Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2024-11-21T17:45:27.888194Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2024-11-21T17:45:27.888204Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# 
[72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2024-11-21T17:45:27.888218Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2024-11-21T17:45:27.900254Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2024-11-21T17:45:27.900313Z node 3 :BS_PROXY_PUT DEBUG: [69a94228033ea6a6] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2024-11-21T17:45:27.900321Z node 3 :BS_PROXY_PUT INFO: [69a94228033ea6a6] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD] Test command err: 2024-11-21T17:45:15.030819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790089862843611:2209];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:15.030841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020fb/r3tmp/tmpeEMSwn/pdisk_1.dat 2024-11-21T17:45:15.909122Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:16.131176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:16.131197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:16.144553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23916, node 1 2024-11-21T17:45:16.600796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:16.600807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:16.600809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:16.600838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:17.145819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:17.160190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:17.160222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:17.180547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:17.180647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:17.180653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:45:17.184738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:17.184750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:45:17.185767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:17.186852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211117231, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:17.186857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:45:17.186911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:45:17.187927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:17.187964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:17.187971Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:45:17.187981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:45:17.187989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:45:17.187998Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T17:45:17.189505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:45:17.189784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:45:17.189787Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:17.189798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 2024-11-21T17:45:17.192564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:20.032519Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790089862843611:2209];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:20.032551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test.py::test[column_order-values-default.txt-Results] [GOOD] >> test.py::test[compute_range-huge_in-default.txt-Debug] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Plan] [SKIPPED] >> test.py::test[compute_range-huge_in-default.txt-Results] [SKIPPED] >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey >> KikimrIcGateway::TestListPath |74.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestMergeConfig >> test.py::test[udf-udf--Results] [GOOD] >> test.py::test[union-union_mix-default.txt-Debug] >> YdbSdkSessionsPool::CustomPlan [GOOD] >> YdbSdkSessionsPool::FailTest >> test.py::test[pg-select_intersect-default.txt-Results] [GOOD] |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> test.py::test[pg-select_join_full3-default.txt-Debug] >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> test.py::test[agg_apply-opt_len_count-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-opt_len_count-default.txt-Results] >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> test.py::test[window-lagging/aggregations--Debug] [GOOD] >> test.py::test[window-lagging/aggregations--Plan] [GOOD] >> test.py::test[window-lagging/aggregations--Results] >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> test.py::test[window-win_func_percent_rank-default.txt-Results] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Debug] >> KikimrIcGateway::TestLoadExternalTable >> test.py::test[produce-process_streaming-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_list-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_all_list-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_lambda-default.txt-Debug] >> test.py::test[join-mergejoin_left_null_column--Results] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Debug] >> test.py::test[aggregate-group_by_cube_join_count--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_dict--Debug] >> KikimrIcGateway::TestLoadTableMetadata >> KikimrIcGateway::TestCreateExternalTable >> test.py::test[select-optional_in_job--Results] [GOOD] >> test.py::test[select-optional_pull--Debug] >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> test.py::test[window-win_func_on_cloned_source-default.txt-Results] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Analyze] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> test.py::test[aggregate-aggregate_distinct_struct_access-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_inmem_list_in_key-default.txt-Debug] >> test.py::test[bigdate-output_timestamp64-default.txt-Debug] [GOOD] >> 
test.py::test[bigdate-output_timestamp64-default.txt-Plan] [GOOD] >> test.py::test[bigdate-output_timestamp64-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2024-11-21T17:45:09.561561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790064397221444:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:09.745191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002116/r3tmp/tmpDqAu6o/pdisk_1.dat 2024-11-21T17:45:09.850256Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:09.863202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:09.863226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:09.881306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15693, node 1 2024-11-21T17:45:09.976529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:09.976540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:09.976541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:09.976580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:45:10.164356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:10.169642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:10.169909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:10.172694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:10.172742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:10.172746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:45:10.174232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:10.176730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:10.176739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:45:10.177062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:10.177699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211110224, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:10.177708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:45:10.177753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:45:10.178498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:10.178530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:10.178538Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:45:10.178545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:45:10.178552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:45:10.178561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:45:10.179169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:45:10.179178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:45:10.179182Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:10.179190Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:45:14.564382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790064397221444:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:14.564420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:24.841654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:45:24.841674Z node 1 :IMPORT WARN: Table profiles were not loaded >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestRemoveTenant >> test.py::test[select-scalar_subquery_with_star-default.txt-Debug] [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2024-11-21T17:45:27.822516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790141013441345:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:27.822573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ca8/r3tmp/tmp5QujuB/pdisk_1.dat 2024-11-21T17:45:28.118058Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:28.161498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:28.161527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:28.162786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12558, node 1 2024-11-21T17:45:28.198618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001ca8/r3tmp/yandexg0Vyeo.tmp 2024-11-21T17:45:28.198631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001ca8/r3tmp/yandexg0Vyeo.tmp 2024-11-21T17:45:28.198685Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001ca8/r3tmp/yandexg0Vyeo.tmp 2024-11-21T17:45:28.198700Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10281 PQClient connected to localhost:12558 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:28.400686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:28.413248Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:28.543161Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:28.752536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:31.009700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790153898343781:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:31.010797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:31.010817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790158193311083:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:31.031636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:45:31.494317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790158193311123:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:31.494968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:31.503276Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T17:45:31.507527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:31.508509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790158193311094:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:45:31.596628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:31.618421Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790158193311274:2325], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:31.618999Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODQ0YzEyNWEtMTU0ODg4NjItZGY2NGY2YjUtZWM2Yzk0Mzc=, ActorId: [1:7439790153898343763:2301], ActorState: ExecuteState, TraceId: 01jd7x8gg6f5m9xr7j7bxqpgfs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:31.619471Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:31.726053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:45:32.057215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd7x8hfm8h7m2q20ja4g8skt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcxZGNhNjctYmUwZDg0ODgtNjdjOWE0YTItNzYwNjFjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:45:32.128464Z node 1 :HTTP WARN: [::1]:40418 anonymous GET /actors/pqcd/health 2024-11-21T17:45:32.256379Z node 1 :HTTP WARN: [::1]:40424 anonymous GET /actors/pqcd/health 2024-11-21T17:45:32.380406Z node 1 :HTTP WARN: [::1]:40440 anonymous GET /actors/pqcd/health 2024-11-21T17:45:32.496558Z node 1 :HTTP WARN: [::1]:40456 anonymous GET /actors/pqcd/health 2024-11-21T17:45:32.610559Z node 1 :HTTP WARN: [::1]:40462 anonymous GET /actors/pqcd/health 2024-11-21T17:45:32.716404Z node 1 :HTTP WARN: [::1]:40476 anonymous GET /actors/pqcd/health 2024-11-21T17:45:32.824904Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790141013441345:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:32.824933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:32.848345Z node 1 :HTTP WARN: [::1]:40486 anonymous GET /actors/pqcd/health >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> TPQCDTest::TestDiscoverClusters [GOOD] >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> test.py::test[agg_apply-opt_len_count-default.txt-Results] [GOOD] |74.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2024-11-21T17:45:26.878323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790136766769852:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:26.878469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001cc9/r3tmp/tmpbKKd6E/pdisk_1.dat 2024-11-21T17:45:27.156839Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:27.190630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:27.190656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:27.197045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25628, node 1 2024-11-21T17:45:27.364747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001cc9/r3tmp/yandexOL2hKG.tmp 2024-11-21T17:45:27.364760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001cc9/r3tmp/yandexOL2hKG.tmp 2024-11-21T17:45:27.364813Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001cc9/r3tmp/yandexOL2hKG.tmp 2024-11-21T17:45:27.364852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23341 PQClient connected to 
localhost:25628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:27.795329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T17:45:29.308108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790149651672426:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:29.308146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:29.312275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790149651672447:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:29.314659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:45:29.415562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790149651672479:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:29.415951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:29.416596Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T17:45:29.416684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790149651672449:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:45:29.446222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:29.561049Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790149651672582:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:29.561938Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTEwZjk5MGItN2FhMDE0ZC02OTZkMDhiLWE3YmMzZmM5, ActorId: [1:7439790149651672423:2300], ActorState: ExecuteState, TraceId: 01jd7x8eym0dkgsgs1refd26xe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:29.603229Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:29.609076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:29.653677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:45:29.769496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd7x8fbjcffdrys26vz2awt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiOTMwNmUtNWY0YTYwYWMtODQyNDIyOWUtNzE0N2U4NTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:29.790774Z node 1 :HTTP WARN: [::1]:57894 anonymous GET /actors/pqcd/health 2024-11-21T17:45:29.898372Z node 1 :HTTP WARN: [::1]:57900 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.010401Z node 1 :HTTP WARN: [::1]:57910 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.130163Z node 1 :HTTP WARN: [::1]:57916 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.237341Z node 1 :HTTP WARN: [::1]:57928 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.344305Z node 1 :HTTP WARN: [::1]:57932 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.460485Z node 1 :HTTP WARN: [::1]:57936 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.570048Z node 1 :HTTP WARN: [::1]:57948 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.678278Z node 1 :HTTP WARN: [::1]:57964 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.784419Z node 1 :HTTP WARN: [::1]:57980 anonymous GET /actors/pqcd/health 2024-11-21T17:45:30.886818Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jd7x8ges55v420knf10dpcyv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RiMDNhN2ItNWIzNDA1OGItODNmZTJkZjUtYjVmNzMwZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:30.899296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. 
Ctx: { TraceId: 01jd7x8ges55v420knf10dpcyv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RiMDNhN2ItNWIzNDA1OGItODNmZTJkZjUtYjVmNzMwZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:31.874362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jd7x8he910rb302x5z1ahz9q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ1ZWI1N2QtOTg4ZDI0YjUtOTc0NTg4ZDctYTMyNWM2MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:31.876528Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd7x8he910rb302x5z1ahz9q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ1ZWI1N2QtOTg4ZDI0YjUtOTc0NTg4ZDctYTMyNWM2MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:31.880355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790136766769852:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:31.880421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:32.954395Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jd7x8jfvcqhzz1956jdp9qc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjljOTVkNzMtZDU0NWM3ZGItMWJhZTM2NDAtNjYzMjIyMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:32.958393Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jd7x8jfvcqhzz1956jdp9qc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjljOTVkNzMtZDU0NWM3ZGItMWJhZTM2NDAtNjYzMjIyMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:33.953010Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jd7x8kexe9tdjxarv7k6pzms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEyMGY2M2EtZmIwOTI0YjUtNzM4MDQzM2UtNWJhNjI2YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:33.954692Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jd7x8kexe9tdjxarv7k6pzms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEyMGY2M2EtZmIwOTI0YjUtNzM4MDQzM2UtNWJhNjI2YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:45:33.970773Z node 1 :HTTP WARN: [::1]:45380 anonymous GET /actors/pqcd >> test.py::test[agg_apply-pg_int8-default.txt-Debug] >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata >> YdbSdkSessionsPool::FailTest [GOOD] >> test.py::test[flatten_by-flatten_columns-default.txt-Debug] [GOOD] >> test.py::test[flatten_by-flatten_columns-default.txt-Plan] [GOOD] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> KikimrIcGateway::TestDropTable [GOOD] >> test.py::test[bigdate-common_type-default.txt-Debug] [GOOD] >> KikimrIcGateway::TestDropResourcePool >> test.py::test[bigdate-common_type-default.txt-Plan] [GOOD] >> test.py::test[bigdate-common_type-default.txt-Results] >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck [GOOD] >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> test.py::test[pg-select_join_full3-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_full3-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_full3-default.txt-Results] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> test.py::test[aggregate-group_by_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Debug] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool |74.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[compute_range-huge_in-default.txt-Results] [SKIPPED] >> test.py::test[bigdate-output_timestamp64-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Debug] >> test.py::test[aggregate-aggregate_inmem_list_in_key-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_inmem_list_in_key-default.txt-Plan] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2024-11-21T17:45:18.283175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790104708991092:2122];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:18.283258Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00209d/r3tmp/tmpVfUFr1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12953, node 1 TClient is connected to server localhost:22699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:18.957409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:18.957422Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:18.957425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:18.957463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:18.957623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:45:18.957625Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:18.958347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.959109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:18.959118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.963197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:18.963262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:18.963266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:45:18.968493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:18.968748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:18.968754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:45:18.969376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.971951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211119016, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:18.971959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:45:18.972007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:45:18.972504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:18.972540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:18.972547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:45:18.972554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:45:18.972560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:45:18.972570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:45:18.976535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:45:18.976547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:45:18.976551Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:18.976799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:45:19.104604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:19.104628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:19.112688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:23.283561Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790104708991092:2122];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:23.283601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:32.892741Z node 4 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790163375112415:2185];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:32.892772Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00209d/r3tmp/tmp4BrLXS/pdisk_1.dat 2024-11-21T17:45:33.312411Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:33.333034Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:33.333054Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:33.338391Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30007, node 4 2024-11-21T17:45:33.489939Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:33.489951Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:33.489953Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:33.489992Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:33.682138Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.682449Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:33.682456Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.684674Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:33.684722Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:33.684726Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:45:33.689133Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:33.692706Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:33.692715Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:45:33.700667Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.712530Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211133751, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:33.712541Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:45:33.712600Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:45:33.720576Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:33.720622Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:33.720632Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:45:33.720643Z node ... meBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:34.256034Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:34.256045Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T17:45:34.256090Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:34.256098Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:34.256106Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:34.256715Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.256725Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.256729Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:45:34.256763Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.256766Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.256767Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:45:34.256777Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.256780Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.256781Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:45:34.256791Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.256793Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.256794Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:45:34.256804Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.256806Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.256807Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T17:45:34.257233Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:45:34.258420Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211134304, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:34.258433Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211134304, at schemeshard: 72057594046644480 2024-11-21T17:45:34.258457Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:45:34.258474Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211134304, at schemeshard: 72057594046644480 2024-11-21T17:45:34.258481Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T17:45:34.258487Z node 4 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211134304, at schemeshard: 72057594046644480 2024-11-21T17:45:34.258495Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T17:45:34.258502Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732211134304 2024-11-21T17:45:34.258508Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T17:45:34.258960Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:34.259049Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2024-11-21T17:45:34.259061Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T17:45:34.259071Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T17:45:34.259098Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T17:45:34.259102Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T17:45:34.259110Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:45:34.259116Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T17:45:34.259127Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T17:45:34.259131Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T17:45:34.259136Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:45:34.259143Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T17:45:34.259145Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T17:45:34.259147Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T17:45:34.259151Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T17:45:34.259684Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.259691Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.259694Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:45:34.259723Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.259725Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.259727Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T17:45:34.259737Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.259739Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.259740Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:45:34.259750Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.259763Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.259764Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:45:34.259774Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:45:34.259776Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:45:34.259777Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T17:45:34.259782Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:45:34.260602Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439790171965047629:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:45:34.326336Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:45:34.326388Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:45:34.332592Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> test.py::test[pg-tpch-q18-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] >> test.py::test[union-union_mix-default.txt-Debug] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Analyze] [GOOD] >> test.py::test[union-union_mix-default.txt-Plan] [GOOD] >> test.py::test[union-union_mix-default.txt-Results] >> test.py::test[window-win_fuse_window-default.txt-Debug] >> test.py::test[aggregate-aggregate_inmem_list_in_key-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_inmem_list_in_key-default.txt-Results] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool [GOOD] >> test.py::test[window-win_func_first_last_with_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Plan] [GOOD] >> test.py::test[window-win_func_first_last_with_part--Results] |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> NFwd_TFlatIndexCache::End [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 
ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 
3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 
Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 
| | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 0} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | 
ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 
1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 
Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 1909, MsgBus: 10614 2024-11-21T17:45:30.646848Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790156626061364:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:31.114075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013c3/r3tmp/tmpTcZMYo/pdisk_1.dat 2024-11-21T17:45:31.478960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:31.478987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:31.482706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:31.487867Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1909, node 1 2024-11-21T17:45:31.728546Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:31.728558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:31.728560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:31.728595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10614 TClient is connected to server localhost:10614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:45:32.326107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:32.340723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:33.368771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790169510963785:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:33.368800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:33.523811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.609933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.627915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.639327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.711222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790169510964092:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:33.711243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:33.711309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790169510964097:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:33.712478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2024-11-21T17:45:33.719022Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2024-11-21T17:45:33.719055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790169510964099:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } Trying to start YDB, gRPC: 4400, MsgBus: 25816 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013c3/r3tmp/tmpMy2UGK/pdisk_1.dat 2024-11-21T17:45:34.212332Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:34.224622Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4400, node 2 2024-11-21T17:45:34.235056Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:34.235069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:34.235071Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:34.235109Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25816 TClient is connected to server localhost:25816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:34.311036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:34.311065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:34.312085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.313771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:34.313847Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:34.325738Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:45:34.573909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790170916784378:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.573932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.579153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.638640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.647888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.655255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.662732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790170916784683:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.662760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790170916784688:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.662772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.663351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2024-11-21T17:45:34.668348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790170916784690:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking }
: Info: Success, code: 4 2024-11-21T17:45:34.767941Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found Trying to start YDB, gRPC: 29561, MsgBus: 13637 2024-11-21T17:45:34.919479Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790172618343789:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:34.933182Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013c3/r3tmp/tmpUWOQkM/pdisk_1.dat 2024-11-21T17:45:34.945559Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29561, node 3 2024-11-21T17:45:34.959410Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:34.959424Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:34.959426Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:34.959474Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13637 TClient is connected to server localhost:13637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:35.019261Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:35.019294Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:35.020389Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:35.022567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:35.024579Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:35.029519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 >> test.py::test[bigdate-common_type-default.txt-Results] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Debug] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 13350, MsgBus: 62194 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00137a/r3tmp/tmpmVat70/pdisk_1.dat 2024-11-21T17:45:33.629442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790168380735051:2261];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:33.636589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:33.716027Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13350, node 1 2024-11-21T17:45:33.844535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:33.844565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:33.848759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:33.849730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:33.849738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:33.849739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:33.849778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62194 TClient is connected to server localhost:62194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:45:34.163475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.166154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:34.177682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T17:45:34.180814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6345, MsgBus: 19706 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00137a/r3tmp/tmprBpyrY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6345, node 2 2024-11-21T17:45:34.604986Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:34.607181Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:34.607189Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:34.607191Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:34.607232Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19706 TClient is connected to server localhost:19706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:34.679904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:34.679938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:34.681016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:45:34.682229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.684741Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:34.696276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11270, MsgBus: 62471 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00137a/r3tmp/tmpJGrJQv/pdisk_1.dat 2024-11-21T17:45:35.024056Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790175813934998:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:35.024073Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:35.038161Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11270, node 3 2024-11-21T17:45:35.051506Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:35.051523Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:35.051526Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:35.051571Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62471 TClient is connected to server localhost:62471 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:35.124372Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:35.124400Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:35.125415Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:35.128085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:35.132613Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:35.141312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:45:35.144092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> test.py::test[agg_apply-pg_int8-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-pg_int8-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-pg_int8-default.txt-Results] |74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TCertificateCheckerTest::CheckSubjectDns >> test.py::test[window-lagging/aggregations--Results] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Debug] >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |74.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> test.py::test[select-optional_pull--Debug] [GOOD] >> test.py::test[select-optional_pull--Plan] [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History |74.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest |74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> test.py::test[select-optional_pull--Results] >> test.py::test[pg-select_join_full3-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_full_equi-default.txt-Debug] |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> test.py::test[union-union_mix-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_null-default.txt-Debug] >> test.py::test[flatten_by-flatten_columns-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_columns_non_struct-default.txt-Debug] >> 
KikimrIcGateway::TestCreateSameExternalTable >> test.py::test[window-win_func_with_struct_access-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> TYardTest::TestLogOverwriteRestarts [GOOD] >> TYardTest::TestMultiYardHarakiri >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation >> test.py::test[aggregate-aggregate_inmem_list_in_key-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Debug] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-Plan] >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |74.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] >> TConsoleTests::TestTenantGeneration [GOOD] >> test.py::test[produce-reduce_lambda-default.txt-Plan] [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> TPQCDTest::TestUnavailableWithoutNetClassifier |74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |74.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[select-scalar_subquery_with_star-default.txt-Debug] [GOOD] |74.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> test.py::test[agg_apply-pg_int8-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_type_empty-default.txt-Debug] >> TPQCDTest::TestRelatedServicesAreRunning >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> TPQCDTest::TestUnavailableWithoutBoth >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> test.py::test[aggregate-group_by_expr_dict--Debug] [GOOD] >> test.py::test[aggregate-group_by_expr_dict--Plan] [GOOD] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> test.py::test[aggregate-group_by_expr_dict--Results] >> TCertificateCheckerTest::CheckSubjectDns [GOOD] |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> test.py::test[select-optional_pull--Results] [GOOD] >> 
test.py::test[select-reuse_named_node-default.txt-Debug] >> test.py::test[pg-select_join_full_equi-default.txt-Debug] [GOOD] >> test.py::test[pg-select_join_full_equi-default.txt-Plan] [GOOD] >> test.py::test[pg-select_join_full_equi-default.txt-Results] >> test.py::test[flatten_by-flatten_columns_non_struct-default.txt-Debug] [GOOD] >> test.py::test[flatten_by-flatten_columns_non_struct-default.txt-Plan] >> test.py::test[window-win_func_first_last_with_part--Results] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Analyze] >> TPQCDTest::TestUnavailableWithoutClustersList >> TYardTest::TestMultiYardHarakiri [GOOD] >> TYardTest::TestLogOwerwrite >> test.py::test[union_all-union_all_null-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_null-default.txt-Plan] >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> test.py::test[flatten_by-flatten_columns_non_struct-default.txt-Plan] [GOOD] >> test.py::test[flatten_by-flatten_columns_non_struct-default.txt-Results] >> test.py::test[union_all-union_all_null-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_null-default.txt-Results] |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> test.py::test[window-row_number_no_part_from_subq-default.txt-Debug] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Plan] >> TYardTest::TestLogOwerwrite [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Plan] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] >> test.py::test[agg_apply-sum_type_empty-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_type_empty-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_type_empty-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Debug] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Plan] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] >> test.py::test[join-mergejoin_semi_composite_to_inner--Debug] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Plan] >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 2002, MsgBus: 7103 2024-11-21T17:45:33.822046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790169314230216:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:33.822082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013b8/r3tmp/tmpalizxt/pdisk_1.dat 2024-11-21T17:45:34.116312Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2002, node 1 2024-11-21T17:45:34.140417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T17:45:34.140430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:34.140431Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:34.140470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7103 TClient is connected to server localhost:7103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:34.216152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.220918Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:34.285173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:34.285206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:34.286367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:34.435589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790173609198151:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.435657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.439808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.462293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.528092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.544093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.556553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790173609198458:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.556582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.556668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790173609198463:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.557449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2024-11-21T17:45:34.563845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790173609198465:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } Trying to start YDB, gRPC: 10209, MsgBus: 19303 2024-11-21T17:45:34.920925Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790174564167894:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013b8/r3tmp/tmpEosfro/pdisk_1.dat 2024-11-21T17:45:34.923366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:34.932400Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10209, node 2 2024-11-21T17:45:34.948081Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:34.948092Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:34.948094Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:34.948132Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19303 TClient is connected to server localhost:19303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:45:35.022034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:35.022422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.022778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:35.023949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:35.024553Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:35.029224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.042152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:35.060895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:35.071100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.276160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790178859136600:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:35.276221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:35.278858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.285403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.292274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.299140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.306126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.313479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.321976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790178859137093:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:35.321999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:35.322050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790178859137098:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:35.322660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:35.326345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790178859137100:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:35.926472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:45:36.001856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.084593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:45:36.158947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.235964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.290902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.346825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:45:36.404701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.645981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16734, MsgBus: 25616 2024-11-21T17:45:37.087060Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790184337164956:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:37.087471Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013b8/r3tmp/tmpg8k9zM/pdisk_1.dat 2024-11-21T17:45:37.108943Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16734, node 3 2024-11-21T17:45:37.117341Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:37.117351Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:37.117353Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:37.117399Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25616 TClient is connected to server localhost:25616 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:37.184069Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:37.184096Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.184451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.185573Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:37.227876Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.283135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.299494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:37.311869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.424413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790184337166496:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.424459Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.426492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.431978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.441929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.448731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.465106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.477779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.492268Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790184337167000:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.492293Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.492300Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790184337167005:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.492832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:37.496301Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790184337167007:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> test.py::test[join-mergejoin_semi_composite_to_inner--Plan] [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 4445, MsgBus: 29443 2024-11-21T17:45:36.668675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790180559615377:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001364/r3tmp/tmpIhfO7T/pdisk_1.dat 2024-11-21T17:45:36.698266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:36.715965Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4445, node 1 2024-11-21T17:45:36.732652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:36.732666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:36.732669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:36.732702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29443 2024-11-21T17:45:36.800210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:36.800264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:29443 2024-11-21T17:45:36.804927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:36.830051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:36.836455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:36.901310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:45:36.908799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.918560Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:131 Trying to start YDB, gRPC: 3169, MsgBus: 29353 2024-11-21T17:45:37.219980Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001364/r3tmp/tmpCQ8WqC/pdisk_1.dat 2024-11-21T17:45:37.226527Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3169, node 2 2024-11-21T17:45:37.243650Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:37.243665Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:37.243666Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:37.243716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29353 TClient is connected to server localhost:29353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:37.316616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:37.316643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.317722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:37.319017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:37.342719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:45:37.345020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17571, MsgBus: 26984 2024-11-21T17:45:37.661036Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790187353055642:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:37.661059Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001364/r3tmp/tmpkQgiGq/pdisk_1.dat 2024-11-21T17:45:37.674071Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17571, node 3 2024-11-21T17:45:37.682527Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:37.682542Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:37.682544Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:37.682592Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26984 TClient is connected to server localhost:26984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:37.763560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected waiting... 
2024-11-21T17:45:37.763593Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.764394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.764915Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:37.776611Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:37.782376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 >> test.py::test[window-win_fuse_window-default.txt-Debug] [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> test.py::test[bigdate-input_timestamp64-default.txt-Debug] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Plan] [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Results] >> test.py::test[flatten_by-flatten_columns_non_struct-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_full_equi-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_right_const-default.txt-Debug] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestLogOwerwrite [GOOD] Test command err: 2024-11-21T17:42:22.266337Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.266892Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 92593295916098522 MagicNextLogChunkReference: 5106438910287813577 MagicLogChunk: 5584356904454728699 MagicDataChunk: 14098472055695750525 MagicSysLogChunk: 13143791864326882490 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942214234 (2024-11-21T17:42:22.214234Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.268802Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 
1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.270061Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.270234Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.270362Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.270489Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.270588Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.367226Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1058467 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.373205Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T17:42:22.380807Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:22.475271Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.486068Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T17:42:22.490081Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T17:42:22.612615Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.612629Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T17:42:22.624441Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 3132861200841921552 MagicLogChunk: 9235711995472713412 MagicDataChunk: 13755844669550487037 MagicSysLogChunk: 3937711152816479980 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942577147 (2024-11-21T17:42:22.577147Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:22.632281Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is 
read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:22.640309Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:22.640332Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:22.640543Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.640746Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:22.640882Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:22.640928Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1428454 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:22.641290Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T17:42:22.642263Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:22.728515Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:22.768486Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:301} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2024-11-21T17:42:22.771539Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1 2024-11-21T17:42:23.217427Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.217441Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1 2024-11-21T17:42:23.248440Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 7261037237659769743 MagicLogChunk: 13440115453492243981 MagicDataChunk: 14250717608984488470 MagicSysLogChunk: 6434147727575974115 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210942906255 (2024-11-21T17:42:22.906255Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | 
EncryptData}} PDiskId# 1 2024-11-21T17:42:23.282480Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:23.304162Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:23.304190Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:23.304339Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.304437Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.304472Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:23.348568Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2042911 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:23.397129Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2024-11-21T17:42:23.432567Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2024-11-21T17:42:23.696891Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.709289Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 12075437451805335937 MagicNextLogChunkReference: 2955491352647953730 MagicLogChunk: 6272563536920726162 MagicDataChunk: 7272691270986129885 MagicSysLogChunk: 4605625582569226676 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210943622714 (2024-11-21T17:42:23.622714Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:23.715813Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:23.719886Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 
LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:23.719910Z ... },{chunkIdx# 69 users# 0 endOfSplice# 0},{chunkIdx# 70 users# 0 endOfSplice# 0},{chunkIdx# 71 users# 0 endOfSplice# 0},{chunkIdx# 72 users# 0 endOfSplice# 0},{chunkIdx# 73 users# 0 endOfSplice# 0},{chunkIdx# 74 users# 0 endOfSplice# 0},{chunkIdx# 75 users# 0 endOfSplice# 0},{chunkIdx# 76 users# 0 endOfSplice# 0},{chunkIdx# 77 users# 0 endOfSplice# 0},{chunkIdx# 78 users# 0 endOfSplice# 0},{chunkIdx# 79 users# 0 endOfSplice# 0},{chunkIdx# 80 users# 0 endOfSplice# 0},{chunkIdx# 81 users# 0 endOfSplice# 0},{chunkIdx# 82 users# 0 endOfSplice# 0},{chunkIdx# 83 users# 0 endOfSplice# 0},{chunkIdx# 84 users# 0 endOfSplice# 0},{chunkIdx# 85 users# 0 endOfSplice# 0},{chunkIdx# 86 users# 0 endOfSplice# 0},{chunkIdx# 87 users# 0 endOfSplice# 0},{chunkIdx# 88 users# 0 endOfSplice# 0},{chunkIdx# 89 users# 0 endOfSplice# 0},{chunkIdx# 90 users# 0 endOfSplice# 0},{chunkIdx# 91 users# 0 endOfSplice# 0},{chunkIdx# 92 users# 0 endOfSplice# 0},{chunkIdx# 93 users# 0 endOfSplice# 0},{chunkIdx# 94 users# 0 endOfSplice# 0},{chunkIdx# 95 users# 0 endOfSplice# 0},{chunkIdx# 96 users# 0 endOfSplice# 0},{chunkIdx# 97 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:45:37.791431Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 97 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:45:37.791436Z :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2145} KillOwner ownerId# 3 ownerRound# 2 VDiskId# [0:_:0:0:2] lastSeenLsn# 197 PDiskId# 1 2024-11-21T17:45:37.815169Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:45:37.815337Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 3317760000 bytes (3 GB) Guid: 0 MagicNextLogChunkReference: 15718582402342976594 MagicLogChunk: 15482348396721021244 MagicDataChunk: 707763515484632934 MagicSysLogChunk: 14022326382647186061 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 96 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732211136930367 (2024-11-21T17:45:36.930367Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:45:37.816812Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 7874847 NonceLog# 8080623 NonceData# 6740293} LogHeadChunkIdx# 97 LogHeadChunkPreviousNonce# 3556229} PDiskId# 1 2024-11-21T17:45:37.817470Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 97 SectorIdx# 34 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 97 OffsetInChunk# 139264} PDiskId# 1 2024-11-21T17:45:37.817492Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 97 OffsetInChunk# 139264} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | 
DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:45:37.817634Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 97 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:45:37.817806Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 97 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:45:37.817861Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:45:37.916454Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 9497234 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:45:38.052559Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:45:38.068871Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 631261565321037888 MagicNextLogChunkReference: 14270166965635533327 MagicLogChunk: 12096820511990898282 MagicDataChunk: 15567580128770583687 MagicSysLogChunk: 4396895952717031480 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732211138023506 (2024-11-21T17:45:38.023506Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:45:38.072377Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:45:38.075911Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:45:38.075958Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:45:38.076156Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:45:38.076375Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:45:38.076485Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:45:38.152382Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1099580 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:45:38.166385Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} 
PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},{chunkIdx# 2 users# 0 endOfSplice# 0},{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 2},},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-2 firstLsnToKeep# 2},},] PDiskId# 1 2024-11-21T17:45:38.166409Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 3 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-2 firstLsnToKeep# 2},},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-2 firstLsnToKeep# 2},},] PDiskId# 1 2024-11-21T17:45:38.172410Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 3 users# 0 endOfSplice# 0},{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-3 firstLsnToKeep# 3},},{chunkIdx# 5 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T17:45:38.172432Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 4 users# 1 endOfSplice# 0 {owner# 3 lsn# 2-3 firstLsnToKeep# 3},},{chunkIdx# 5 users# 1 endOfSplice# 0 {owner# 3 lsn# 3-3 firstLsnToKeep# 3},},] PDiskId# 1 2024-11-21T17:45:38.183438Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 4 users# 0 endOfSplice# 0},{chunkIdx# 5 users# 0 endOfSplice# 0},{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 4-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T17:45:38.183459Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo cut tail log LogChunks# [{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 4-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T17:45:38.259325Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:45:38.272433Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 631261565321037888 MagicNextLogChunkReference: 14270166965635533327 MagicLogChunk: 12096820511990898282 MagicDataChunk: 15567580128770583687 MagicSysLogChunk: 4396895952717031480 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732211138023506 (2024-11-21T17:45:38.023506Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:45:38.274756Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1660027 NonceLog# 1103185 NonceData# 1924030} LogHeadChunkIdx# 6 LogHeadChunkPreviousNonce# 1102128 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:45:38.279561Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 8 SectorIdx# 36 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 8 OffsetInChunk# 147456} 
PDiskId# 1 2024-11-21T17:45:38.279613Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 8 OffsetInChunk# 147456} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:45:38.279769Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T17:45:38.279972Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 6 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-5 firstLsnToKeep# 5},},{chunkIdx# 7 users# 1 endOfSplice# 0 {owner# 3 lsn# 5-6 firstLsnToKeep# 5},},{chunkIdx# 8 users# 1 endOfSplice# 0 {owner# 3 lsn# 6-7 firstLsnToKeep# 5},},] PDiskId# 1 2024-11-21T17:45:38.280057Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:45:38.360978Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1807} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 >> test.py::test[window-win_multiaggr_library--Debug] >> test.py::test[union_all-union_all_null-default.txt-Results] [GOOD] |74.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |74.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> test.py::test[agg_apply-sum_type_empty-default.txt-Results] [GOOD] >> test.py::test[agg_apply-sum_type_group_by-default.txt-Debug] >> test.py::test[window-win_func_rank_by_opt_part--Analyze] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Debug] >> 
test.py::test[select-reuse_named_node-default.txt-Debug] [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Plan] >> test.py::test[aggregate-percentile_interval-default.txt-Debug] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Plan] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> test.py::test[aggregate-aggregate_key_column-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] |74.8%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[select-reuse_named_node-default.txt-Plan] [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> test.py::test[select-reuse_named_node-default.txt-Results] >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2024-11-21T17:45:37.565341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790185970735565:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:37.565401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c55/r3tmp/tmp75YBls/pdisk_1.dat 2024-11-21T17:45:37.611006Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22886, node 1 2024-11-21T17:45:37.628457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001c55/r3tmp/yandexC9iXhB.tmp 2024-11-21T17:45:37.628473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001c55/r3tmp/yandexC9iXhB.tmp TClient is connected to server localhost:5857 PQClient connected to localhost:22886 WaitRootIsUp 'Root'... 
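Note on the ">>" progress entries interleaved with the PDisk log above: an entry with a trailing marker such as [GOOD] has finished that stage (Debug/Plan/Results/Analyze for the test.py cases), while an entry without one has only been scheduled. A minimal Python sketch for pulling the test.py entries out of a chunk of this log; the format is inferred from the text above rather than taken from ya's own result parser, and the helper names are illustrative only:

import re

# Collect (case, status) pairs from ">> test.py::test[...]" progress entries.
# An empty status means the entry was only scheduled, not yet finished.
ENTRY = re.compile(r">> (test\.py::test\[[^\]]+\])(?: \[(\w+)\])?")

def parse_entries(chunk):
    return [(case, status or "SCHEDULED") for case, status in ENTRY.findall(chunk)]

sample = (">> test.py::test[union_all-union_all_null-default.txt-Results] [GOOD] "
          ">> test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug]")
print(parse_entries(sample))
# [('test.py::test[union_all-union_all_null-default.txt-Results]', 'GOOD'),
#  ('test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug]', 'SCHEDULED')]

The C++ suites (TPQCDTest, TMessageBus...) use the same ">> name [STATUS]" convention, so the pattern could be widened the same way if needed.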
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:37.652337Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001c55/r3tmp/yandexC9iXhB.tmp 2024-11-21T17:45:37.652440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:37.689591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.689763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:37.689777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.691304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:37.694027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.737304Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.772925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.913964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790185970736079:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.913985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.913992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790185970736106:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.917054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:45:37.917096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790185970736111:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.917113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.918690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790185970736110:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:45:37.936943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.997020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.010827Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790190265703610:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:38.011267Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YThiZGJiZWYtOTBkMjNlZjctYzc1YjRmYmMtNzBjM2Y3OTk=, ActorId: [1:7439790185970736076:2298], ActorState: ExecuteState, TraceId: 01jd7x8qbq5qsxvsztz6ynzmhe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:38.011720Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:38.071380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:45:38.112612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7x8qhac9gs51m3vwqpxaye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVjYTAwMDItYmM3ZTQ5YTYtNjE1ODZlMmYtMmIyZjVhMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2024-11-21T17:45:37.677344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790185409795868:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:37.677503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c41/r3tmp/tmp9Tx35u/pdisk_1.dat 2024-11-21T17:45:37.753078Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18337, node 1 2024-11-21T17:45:37.763677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001c41/r3tmp/yandexVTQuM6.tmp 2024-11-21T17:45:37.763691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001c41/r3tmp/yandexVTQuM6.tmp TClient is connected to server localhost:30730 PQClient connected to localhost:18337 WaitRootIsUp 'Root'... 
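Note: the SCHEME_ERROR above ("Cannot find table 'db.[/Root/PQ/Config/V2/Versions]'") is raised while the harness is still creating the PQ config tables; the ESchemeOpCreateTable operations and the "=== Init DC" UPSERT that follow populate them, and the test still finishes [GOOD], which suggests the compile error is expected setup noise rather than the failure under investigation. When triaging such blocks it can help to pull the row/column/message triples out of the printed QueryIssues text; a minimal sketch, a plain regex over the textual protobuf dump shown in the log (not the real protobuf parser):

import re

# Extract (row, column, message) triples from QueryIssues text as printed
# in the log, e.g. 'position { row: 5 column: 17 } message: "..."'.
ISSUE = re.compile(r'position \{ row: (\d+) column: (\d+) \} message: "([^"]*)"')

def extract_issues(query_issues_text):
    return [(int(row), int(col), msg) for row, col, msg in ISSUE.findall(query_issues_text)]

sample = ('issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" '
          'end_position { row: 5 column: 17 } severity: 1 }')
print(extract_issues(sample))  # [(5, 17, 'At function: KiReadTable!')]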
TClient::Ls request: Root 2024-11-21T17:45:37.778941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:37.778977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.780281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:37.793163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.806768Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001c41/r3tmp/yandexVTQuM6.tmp 2024-11-21T17:45:37.807031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:37.807201Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:45:37.838523Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T17:45:38.012605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790189704763808:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.012638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.012870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790189704763844:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.013592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:45:38.015963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T17:45:38.016022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790189704763846:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:45:38.047378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.117779Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790189704764018:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:38.118193Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiNTJiNjctYzQ3MTFhOWMtN2VmMzNhMS00YWQ2MWM3Yw==, ActorId: [1:7439790189704763804:2298], ActorState: ExecuteState, TraceId: 01jd7x8qemfkt9h2j47aadw015, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:38.118602Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:38.119530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.139770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:45:38.198150Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd7x8qky32gv9hezwec4h4bk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDZlYThlOWQtOGE4NzA2YjQtNmI1ODEwYjQtZDY5NWFlN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:45:38.208322Z node 1 :HTTP WARN: [::1]:44844 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.316480Z node 1 :HTTP WARN: [::1]:44848 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.424470Z node 1 :HTTP WARN: [::1]:44852 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.527427Z node 1 :HTTP WARN: [::1]:44862 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.631205Z node 1 :HTTP WARN: [::1]:44868 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.734468Z node 1 :HTTP WARN: [::1]:44874 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.840709Z node 1 :HTTP WARN: [::1]:44882 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.941951Z node 1 :HTTP WARN: [::1]:44886 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.048432Z node 1 :HTTP WARN: [::1]:44894 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.156398Z node 1 :HTTP WARN: [::1]:44908 anonymous GET /actors/pqcd/health >> test.py::test[pg-tpch-q18-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Analyze] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> test.py::test[window-win_by_all_avg_interval-default.txt-Debug] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 2741, MsgBus: 15180 2024-11-21T17:45:32.907008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790162993034936:2126];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:32.908458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013e9/r3tmp/tmpD9lp42/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2741, node 1 2024-11-21T17:45:33.440793Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:45:33.441068Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:45:33.455131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:33.455157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:33.468307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:33.491716Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:33.564690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:33.564700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:33.564701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:33.564734Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:15180 TClient is connected to server localhost:15180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:33.789394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:33.793249Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:33.804178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:33.940309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.083561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.129422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.278762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790171582971024:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.278823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.285922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.293393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.309405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.325949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.353096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.370391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.394251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790171582971529:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.394294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.394461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790171582971534:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.399317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:34.402856Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T17:45:34.402920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790171582971536:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:45:34.626133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.628903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9818, MsgBus: 28318 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013e9/r3tmp/tmpioaabk/pdisk_1.dat 2024-11-21T17:45:34.935240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:34.937292Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9818, node 2 2024-11-21T17:45:34.951460Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:34.951473Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:34.951475Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:34.951513Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28318 TClient is connected to server localhost:28318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:35.026500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:35.026532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:35.026889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.027543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:35.028789Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:35.039808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
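Note: most of this KikimrIcGateway block is the schemeshard repeating "Operation part proposed ok, but propose itself is undo unsafe" while the fixture creates its tables, ACLs, resource pool and external data source; the warning text is identical each time and only the suboperation type and opId change. Tallying the suboperation types makes it easier to see what the fixture actually builds. A throwaway Python sketch over the log text (helper name and sample string are illustrative only):

import re
from collections import Counter

# Count how often each schemeshard suboperation type appears in the
# "propose itself is undo unsafe" warnings of a log chunk.
def count_suboperations(log_text):
    return Counter(re.findall(r"suboperation type: (\w+)", log_text))

sample = ("WARN: Operation part proposed ok, but propose itself is undo unsafe, "
          "suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0 "
          "WARN: ... suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0")
print(count_suboperations(sample))
# Counter({'ESchemeOpCreateTable': 1, 'ESchemeOpModifyACL': 1})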
2024-11-21T17:45:35.054545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:35.070890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:35.083881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... D_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790176465490072:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:35.934406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:45:36.017701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.082946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:45:36.157165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.249935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.322721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.399192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:45:36.416996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.727855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.731817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.734558Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715705, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13050, MsgBus: 16459 2024-11-21T17:45:37.092117Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790184940890174:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:37.097482Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013e9/r3tmp/tmpi2xc1k/pdisk_1.dat 2024-11-21T17:45:37.111441Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13050, node 3 2024-11-21T17:45:37.120771Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:37.120795Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:37.120796Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T17:45:37.120835Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16459 TClient is connected to server localhost:16459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:37.162474Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.194319Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:37.194355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.195429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:37.212989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.221501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.252912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.265215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.392944Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790184940891546:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.392980Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.395302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.401622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.413901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.420847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.427255Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.433863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.453007Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790184940892047:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.453032Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.453074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790184940892052:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.453728Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:37.462235Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790184940892054:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:45:38.095596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2024-11-21T17:45:38.188670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.262439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480 2024-11-21T17:45:38.342365Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.478881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.537599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.597770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T17:45:38.610311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T17:45:39.086753Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710714:0, at schemeshard: 72057594046644480 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> test.py::test[aggregate-group_by_expr_dict--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2024-11-21T17:45:39.191478Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.194637Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.194721Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T17:45:39.194729Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.194734Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T17:45:39.194738Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.194746Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.194754Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T17:45:39.194758Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T17:45:39.194907Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.194931Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:259:2251], now have 1 active actors on pipe 2024-11-21T17:45:39.194948Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:39.196865Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.197853Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.197887Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.198046Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.198066Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitConfigStep 2024-11-21T17:45:39.198109Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:45:39.198148Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2024-11-21T17:45:39.198477Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Completed. 
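Note: the PERSQUEUE records above show the tablet's config handshake: "Config update version N(current M) received", then "Apply new config", then "Config applied version N", after which partition 0 is bootstrapped. Presumably an update is applied only when the proposed version is ahead of the tablet's current one; a quick check over the log text, written as an assumed triage helper rather than YDB code:

import re

# Collect (new, current) version pairs from "Config update version N(current M)"
# records and verify each proposed version is ahead of the current one.
def config_updates(log_text):
    return [(int(new), int(cur)) for new, cur in
            re.findall(r"Config update version (\d+)\(current (\d+)\)", log_text)]

sample = ("Config update version 1(current 0) received from actor [2:97:2132] txId 12345 "
          "Config update version 2(current 0) received from actor [2:97:2132] txId 12345")
updates = config_updates(sample)
print(updates)                                 # [(1, 0), (2, 0)]
print(all(new > cur for new, cur in updates))  # True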
2024-11-21T17:45:39.198481Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2024-11-21T17:45:39.198486Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.198539Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:45:39.198594Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.198600Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2024-11-21T17:45:39.209686Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.210716Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.210791Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T17:45:39.210801Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.210804Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2024-11-21T17:45:39.210809Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.210817Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.210824Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T17:45:39.210828Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T17:45:39.210946Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.210953Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:407:2362], now have 1 active actors on pipe 2024-11-21T17:45:39.210969Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:39.211018Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:39.211777Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:39.211808Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.211983Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:39.212008Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T17:45:39.212069Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:45:39.212117Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:416:2369] 2024-11-21T17:45:39.212882Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T17:45:39.212898Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:416:2369] 2024-11-21T17:45:39.212921Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.213011Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:39.213091Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.213097Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:419:2371], now have 1 active actors on pipe 2024-11-21T17:45:39.213596Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.213609Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:427:2376], now have 1 active actors on pipe 2024-11-21T17:45:39.213640Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvStatus 2024-11-21T17:45:39.213655Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.213658Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:429:2377], now have 1 active actors on pipe 2024-11-21T17:45:39.213689Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:45:39.213706Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvStatus 2024-11-21T17:45:39.213743Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:45:39.213783Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:39.213788Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:427:2376] destroyed 2024-11-21T17:45:39.213846Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:39.213850Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:429:2377] destroyed 2024-11-21T17:45:39.413811Z 
node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.414603Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.414659Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T17:45:39.414666Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.414670Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T17:45:39.414675Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.414681Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.414687Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T17:45:39.414691Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T17:45:39.414787Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.414794Z node 3 ... PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:541:2464] 2024-11-21T17:45:39.438285Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.438336Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:39.438402Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.438408Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:544:2466], now have 1 active actors on pipe 2024-11-21T17:45:39.438763Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.438772Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:552:2471], now have 1 active actors on pipe 2024-11-21T17:45:39.438789Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.438794Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:553:2472], now have 1 active actors on pipe 2024-11-21T17:45:39.438800Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvStatus 2024-11-21T17:45:39.438838Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:45:39.438855Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvStatus 2024-11-21T17:45:39.438875Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:45:39.438884Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle 
TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.438889Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:554:2472], now have 1 active actors on pipe 2024-11-21T17:45:39.438911Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvStatus 2024-11-21T17:45:39.438927Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:45:39.449286Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.449311Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:562:2479], now have 1 active actors on pipe 2024-11-21T17:45:39.453365Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.453906Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.453951Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T17:45:39.453956Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.453980Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.454039Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.454043Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T17:45:39.454056Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T17:45:39.454088Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:45:39.454107Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:621:2526] 2024-11-21T17:45:39.454462Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDiskStatusStep 2024-11-21T17:45:39.454616Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitMetaStep 2024-11-21T17:45:39.454639Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInfoRangeStep 2024-11-21T17:45:39.454673Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataRangeStep 2024-11-21T17:45:39.454702Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataStep 2024-11-21T17:45:39.454706Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 
2024-11-21T17:45:39.454710Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:621:2526] 2024-11-21T17:45:39.454715Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.454741Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T17:45:39.454768Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:39.454837Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:39.454841Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:553:2472] destroyed 2024-11-21T17:45:39.454847Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:39.454849Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:552:2471] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 81 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 81 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 
Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 96 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 96 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2024-11-21T17:45:39.381940Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.382589Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.382635Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T17:45:39.382640Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.382642Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T17:45:39.382645Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.382649Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.382654Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T17:45:39.382656Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T17:45:39.382723Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.382734Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:257:2249], now have 1 active actors on pipe 2024-11-21T17:45:39.382743Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:39.384045Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.384612Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.384639Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.384770Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.384787Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitConfigStep 2024-11-21T17:45:39.384827Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:45:39.384873Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2256] 2024-11-21T17:45:39.385277Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Completed. 
2024-11-21T17:45:39.385284Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:266:2256] 2024-11-21T17:45:39.385289Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.385353Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:45:39.385395Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.385401Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:269:2258], now have 1 active actors on pipe 2024-11-21T17:45:39.394554Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.395355Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.395399Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2024-11-21T17:45:39.395403Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.395406Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2024-11-21T17:45:39.395408Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.395413Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.395417Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] empty tx queue 2024-11-21T17:45:39.395420Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2024-11-21T17:45:39.395502Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.395515Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:405:2360], now have 1 active actors on pipe 2024-11-21T17:45:39.395526Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:39.395560Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.396366Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.396393Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.396577Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:39.396598Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 0. Step TInitConfigStep 2024-11-21T17:45:39.396660Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:45:39.396700Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:414:2367] 2024-11-21T17:45:39.397199Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 0. Completed. 2024-11-21T17:45:39.397209Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:414:2367] 2024-11-21T17:45:39.397217Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.397312Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:45:39.397387Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.397392Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:417:2369], now have 1 active actors on pipe 2024-11-21T17:45:39.400913Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.401728Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.401772Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2024-11-21T17:45:39.401777Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.401779Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2024-11-21T17:45:39.401782Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.401786Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.401790Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] empty tx queue 2024-11-21T17:45:39.401793Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2024-11-21T17:45:39.401867Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.401871Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:468:2408], now have 1 active actors on pipe 2024-11-21T17:45:39.401882Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:39.401916Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:39.402441Z node 3 :PERSQUEUE DEBUG: 
[PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:39.402457Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.402596Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:39.402610Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1 ... 2024-11-21T17:45:39.789650Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 7(current 0) received from actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:39.790258Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:39.790281Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.790368Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 7 actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:39.790390Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. Step TInitConfigStep 2024-11-21T17:45:39.790437Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:45:39.790467Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:476:2414] 2024-11-21T17:45:39.790945Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. Completed. 
2024-11-21T17:45:39.790956Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:476:2414] 2024-11-21T17:45:39.790963Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.791019Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2024-11-21T17:45:39.791085Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.791090Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:479:2416], now have 1 active actors on pipe 2024-11-21T17:45:39.794450Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.795072Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.795128Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T17:45:39.795134Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.795138Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2024-11-21T17:45:39.795143Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.795149Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.795155Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T17:45:39.795159Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T17:45:39.795262Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.795269Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:530:2455], now have 1 active actors on pipe 2024-11-21T17:45:39.795286Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:39.795333Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:39.795919Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:39.795940Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.796037Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [4:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:39.796055Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T17:45:39.796105Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:45:39.796134Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:539:2462] 2024-11-21T17:45:39.796611Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T17:45:39.796621Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:539:2462] 2024-11-21T17:45:39.796627Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.796686Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:39.796757Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.796762Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:542:2464], now have 1 active actors on pipe 2024-11-21T17:45:39.797266Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.797277Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:553:2470], now have 1 active actors on pipe 2024-11-21T17:45:39.797283Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.797287Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:552:2469], now have 1 active actors on pipe 2024-11-21T17:45:39.797303Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.797307Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:554:2470], now have 1 active actors on pipe 2024-11-21T17:45:39.810652Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:39.810677Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:559:2474], now have 1 active actors on pipe 2024-11-21T17:45:39.818141Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:39.818787Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:39.818839Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T17:45:39.818846Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:39.818874Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:39.818952Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:39.818958Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T17:45:39.818977Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T17:45:39.819022Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:45:39.819050Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:618:2521] 2024-11-21T17:45:39.819572Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDiskStatusStep 2024-11-21T17:45:39.819791Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitMetaStep 2024-11-21T17:45:39.819829Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInfoRangeStep 2024-11-21T17:45:39.819869Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataRangeStep 2024-11-21T17:45:39.819894Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataStep 2024-11-21T17:45:39.819901Z node 4 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T17:45:39.819908Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:618:2521] 2024-11-21T17:45:39.819916Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:39.819947Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T17:45:39.819997Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:39.820089Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:39.820096Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:553:2470] destroyed 2024-11-21T17:45:39.820107Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:39.820111Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:552:2469] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } } } |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> test.py::test[pg-select_join_right_const-default.txt-Debug] [GOOD] >> 
test.py::test[pg-select_join_right_const-default.txt-Plan] [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> test.py::test[pg-select_join_right_const-default.txt-Results] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] [GOOD] >> test.py::test[binding-bind_select-default.txt-Debug] >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] |74.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |74.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[produce-reduce_lambda-default.txt-Plan] [GOOD] |74.8%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } >> test.py::test[agg_apply-sum_type_group_by-default.txt-Debug] [GOOD] >> test.py::test[agg_apply-sum_type_group_by-default.txt-Plan] [GOOD] >> test.py::test[agg_apply-sum_type_group_by-default.txt-Results] >> test.py::test[select-reuse_named_node-default.txt-Results] [GOOD] >> test.py::test[select-select_all_group_by_column--Debug] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] |74.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[flatten_by-flatten_columns_non_struct-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> test.py::test[aggregate-aggregate_key_column-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Debug] >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 27604, MsgBus: 21840 2024-11-21T17:45:34.284649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790172247477184:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013a2/r3tmp/tmpyT4UqH/pdisk_1.dat 2024-11-21T17:45:34.376914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:34.422054Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27604, node 1 2024-11-21T17:45:34.446779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:34.446796Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:34.446798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:34.446880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:21840 2024-11-21T17:45:34.468832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:34.468857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:34.469987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:34.502634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.507906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.527732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.589354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.647436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:34.680604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790172247478588:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.680653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.706721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.715399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.725037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.742918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.753351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.760277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:34.768006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790172247479088:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.768038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790172247479093:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.768055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:34.768624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:34.773327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790172247479095:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:35.377921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:45:35.452082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.510638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:45:35.575071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.640172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.695896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:45:35.753833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:45:35.769918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:45:36.022959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11195, MsgBus: 18115 2024-11-21T17:45:36.225700Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790181557560627:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013a2/r3tmp/tmpAoFbAT/pdisk_1.dat 2024-11-21T17:45:36.228611Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:36.236175Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11195, node 2 2024-11-21T17:45:36.255549Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:36.255561Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:36.255563Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:36.255602Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18115 TClient is connected to server localhost:18115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:36.324900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:36.324931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:36.326805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0 ... 24-11-21T17:45:36.754113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:36.758077Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:36.759017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790181557562542:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:37.230167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:45:37.301377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.360487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:45:37.428334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.487896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.537007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.594611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:45:37.602575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.008467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715714:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26144, MsgBus: 15874 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013a2/r3tmp/tmpQ5gd1N/pdisk_1.dat 2024-11-21T17:45:38.541637Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:38.541839Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26144, node 3 2024-11-21T17:45:38.556521Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:38.556534Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:38.556535Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:38.556575Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15874 TClient is connected to server localhost:15874 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:38.636728Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:38.636757Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:38.637127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.638588Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:38.644011Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:38.652347Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:38.666863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:38.682401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:38.695486Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:38.839330Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790188428485080:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.839437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.841949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.853483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.864264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.879379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.891494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.908328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.921770Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790188428485593:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.921790Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.921942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790188428485598:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.922600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:38.925321Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790188428485600:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:39.543416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:45:39.615886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:45:39.691092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:45:39.785768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:45:39.868169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:45:39.912403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:45:39.979984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:45:39.990518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:45:40.442504Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715713:0, at schemeshard: 72057594046644480 >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> test.py::test[bigdate-input_timestamp64-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_library--Debug] [GOOD] >> test.py::test[window-win_multiaggr_library--Plan] >> KqpDataIntegrityTrails::Select >> test.py::test[bigdate-table_yt_native-default-Debug] [SKIPPED] >> test.py::test[agg_apply-sum_type_group_by-default.txt-Results] [GOOD] >> test.py::test[agg_phases-max_null-default.txt-Debug] >> test.py::test[window-win_multiaggr_library--Plan] [GOOD] >> test.py::test[window-win_multiaggr_library--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2024-11-21T17:45:40.360731Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:40.362033Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:40.362100Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T17:45:40.362104Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:40.362107Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T17:45:40.362110Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:40.362115Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.362121Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T17:45:40.362123Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T17:45:40.362236Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.362253Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2024-11-21T17:45:40.362269Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:40.364006Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:40.365485Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:40.365531Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.365746Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:40.365780Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitConfigStep 2024-11-21T17:45:40.365850Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:45:40.365912Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2024-11-21T17:45:40.366467Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic1' partition 0. Completed. 
2024-11-21T17:45:40.366480Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2024-11-21T17:45:40.366488Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:40.366577Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:45:40.366672Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.366681Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2024-11-21T17:45:40.375331Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:40.376038Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:40.376101Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T17:45:40.376108Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:40.376112Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2024-11-21T17:45:40.376116Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:40.376123Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.376129Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T17:45:40.376133Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T17:45:40.376257Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.376267Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:409:2364], now have 1 active actors on pipe 2024-11-21T17:45:40.376283Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:40.376335Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:40.377007Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:40.377033Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.377219Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:40.377240Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T17:45:40.377297Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:45:40.377342Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:418:2371] 2024-11-21T17:45:40.377846Z node 2 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T17:45:40.377856Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:418:2371] 2024-11-21T17:45:40.377864Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:40.377937Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:40.378012Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.378019Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:421:2373], now have 1 active actors on pipe 2024-11-21T17:45:40.378514Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.378525Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:431:2378], now have 1 active actors on pipe 2024-11-21T17:45:40.378576Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:40.378583Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:431:2378] destroyed 2024-11-21T17:45:40.378588Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.378592Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:433:2379], now have 1 active actors on pipe 2024-11-21T17:45:40.378667Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:40.378671Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:433:2379] destroyed 2024-11-21T17:45:40.580812Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:40.581641Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:40.581698Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2024-11-21T17:45:40.581705Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:40.581709Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2024-11-21T17:45:40.581714Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:40.581721Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.581728Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] empty tx queue 2024-11-21T17:45:40.581732Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2024-11-21T17:45:40.581871Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.581879Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:257:2249], now have 1 active actors on pipe 2024-11-21T17:45:40.581888Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:40.581942Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2024-11-21T17:45:40.582541Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWa ... d from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:40.598372Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:40.598391Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.598539Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2024-11-21T17:45:40.598560Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. 
Step TInitConfigStep 2024-11-21T17:45:40.598601Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:45:40.598632Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:478:2416] 2024-11-21T17:45:40.599010Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 1. Completed. 2024-11-21T17:45:40.599017Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:478:2416] 2024-11-21T17:45:40.599023Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:40.599086Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2024-11-21T17:45:40.599156Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.599160Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:481:2418], now have 1 active actors on pipe 2024-11-21T17:45:40.601760Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:40.602260Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:40.602302Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T17:45:40.602306Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:40.602309Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2024-11-21T17:45:40.602311Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:40.602315Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.602320Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T17:45:40.602322Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T17:45:40.602414Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.602419Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:532:2457], now have 1 active actors on pipe 2024-11-21T17:45:40.602431Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:45:40.602465Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:40.602908Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } 
AllPartitions { PartitionId: 2 } 2024-11-21T17:45:40.602926Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.603029Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:97:2132] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2024-11-21T17:45:40.603046Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T17:45:40.603088Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:45:40.603127Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:541:2464] 2024-11-21T17:45:40.603489Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T17:45:40.603495Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:541:2464] 2024-11-21T17:45:40.603499Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:40.603544Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:40.603602Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.603608Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:544:2466], now have 1 active actors on pipe 2024-11-21T17:45:40.603974Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.603983Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:555:2472], now have 1 active actors on pipe 2024-11-21T17:45:40.603988Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.603992Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:554:2471], now have 1 active actors on pipe 2024-11-21T17:45:40.604012Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.604015Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:556:2472], now have 1 active actors on pipe 2024-11-21T17:45:40.614412Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:40.614442Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:561:2476], now have 1 active actors on pipe 2024-11-21T17:45:40.621528Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:45:40.622204Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:45:40.622251Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2024-11-21T17:45:40.622256Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:45:40.622284Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:45:40.622366Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:45:40.622370Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2024-11-21T17:45:40.622385Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitConfigStep 2024-11-21T17:45:40.622417Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:45:40.622442Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:620:2523] 2024-11-21T17:45:40.623026Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDiskStatusStep 2024-11-21T17:45:40.623370Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitMetaStep 2024-11-21T17:45:40.623416Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitInfoRangeStep 2024-11-21T17:45:40.623456Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataRangeStep 2024-11-21T17:45:40.623481Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Step TInitDataStep 2024-11-21T17:45:40.623487Z node 3 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--topic2' partition 2. Completed. 2024-11-21T17:45:40.623496Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:620:2523] 2024-11-21T17:45:40.623505Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:45:40.623551Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] empty tx queue 2024-11-21T17:45:40.623602Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:45:40.623710Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:40.623718Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:554:2471] destroyed 2024-11-21T17:45:40.623760Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:40.623765Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:555:2472] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/replication/service/ut_topic_reader/unittest >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Debug] >> test.py::test[window-win_func_rank_by_opt_part--Debug] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] >> test.py::test[bigdate-table_yt_native-default-Plan] [SKIPPED] >> test.py::test[bigdate-table_yt_native-default-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2024-11-21T17:45:37.666234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790187385717084:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:37.666648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c63/r3tmp/tmpl27VaQ/pdisk_1.dat 2024-11-21T17:45:37.739009Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27357, node 1 2024-11-21T17:45:37.755798Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:37.755810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:37.755812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:37.755848Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:37.760608Z node 1 :HTTP WARN: [::1]:54588 anonymous GET /actors/pqcd/health 2024-11-21T17:45:37.761185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:37.761207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.762330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:37.861723Z node 1 :HTTP WARN: [::1]:54602 anonymous GET /actors/pqcd/health 2024-11-21T17:45:37.965285Z node 1 :HTTP WARN: [::1]:54604 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.068427Z node 1 :HTTP WARN: [::1]:54612 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.075807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790191680684827:2295], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.075837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790191680684816:2292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.075856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.084082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:45:38.086722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790191680684830:2296], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:45:38.176499Z node 1 :HTTP WARN: [::1]:54628 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.239035Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790191680684907:2300], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:38.239524Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzYwYmI2ODItNzE3YzgxZDQtNjhmNDhjM2MtZTU1MDA3OTY=, ActorId: [1:7439790191680684814:2291], ActorState: ExecuteState, TraceId: 01jd7x8qgv258jrk9hdfny4zka, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:38.254011Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:38.278191Z node 1 :HTTP WARN: [::1]:54638 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.380779Z node 1 :HTTP WARN: [::1]:54650 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.487563Z node 1 :HTTP WARN: [::1]:54656 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.596438Z node 1 :HTTP WARN: [::1]:54664 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.704434Z node 1 :HTTP WARN: [::1]:54666 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.820385Z node 1 :HTTP WARN: [::1]:54680 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.932467Z node 1 :HTTP WARN: [::1]:54682 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.059641Z node 1 :HTTP WARN: [::1]:54684 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.164067Z node 1 :HTTP WARN: [::1]:54686 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.256937Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790195975652275:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:39.257409Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjVmYjA5MTEtY2UwZmI5N2QtMzkwMWY1YzktNjllYzAzOTU=, ActorId: [1:7439790195975652268:2305], ActorState: ExecuteState, TraceId: 01jd7x8rnnc7fgvfa4xypn4yj0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:39.257571Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:39.265264Z node 1 :HTTP WARN: [::1]:54696 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.366110Z node 1 :HTTP WARN: [::1]:54710 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.466924Z node 1 :HTTP WARN: [::1]:54724 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.569596Z node 1 :HTTP WARN: [::1]:54734 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.678087Z node 1 :HTTP WARN: [::1]:54744 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.780405Z node 1 :HTTP WARN: [::1]:54758 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.881515Z node 1 :HTTP WARN: [::1]:54764 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.982386Z node 1 :HTTP WARN: [::1]:54774 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.087377Z node 1 :HTTP WARN: [::1]:54784 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.192530Z node 1 :HTTP WARN: [::1]:54800 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.261210Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790200270619615:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:40.261591Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQxNzlkMzQtNjE0NDdmYWItMmZmZTM3ZmEtYTlkNjQzMWQ=, ActorId: [1:7439790200270619613:2314], ActorState: ExecuteState, TraceId: 01jd7x8sn25mdes3xhd8rfhja7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:40.261740Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:40.293686Z node 1 :HTTP WARN: [::1]:54804 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.396422Z node 1 :HTTP WARN: [::1]:54810 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.500751Z node 1 :HTTP WARN: [::1]:54824 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.608311Z node 1 :HTTP WARN: [::1]:54838 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.716410Z node 1 :HTTP WARN: [::1]:54840 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.820860Z node 1 :HTTP WARN: [::1]:54846 anonymous GET /actors/pqcd/health |74.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] >> test.py::test[join-mergejoin_semi_composite_to_inner--Results] [GOOD] >> test.py::test[join-premap_common_left_cross--Debug] >> test.py::test[bigdate-tzcasts-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2024-11-21T17:45:37.340560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790184713013537:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:37.340574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c4a/r3tmp/tmppGk6oK/pdisk_1.dat 2024-11-21T17:45:37.403024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12162, node 1 2024-11-21T17:45:37.415173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:37.415188Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:37.415189Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:37.415216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3588 PQClient connected to localhost:12162 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:45:37.441453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:37.441482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:37.442557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:37.478902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:37.484721Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:45:37.726688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790184713014199:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.726710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790184713014224:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.726717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.727305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:45:37.728909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790184713014228:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:45:37.760469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.786452Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790184713014371:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:37.787029Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDA5YmRhZTktYjU5ZDg4MzMtN2Y4NDQ3NGQtYzc3MDJhNTk=, ActorId: [1:7439790184713014196:2299], ActorState: ExecuteState, TraceId: 01jd7x8q5yddv8r3etddsrfep0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:37.787446Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:37.822995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.835592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:45:37.939698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7x8qbz9fj5eba1r8bm1cgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE2MzVhOWEtOWQ3ZmUzMGEtMzQzZDBkZDgtMjlmY2NlNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:45:37.952625Z node 1 :HTTP WARN: [::1]:59258 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.060483Z node 1 :HTTP WARN: [::1]:59268 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.164482Z node 1 :HTTP WARN: [::1]:59270 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.272498Z node 1 :HTTP WARN: [::1]:59280 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.380676Z node 1 :HTTP WARN: [::1]:59294 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.494977Z node 1 :HTTP WARN: [::1]:59304 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.598694Z node 1 :HTTP WARN: [::1]:59316 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.700873Z node 1 :HTTP WARN: [::1]:59322 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.804702Z node 1 :HTTP WARN: [::1]:59324 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.907442Z node 1 :HTTP WARN: [::1]:59326 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.015333Z node 1 :HTTP WARN: [::1]:59336 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.116593Z node 1 :HTTP WARN: [::1]:59344 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.218836Z node 1 :HTTP WARN: [::1]:59354 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.320308Z node 1 :HTTP WARN: [::1]:59368 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.421069Z node 1 :HTTP WARN: [::1]:59372 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.522020Z node 1 :HTTP WARN: [::1]:59382 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.627373Z node 1 :HTTP WARN: [::1]:59394 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.730518Z node 1 :HTTP WARN: [::1]:59410 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.836481Z node 1 :HTTP WARN: [::1]:59420 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.939831Z node 1 :HTTP WARN: [::1]:59430 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.043200Z node 1 :HTTP WARN: [::1]:59438 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.148081Z node 1 :HTTP WARN: [::1]:59448 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.256459Z node 1 :HTTP WARN: [::1]:59464 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.367092Z node 1 :HTTP WARN: [::1]:59480 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.468369Z node 1 :HTTP WARN: [::1]:59492 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.569732Z node 1 :HTTP WARN: [::1]:59506 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.676315Z node 1 :HTTP WARN: [::1]:59514 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.785056Z node 1 :HTTP WARN: [::1]:59526 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.900272Z node 1 :HTTP WARN: [::1]:59530 anonymous GET /actors/pqcd/health 2024-11-21T17:45:41.004810Z node 1 :HTTP WARN: [::1]:59544 anonymous GET /actors/pqcd/health |74.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> test.py::test[binding-bind_select-default.txt-Debug] [GOOD] |74.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |74.9%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2024-11-21T17:45:38.021810Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790190018861009:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:38.021852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c2d/r3tmp/tmpXhdd8X/pdisk_1.dat 2024-11-21T17:45:38.073932Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30123, node 1 2024-11-21T17:45:38.085609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001c2d/r3tmp/yandexFsEHqq.tmp 2024-11-21T17:45:38.085618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001c2d/r3tmp/yandexFsEHqq.tmp 2024-11-21T17:45:38.122969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:38.123001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:38.124036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:38.134717Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001c2d/r3tmp/yandexFsEHqq.tmp 2024-11-21T17:45:38.134794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:38.195440Z node 1 :HTTP WARN: [::1]:60406 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.300666Z node 1 :HTTP WARN: [::1]:60408 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.405344Z node 1 :HTTP WARN: [::1]:60420 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.468580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790190018861455:2292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.468612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.468753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790190018861467:2295], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:38.479448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:45:38.484023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:38.484148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790190018861469:2296], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:45:38.508415Z node 1 :HTTP WARN: [::1]:60424 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.566750Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790190018861546:2300], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:38.567119Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQxMWE1YmQtMzUxOTFhZDktM2RiM2YzZWEtZDM5NmJkOTc=, ActorId: [1:7439790190018861453:2291], ActorState: ExecuteState, TraceId: 01jd7x8qx46f86pkb06s2fzhrp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:38.575263Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:38.611637Z node 1 :HTTP WARN: [::1]:60428 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.713086Z node 1 :HTTP WARN: [::1]:60434 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.820320Z node 1 :HTTP WARN: [::1]:60446 anonymous GET /actors/pqcd/health 2024-11-21T17:45:38.923576Z node 1 :HTTP WARN: [::1]:60452 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.024595Z node 1 :HTTP WARN: [::1]:60462 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.136419Z node 1 :HTTP WARN: [::1]:60464 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.241140Z node 1 :HTTP WARN: [::1]:60474 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.342174Z node 1 :HTTP WARN: [::1]:60480 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.443001Z node 1 :HTTP WARN: [::1]:60490 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.543780Z node 1 :HTTP WARN: [::1]:60498 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.578241Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790194313828916:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:39.578309Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWZkMjIzNjItMjJiYWEyODMtZjllNTZiMjAtOTIzMjUwMjc=, ActorId: [1:7439790194313828909:2305], ActorState: ExecuteState, TraceId: 01jd7x8rzqcewvf1bjz6j3ak4r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:39.578436Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:39.644790Z node 1 :HTTP WARN: [::1]:60514 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.752423Z node 1 :HTTP WARN: [::1]:60530 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.859047Z node 1 :HTTP WARN: [::1]:60542 anonymous GET /actors/pqcd/health 2024-11-21T17:45:39.961629Z node 1 :HTTP WARN: [::1]:60546 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.063546Z node 1 :HTTP WARN: [::1]:60554 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.172415Z node 1 :HTTP WARN: [::1]:60560 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.279083Z node 1 :HTTP WARN: [::1]:60564 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.382104Z node 1 :HTTP WARN: [::1]:60578 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.488562Z node 1 :HTTP WARN: [::1]:60590 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.583527Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790198608796253:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:40.584021Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzdkNGVhMGEtYTk3YTg5ZGItNDM4YjViMjAtYTE3MmJlZDg=, ActorId: [1:7439790198608796251:2314], ActorState: ExecuteState, TraceId: 01jd7x8sz3adyhzb0y3pj81t55, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:40.584152Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:40.590019Z node 1 :HTTP WARN: [::1]:60592 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.695724Z node 1 :HTTP WARN: [::1]:60600 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.804765Z node 1 :HTTP WARN: [::1]:60602 anonymous GET /actors/pqcd/health 2024-11-21T17:45:40.907696Z node 1 :HTTP WARN: [::1]:60606 anonymous GET /actors/pqcd/health 2024-11-21T17:45:41.020841Z node 1 :HTTP WARN: [::1]:60616 anonymous GET /actors/pqcd/health 2024-11-21T17:45:41.127792Z node 1 :HTTP WARN: [::1]:60618 anonymous GET /actors/pqcd/health 2024-11-21T17:45:41.230344Z node 1 :HTTP WARN: [::1]:60634 anonymous GET /actors/pqcd/health |74.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal |74.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test.py::test[binding-bind_select-default.txt-Plan] [GOOD] >> test.py::test[binding-bind_select-default.txt-Results] >> test.py::test[window-win_fuse_window-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Plan] [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2024-11-21T17:45:17.748596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790097729714843:2096];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:17.748665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020b3/r3tmp/tmp50Xk7S/pdisk_1.dat 2024-11-21T17:45:18.418987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5194, node 1 2024-11-21T17:45:18.491348Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:18.491364Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:18.491366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:18.491409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:18.653623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:18.653650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:18.661532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:18.791649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.797465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:18.797488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.800835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:18.801062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:18.801079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:45:18.801644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:18.801653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:45:18.801864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:18.802368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:18.804045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211118848, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:18.804054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:45:18.804110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:45:18.812558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:18.812620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:18.812635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:45:18.812647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:45:18.812657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:45:18.812674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:45:18.813543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:45:18.813552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:45:18.813557Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:18.813572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020b3/r3tmp/tmpCtG6Qk/pdisk_1.dat 2024-11-21T17:45:20.547583Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:20.575723Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1783, node 4 2024-11-21T17:45:20.628863Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:20.628902Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:20.631680Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:20.640486Z node 4 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:20.640501Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:20.640502Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:20.640546Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:20.668736Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:20.668839Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:20.668844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:20.670867Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:20.670921Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:20.670926Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:45:20.672222Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:20.672252Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
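The FLAT_TX_SCHEMESHARD entries in this test's command err trace the ALTER DATABASE sub-operation through a fixed chain of internal states (2 -> 3 -> 128 -> 240), followed by TTxPublishToSchemeBoard, a TEvUpdateAck for the path version, and only then "Publication complete". The fragment below is a minimal sketch of that observed progression, included only as an annotation of the log: the numeric states are taken from the lines above, while the function name and the state labels in the comments are assumptions, not identifiers from the YDB sources.

// A minimal sketch, assuming nothing beyond what the log shows: each
// ProgressState handler moves an ESchemeOpAlterSubDomain sub-operation to
// the next numeric state until it reaches 240, after which the schemeshard
// publishes to the scheme board and waits for TEvUpdateAck.
// NextState and the labels below are hypothetical, not YDB identifiers.
#include <cstdio>

int NextState(int current) {
    switch (current) {
        case 2:   return 3;    // "TCreateParts ... ProgressState"    -> configure
        case 3:   return 128;  // "TConfigureParts ProgressState"     -> propose
        case 128: return 240;  // "TPropose ... TEvOperationPlan"     -> done
        default:  return 240;  // 240 is terminal: publication + ack follow
    }
}

int main() {
    for (int state = 2; state != 240; state = NextState(state)) {
        std::printf("Change state: %d -> %d\n", state, NextState(state));
    }
    return 0;
}

Both node 1 (txId 281474976710657) and node 4 (txId 281474976715657) in this log follow the same chain before their sessions-pool workload starts.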
2024-11-21T17:45:20.676092Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:20.677773Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:20.679522Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211120724, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:20.679536Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:45:20.679593Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:45:20.680076Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:20.680122Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:20.680132Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:45:20.680142Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:45:20.680150Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:45:20.680163Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:45:20.680595Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:45:20.680604Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:45:20.680608Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:20.680922Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:45:35.564572Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:45:35.564595Z node 4 :IMPORT WARN: Table profiles were not loaded |74.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |74.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |74.9%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ReadSessionImplTest::SuccessfulInit [GOOD] >> test.py::test[window-win_fuse_window-default.txt-Results] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] |74.9%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |74.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> KqpDataIntegrityTrails::Select [GOOD] |74.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] |74.9%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |74.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate >> CompressExecutor::TestReorderedExecutor >> test.py::test[binding-bind_select-default.txt-Results] [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T17:45:42.607137Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.607148Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.607152Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.607262Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.617976Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.619701Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.619844Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T17:45:42.620300Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.620305Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.620309Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.620406Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.620510Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.623400Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.623498Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.623587Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:45:42.629646Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.629654Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.629657Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.629719Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.629912Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.629951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.630038Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.630242Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.630371Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.630415Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.630427Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:45:42.630730Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.630735Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.630739Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.630785Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.630889Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.630941Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.630982Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T17:45:42.631291Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:42.631321Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:45:42.631374Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:45:42.631384Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:45:42.631406Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.631411Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:45:42.631417Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:45:42.631455Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2024-11-21T17:45:42.631474Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:45:42.631477Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:45:42.631480Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:42.631497Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2024-11-21T17:45:42.631508Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:45:42.631512Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:45:42.631515Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:45:42.631527Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2024-11-21T17:45:42.631549Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:45:42.631553Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:45:42.631556Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:42.631569Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2024-11-21T17:45:42.631855Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.631860Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.631862Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.631902Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.631961Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.631983Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.632025Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T17:45:42.632181Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:42.632203Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:45:42.632249Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:45:42.632260Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:45:42.632281Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.632285Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2024-11-21T17:45:42.632289Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:45:42.632292Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:45:42.632297Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:42.632326Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2024-11-21T17:45:42.632340Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:45:42.632344Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:45:42.632347Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:45:42.632350Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:45:42.632353Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:42.632369Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2024-11-21T17:45:42.632641Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.632645Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.632648Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.632698Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.632760Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.632782Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.632825Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.632918Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:42.632954Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:42.632986Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T17:45:42.632996Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:45:42.633015Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.633020Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:45:42.633024Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T17:45:42.633027Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T17:45:42.633032Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T17:45:42.633036Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T17:45:42.633054Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2024-11-21T17:45:42.633091Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> test.py::test[binding-drop_binding--Debug] >> test.py::test[window-win_by_all_avg_interval-default.txt-Debug] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Plan] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> test.py::test[aggregate-group_by_expr_lookup--Debug] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Plan] [GOOD] >> test.py::test[aggregate-group_by_expr_lookup--Results] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> test.py::test[bigdate-tzcasts-default.txt-Debug] [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> test.py::test[bigdate-tzcasts-default.txt-Plan] [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> test.py::test[bigdate-tzcasts-default.txt-Results] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> test.py::test[select-select_all_group_by_column--Debug] [GOOD] |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 9540, MsgBus: 8503 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001efe/r3tmp/tmp6JGmqL/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9540, node 1 TClient is connected to server localhost:8503 TClient is connected to server localhost:8503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... 
waiting... waiting... |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[agg_phases-max_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-max_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-max_null-default.txt-Results] >> test.py::test[select-select_all_group_by_column--Plan] [GOOD] >> test.py::test[select-select_all_group_by_column--Results] >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T17:45:42.544349Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544356Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544359Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.544440Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.544574Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:45:42.544584Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544773Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544775Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544777Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.544816Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.544843Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:45:42.544847Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544973Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544975Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.544977Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.545024Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:45:42.545034Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545036Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545159Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T17:45:42.545304Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545306Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545308Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.545350Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:45:42.545355Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545357Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545361Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T17:45:42.545459Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545462Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:45:42.545463Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.545509Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.545577Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.546405Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:45:42.546466Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.546522Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T17:45:42.546814Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T17:45:42.546847Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.546853Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:45:42.546855Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:45:42.546857Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T17:45:42.546859Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T17:45:42.546862Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T17:45:42.546864Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T17:45:42.546865Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T17:45:42.546873Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T17:45:42.546874Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T17:45:42.546876Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T17:45:42.546878Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T17:45:42.546879Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T17:45:42.546881Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T17:45:42.546883Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T17:45:42.546884Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T17:45:42.546899Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T17:45:42.546901Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T17:45:42.546903Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T17:45:42.546905Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T17:45:42.546906Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T17:45:42.546908Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T17:45:42.546910Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T17:45:42.546912Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T17:45:42.546913Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T17:45:42.546915Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T17:45:42.546917Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T17:45:42.546918Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T17:45:42.546920Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T17:45:42.546921Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T17:45:42.546923Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T17:45:42.546925Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T17:45:42.546931Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T17:45:42.546933Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T17:45:42.546935Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T17:45:42.546936Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T17:45:42.546939Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T17:45:42.546941Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T17:45:42.546943Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T17:45:42.546944Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T17:45:42.546946Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T17:45:42.546947Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T17:45:42.546949Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T17:45:42.546950Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T17:45:42.546952Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T17:45:42.546955Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T17:45:42.546957Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T17:45:42.546959Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T17:45:42.546960Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T17:45:42.546962Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T17:45:42.546967Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T17:45:42.547027Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T17:45:42.547049Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T17:45:42.547051Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T17:45:42.547053Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T17:45:42.547055Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T17:45:42.547057Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T17:45:42.547059Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T17:45:42.547060Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T17:45:42.547062Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T17:45:42.547064Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T17:45:42.547066Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T17:45:42.547067Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T17:45:42.547069Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T17:45:42.547070Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T17:45:42.547072Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T17:45:42.547073Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T17:45:42.547075Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T17:45:42.547078Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T17:45:42.547079Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T17:45:42.547081Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T17:45:42.547082Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T17:45:42.547084Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T17:45:42.547085Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T17:45:42.547087Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T17:45:42.547088Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T17:45:42.547090Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T17:45:42.547091Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T17:45:42.547093Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T17:45:42.547094Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T17:45:42.547096Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T17:45:42.547097Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T17:45:42.547099Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T17:45:42.547101Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T17:45:42.547105Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T17:45:42.547108Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T17:45:42.547109Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T17:45:42.547111Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T17:45:42.547112Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T17:45:42.547114Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T17:45:42.547115Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T17:45:42.547117Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T17:45:42.547119Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T17:45:42.547120Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T17:45:42.547122Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T17:45:42.547124Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T17:45:42.547125Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T17:45:42.547127Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T17:45:42.547128Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T17:45:42.547130Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T17:45:42.547132Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T17:45:42.547133Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T17:45:42.547136Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T17:45:42.547149Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:45:42.547310Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.547312Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.547313Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.547340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:45:42.547399Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.547410Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.547457Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.648535Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.648603Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:45:42.648618Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.648625Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:45:42.648647Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:45:42.849746Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T17:45:42.949994Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:45:42.950067Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:45:42.950146Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:45:42.950521Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.950527Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.950530Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.950594Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.950688Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.950728Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.950814Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:43.051419Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.051485Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:45:43.051499Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.051506Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:45:43.051533Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T17:45:43.051553Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:45:43.051615Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:45:43.051637Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T17:45:43.051665Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> test.py::test[bigdate-round-default.txt-Debug] >> test.py::test[window-win_multiaggr_library--Results] [GOOD] >> TPDiskRaces::OwnerRecreationRaces [GOOD] >> TPDiskTest::CommitDeleteChunks |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T17:45:43.018225Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.018235Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.018239Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.018349Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:45:43.018363Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.018366Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.019007Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007229s 2024-11-21T17:45:43.019147Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.019265Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:45:43.019279Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.019573Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.019577Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.019580Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.019630Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:45:43.019639Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.019641Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.019654Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006516s 2024-11-21T17:45:43.024317Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:45:43.024433Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:45:43.024456Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.031453Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.031461Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.031466Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.037425Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:45:43.037448Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.037452Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.037476Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.167624s 2024-11-21T17:45:43.037639Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.038224Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:45:43.038241Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.043825Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.043832Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.043837Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.050475Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:45:43.050498Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.050503Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.050526Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.230219s 2024-11-21T17:45:43.050645Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.050733Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:45:43.050745Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.051074Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.051078Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.051082Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.053379Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.053514Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.054980Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.055098Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T17:45:43.055105Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.055108Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.055128Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.170792s 2024-11-21T17:45:43.055180Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:45:43.057182Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.057188Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.057191Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.057257Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.057358Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.057404Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.057484Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:43.166528Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.166635Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:45:43.166668Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.166675Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:45:43.166696Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:45:43.268342Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:45:43.268410Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:45:43.268809Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.268814Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.268817Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.268874Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.269014Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.269063Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.270132Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:43.371566Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.371631Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:45:43.371645Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.371651Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2024-11-21T17:45:43.371676Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T17:45:43.371699Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:45:43.371867Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:45:43.371890Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:45:43.371918Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TRtmrTestReboots::CreateRtmrVolumeWithReboots >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> TPDiskTest::CommitDeleteChunks [GOOD] >> TPDiskTest::DeviceHaltTooLong >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle |75.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> test.py::test[window-win_func_rank_by_opt_part--ForceBlocks] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Plan] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Plan] >> test.py::test[bigdate-tzcasts-default.txt-Results] [GOOD] >> test.py::test[binding-named_expr_input-default.txt-Debug] >> test.py::test[window-win_func_rank_by_opt_part--Plan] [GOOD] >> test.py::test[window-win_func_rank_by_opt_part--Results] |75.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Plan] [GOOD] >> test.py::test[binding-drop_binding--Debug] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] >> test.py::test[binding-drop_binding--Plan] [GOOD] >> test.py::test[binding-drop_binding--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T17:45:43.317748Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.317758Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, 
ReadSizeServerDelta = 0 2024-11-21T17:45:43.317762Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.317869Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.318017Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.319480Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.319605Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:43.319983Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:43.320054Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:43.320116Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T17:45:43.320122Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:43.320140Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.320145Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T17:45:43.320153Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:45:43.320156Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:45:43.320890Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.320894Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.320897Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.321772Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.327656Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.327743Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.328264Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T17:45:43.328500Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:43.328525Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:45:43.328586Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:45:43.328593Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:45:43.328688Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.328694Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:45:43.328703Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:45:43.328741Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2024-11-21T17:45:43.328763Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:45:43.328766Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:45:43.328768Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:43.328783Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2024-11-21T17:45:43.328792Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:45:43.328795Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:45:43.328798Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:45:43.328806Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2024-11-21T17:45:43.328815Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:45:43.328818Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:45:43.328821Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:43.328832Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2024-11-21T17:45:43.363886Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.363892Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.363896Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.363975Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.364107Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.364178Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.364252Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T17:45:43.364447Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:45:43.364474Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:45:43.364534Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:45:43.364554Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:45:43.364582Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.364592Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:45:43.364625Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2024-11-21T17:45:43.364645Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:45:43.364648Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:45:43.364656Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2024-11-21T17:45:43.364665Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:45:43.364670Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:45:43.364677Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2024-11-21T17:45:43.364687Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:45:43.364690Z :DEBUG: [db] [sessionid] [cluster] ... estTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:43.845976Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2024-11-21T17:45:43.900642Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:45:43.900650Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:45:43.900655Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.900760Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.900892Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.900945Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:45:43.902291Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T17:45:43.942439Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T17:45:43.942522Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.942530Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:45:43.942536Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:45:43.942539Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T17:45:43.942546Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T17:45:43.942550Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T17:45:43.942554Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T17:45:43.942558Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T17:45:43.942564Z :DEBUG: Take Data. 
Partition 1. Read: {1, 2} (9-9) 2024-11-21T17:45:43.942567Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T17:45:43.942580Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T17:45:43.942646Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:45:43.944519Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2024-11-21T17:45:43.946549Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.946556Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.946559Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.946624Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.946738Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.946777Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.946832Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:43.946891Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T17:45:43.947377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.947382Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.947384Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:43.947442Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:43.947531Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:43.947566Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.947722Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:43.947769Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:43.947794Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:43.947802Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:45:43.947828Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 |75.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |75.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |75.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |75.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> test.py::test[aggregate-group_by_expr_lookup--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_where-default.txt-Debug] |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[select-select_all_group_by_column--Results] [GOOD] >> test.py::test[select-struct_access_without_table_name--Debug] >> test.py::test[binding-drop_binding--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Debug] |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-Debug] >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[bigdate-round-default.txt-Debug] [GOOD] >> test.py::test[bigdate-round-default.txt-Plan] [GOOD] >> test.py::test[bigdate-round-default.txt-Results] |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[pg-select_join_right_const-default.txt-Results] [GOOD] >> test.py::test[pg-select_join_right_equi_and_const-default.txt-Debug] >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test.py::test[binding-named_expr_input-default.txt-Debug] [GOOD] >> test.py::test[binding-named_expr_input-default.txt-Plan] [GOOD] |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> test.py::test[binding-named_expr_input-default.txt-Results] >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> test.py::test[window-win_fuse_window-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Analyze] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Debug] >> TSchemeShardSplitBySizeTest::MergeIndexTableShards >> test.py::test[join-premap_common_left_cross--Debug] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Debug] |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ncloud/impl/ut/unittest |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_split_merge/unittest >> test.py::test[window-win_over_few_partitions_other--Analyze] >> TSchemeShardSplitByLoad::TableSplitsUpToMaxPartitionsCount >> test.py::test[join-premap_common_left_cross--Plan] [GOOD] >> test.py::test[join-premap_common_left_cross--Results] >> TSchemeShardSplitBySizeTest::Merge1KShards |75.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2024-11-21T17:45:25.634842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:25.638047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:25.638075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002207/r3tmp/tmpPikuva/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8831, node 1 TClient is connected to server localhost:11170 2024-11-21T17:45:25.992015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:26.026233Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:26.028021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:26.028031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:26.028034Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:26.028090Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:26.070056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:26.070295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:26.081374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:26.188529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:45:26.210245Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:653:2546], Recipient [1:706:2591]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:45:26.210426Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:653:2546], Recipient [1:706:2591]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:45:26.210476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:706:2591];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:45:26.213129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:706:2591];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:45:26.213193Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T17:45:26.213775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:45:26.213820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:45:26.213866Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:45:26.213886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:45:26.213902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:45:26.213923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:45:26.213939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:45:26.213959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:45:26.213977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:45:26.213995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:45:26.214011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:45:26.214028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:706:2591];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:45:26.217866Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:653:2546], Recipient [1:706:2591]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:45:26.218246Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T17:45:26.218298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:45:26.218308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:45:26.218344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:45:26.218371Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:45:26.218382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:45:26.218387Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:45:26.218395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:45:26.218403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:45:26.218409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:45:26.218413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:45:26.218429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:45:26.218436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:45:26.218442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:45:26.218445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:45:26.218453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:45:26.218459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:45:26.218466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:45:26.218469Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:45:26.218481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:45:26.218490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:45:26.218493Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:45:26.218500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:45:26.218506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:45:26.218509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:45:26.218548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=20; 2024-11-21T17:45:26.218561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=9; 2024-11-21T17:45:26.218571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72 ... bytes=0; at tablet 72075186224037888 2024-11-21T17:45:43.122006Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:706:2591];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037888;external_task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=72075186224037888; TEvBlobStorage::TEvPut tId=72075186224037888;c=1;:73/0:size=3543;count=16;;1:size=59793;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445744;count=1;;7:size=1445360;count=1;;8:size=1445928;count=1;;9:size=1445448;count=1;;10:size=1445376;count=1;;11:size=1445528;count=1;;12:size=4720176;count=8;;13:size=849680;count=1;;14:size=1445408;count=1;;15:size=1445608;count=1;;16:size=1445400;count=1;;17:size=1445920;count=1;;18:size=808584;count=1;;19:size=1761752;count=2;;20:size=1806632;count=3;;21:size=1761880;count=3;;22:size=978968;count=1;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72075186224037888;c=0;:73/0:size=3612;count=17;;1:size=59793;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445744;count=1;;7:size=1445360;count=1;;8:size=1445928;count=1;;9:size=1445448;count=1;;10:size=1445376;count=1;;11:size=1445528;count=1;;12:size=4720176;count=8;;13:size=849680;count=1;;14:size=1445408;count=1;;15:size=1445608;count=1;;16:size=1445400;count=1;;17:size=1445920;count=1;;18:size=808584;count=1;;19:size=1761752;count=2;;20:size=1806632;count=3;;21:size=1761880;count=3;;22:size=978968;count=1;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2024-11-21T17:45:43.132680Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2024-11-21T17:45:43.132704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=with_appended.cpp:80;portions=32,;task_id=6df52fda-a83011ef-81dae7d9-e367cb99; 2024-11-21T17:45:43.132786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1945216;portion_bytes=1945216;portion_raw_bytes=65757812; 2024-11-21T17:45:43.132803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=3;portion=30;before_size=18270304;after_size=16325088;before_rows=517509;after_rows=462321; 2024-11-21T17:45:43.132808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1945216;portion_bytes=1945216;portion_raw_bytes=65757812; 2024-11-21T17:45:43.132828Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=485688;portion_bytes=485688;portion_raw_bytes=16432836; 2024-11-21T17:45:43.132835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=3;portion=31;before_size=16325088;after_size=15839400;before_rows=462321;after_rows=448530; 2024-11-21T17:45:43.132839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=485688;portion_bytes=485688;portion_raw_bytes=16432836; 2024-11-21T17:45:43.132845Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=2430936;portion_bytes=2430936;portion_raw_bytes=82190648; 2024-11-21T17:45:43.132851Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=3;portion=32;before_size=15839400;after_size=18270336;before_rows=448530;after_rows=517509; 2024-11-21T17:45:43.132855Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=2430936;portion_bytes=2430936;portion_raw_bytes=82190648; 2024-11-21T17:45:43.132869Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:32;path_id:3;records_count:68979;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:2430936;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2024-11-21T17:45:43.132939Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=tiering.cpp:48;tiering_info=__DEFAULT/0.000000s;$$DELETE/489823.000000s;; 2024-11-21T17:45:43.132957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=manager.cpp:14;event=unlock;process_id=CS::GENERAL::6df52fda-a83011ef-81dae7d9-e367cb99; 2024-11-21T17:45:43.132972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;fline=granule.cpp:99;event=OnCompactionFinished;info=(granule:3;path_id:3;size:18270336;portions_count:32;); 2024-11-21T17:45:43.132978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:45:43.132991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:45:43.133009Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T17:45:43.133019Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2024-11-21T17:45:43.133025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:45:43.133036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:45:43.133045Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 
2024-11-21T17:45:43.133051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:45:43.133067Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:45:43.133119Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:20 Blob count: 1 VERIFY failed (2024-11-21T17:45:43.133136Z): tablet_id=72075186224037888;task_id=6df52fda-a83011ef-81dae7d9-e367cb99;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+459 (0x122C1CAB) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x122B9417) ??+0 (0x128DD584) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+1472 (0x23DCC080) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+1993 (0x1C34D719) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+302 (0x141714DE) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+1340 (0x1416B8DC) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+1314 (0x140D0572) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+786 (0x140AFA12) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+1981 (0x1DCC5EBD) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+5273 (0x1DCC2E19) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x1DCC1881) NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode const&) const+3899 (0x1E038FEB) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+1650 (0x1219AA22) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x121A1407) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x1241788E) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+419 (0x121A0D23) NUnitTest::TTestFactory::Execute()+803 (0x12418003) NUnitTest::RunMain(int, char**)+3005 (0x1242B22D) ??+0 (0x7F8CB32B1D90) __libc_start_main+128 (0x7F8CB32B1E40) _start+41 (0x11601029) >> TSchemeShardSplitBySizeTest::Split10Shards >> test.py::test[window-win_func_rank_by_opt_part--Results] [GOOD] >> test.py::test[window-yql-14277-default.txt-Analyze] >> test.py::test[bigdate-round-default.txt-Results] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_nth_value-default.txt-Results] >> test.py::test[binding-named_expr_input-default.txt-Results] [GOOD] >> test.py::test[binding-table_filter_strict_binding-default.txt-Debug] >> test.py::test[blocks-add_uint64_opt2--Debug] [GOOD] >> 
test.py::test[blocks-add_uint64_opt2--Plan] [GOOD]
>> test.py::test[blocks-add_uint64_opt2--Results]
>> test.py::test[pg-select_join_right_equi_and_const-default.txt-Debug] [GOOD]
>> test.py::test[pg-select_join_right_equi_and_const-default.txt-Plan] [GOOD]
>> test.py::test[pg-select_join_right_equi_and_const-default.txt-Results]
|75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard
>> TSchemeShardSplitBySizeTest::Merge111Shards
>> TBackupCollectionTests::CreateAbsolutePath
>> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD]
>> TConsoleTests::TestCreateSubSubDomain
|75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> test.py::test[select-struct_access_without_table_name--Debug] [GOOD]
>> TBackupCollectionTests::HiddenByFeatureFlag
>> test.py::test[select-struct_access_without_table_name--Plan] [GOOD]
>> test.py::test[select-struct_access_without_table_name--Results]
>> KqpLimits::BigParameter
>> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorSubProcessor
>> TSchemeShardSplitBySizeTest::SplitShardsWhithPgKey
>> KqpLimits::QueryReplySize
|75.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[window-win_multiaggr_library--Results] [GOOD]
|75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TBackupCollectionTests::CreateAbsolutePath [GOOD]
>> TBackupCollectionTests::Create
>> test.py::test[window-win_over_few_partitions_other--Analyze] [GOOD]
>> TBackupCollectionTests::HiddenByFeatureFlag [GOOD]
>> TBackupCollectionTests::DisallowedPath
>> test.py::test[window-yql-14277-default.txt-Analyze] [GOOD]
>> test.py::test[window-yql-14277-default.txt-Debug]
>> test.py::test[window-win_over_few_partitions_other--Debug]
>> test.py::test[aggregate-group_by_expr_with_where-default.txt-Debug] [GOOD]
>> test.py::test[aggregate-group_by_expr_with_where-default.txt-Plan] [GOOD]
>> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results]
>> KqpTypes::Time64Columns+EnableTableDatetime64
>> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard [GOOD]
>> KqpLimits::CancelAfterRoTxWithFollowerLegacyDependedRead
>> test.py::test[agg_phases-max_null-default.txt-Results] [GOOD]
>> test.py::test[agg_phases-min_by-default.txt-Debug]
>> KqpTypes::UnsafeTimestampCastV0
>> TBackupCollectionTests::Create [GOOD]
>> TBackupCollectionTests::CreateTwice
>> TBackupCollectionTests::DisallowedPath [GOOD]
>> TBackupCollectionTests::ParallelCreate
>> test.py::test[blocks-add_uint64_opt2--Results] [GOOD]
>> test.py::test[blocks-block_input_various_types--Debug]
>> test.py::test[blocks-block_input_various_types--Debug] [SKIPPED]
>> test.py::test[blocks-block_input_various_types--Plan] [SKIPPED]
>> test.py::test[blocks-block_input_various_types--Results] [SKIPPED]
>> test.py::test[blocks-coalesce_ints--Debug]
>> test.py::test[pg-select_join_right_equi_and_const-default.txt-Results] [GOOD]
>> test.py::test[pg-select_proj_ref_distinct_on_qstar-default.txt-Debug]
>> TBackupCollectionTests::ParallelCreate [GOOD]
>> TBackupCollectionTests::Drop
>> TBackupCollectionTests::CreateTwice [GOOD]
>> TBackupCollectionTests::BackupAbsentCollection
>> test.py::test[binding-table_filter_strict_binding-default.txt-Debug] [GOOD]
>> test.py::test[binding-table_filter_strict_binding-default.txt-Plan] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:46.626843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:46.626864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.626867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:46.626870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:46.626879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:46.626881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:46.626886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.626949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:46.636166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:46.636188Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:46.639504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:46.640097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:46.640123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:46.641236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:46.641394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:46.641468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.641530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:46.642267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.642476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:46.642485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.642518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:46.642525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.642531Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:46.642542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.643577Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:46.658759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:46.658846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.658901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:46.658962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:46.658967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.677098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.677144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:46.677242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.677255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:46.677260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:46.677266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:46.680451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.680477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:46.680485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:46.687864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.687887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.687895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.687903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.688572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 
} ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:46.696601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:46.696677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:46.696892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.696938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:46.696947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.697016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:46.697026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.697057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.697070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:46.701262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:46.701278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.701331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.701337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:46.701418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.701427Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:46.701441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:46.701446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.701453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:46.701459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.701463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:46.701468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T17:45:46.701488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:46.701494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:46.701498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:46.701898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.701913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.701918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:46.701923Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:46.701928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.701957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... :47.091120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:507:2451], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:45:47.091198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.091204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:47.091209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409546 on partitioning changed splitOp# 102 at tablet 72057594046678944 2024-11-21T17:45:47.091291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:45:47.091297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:45:47.091300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:45:47.091304Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:45:47.091308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:45:47.091318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:45:47.092644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269553158 2024-11-21T17:45:47.092975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:45:47.093394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: OperationCookie: 102 TabletId: 72075186233409546 2024-11-21T17:45:47.093407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 102:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:45:47.093427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:45:47.093431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:45:47.093438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:45:47.093445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:45:47.093452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:45:47.093456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:45:47.093490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:45:47.093996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.094008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 102:0 Leader for TabletID 72057594046678944 is [1:458:2412] sender: [1:668:2058] recipient: [1:15:2062] 2024-11-21T17:45:47.094368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } TabletId: 72075186233409546 State: 4 2024-11-21T17:45:47.094383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:45:47.094821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:45:47.094924Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:45:47.095235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:47.095287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:45:47.095894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:45:47.095911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 TestWaitNotification wait txId: 102 2024-11-21T17:45:47.111485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:45:47.111502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T17:45:47.111518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:45:47.111522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:45:47.111625Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:45:47.111661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:45:47.111666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:678:2593] 2024-11-21T17:45:47.111674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:45:47.111689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:45:47.111693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:678:2593] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 wait until 72075186233409546 is deleted 2024-11-21T17:45:47.111760Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2024-11-21T17:45:47.111849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:45:47.111901Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 62us result status StatusSuccess 2024-11-21T17:45:47.112097Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "Key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnNames: "Value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\001\000\000\000A\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> TBackupCollectionTests::BackupDroppedCollection >> test.py::test[select-struct_access_without_table_name--Results] [GOOD] >> test.py::test[select-trivial_between-default.txt-Debug] >> test.py::test[weak_field-weak_field_in_group_by--Debug] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Plan] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Results] >> KqpQuery::QueryCacheTtl >> 
TBackupCollectionTests::DropTwice [GOOD]
>> TBackupCollectionTests::TableWithSystemColumns
>> KqpParams::ExplicitSameParameterTypesQueryCacheCheck
>> KqpParams::CheckQueryCacheForPreparedQuery
>> TBackupCollectionTests::BackupDroppedCollection [GOOD]
>> TBackupCollectionTests::BackupAbsentDirs
>> KqpTypes::UnsafeTimestampCastV0 [GOOD]
>> KqpTypes::UnsafeTimestampCastV1
>> test.py::test[window-win_func_nth_value-default.txt-Results] [GOOD]
>> test.py::test[window-win_func_part_by_expr_new-default.txt-Debug]
>> test.py::test[aggregate-group_by_expr_with_where-default.txt-Results] [GOOD]
>> test.py::test[aggregate-group_by_gs_and_having-default.txt-Debug]
>> TBackupCollectionTests::BackupAbsentDirs [GOOD]
>> TBackupCollectionTests::TableWithSystemColumns [GOOD]
>> test.py::test[join-premap_common_left_cross--Results] [GOOD]
>> test.py::test[join-pullup_null_column-off-Debug] [SKIPPED]
>> test.py::test[join-pullup_null_column-off-Plan] [SKIPPED]
>> test.py::test[binding-table_filter_strict_binding-default.txt-Results] [GOOD]
>> test.py::test[bitcast_implicit-div_bitcast-default.txt-Debug]
>> KqpLimits::BigParameter [GOOD]
>> KqpLimits::CancelAfterRoTx
>> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD]
>> test.py::test[agg_phases-min_by-default.txt-Debug] [GOOD]
>> test.py::test[agg_phases-min_by-default.txt-Plan] [GOOD]
>> test.py::test[agg_phases-min_by-default.txt-Results]
>> test.py::test[join-pullup_null_column-off-Results] [SKIPPED]
>> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Debug] [SKIPPED]
>> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Plan] [SKIPPED]
>> test.py::test[join-pushdown_filter_over_inner_with_assume_strict-off-Results] [SKIPPED]
>> test.py::test[join-selfjoin_on_sorted-off-Debug]
>> test.py::test[pg-select_proj_ref_distinct_on_qstar-default.txt-Debug] [GOOD]
>> test.py::test[pg-select_proj_ref_distinct_on_qstar-default.txt-Plan] [GOOD]
>> test.py::test[pg-select_proj_ref_distinct_on_qstar-default.txt-Results]
|75.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
|75.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
|75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
>> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD]
>> TConsoleTests::TestAlterServerlessTenant
>> test.py::test[join-selfjoin_on_sorted-off-Debug] [SKIPPED]
>> test.py::test[join-selfjoin_on_sorted-off-Plan] [SKIPPED]
>> test.py::test[join-selfjoin_on_sorted-off-Results] [SKIPPED]
>> test.py::test[join-star_join-off-Debug] [SKIPPED]
>> test.py::test[join-star_join-off-Plan] [SKIPPED]
>> test.py::test[join-star_join-off-Results] [SKIPPED]
>> test.py::test[join-star_join_inners_vk_sorted-off-Debug] [SKIPPED]
|75.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[bigdate-round-default.txt-Results] [GOOD]
>> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD]
>> KqpStats::RequestUnitForSuccessExplicitPrepare
>> test.py::test[join-star_join_inners_vk_sorted-off-Plan] [SKIPPED]
>> test.py::test[join-star_join_inners_vk_sorted-off-Results] [SKIPPED]
>> test.py::test[join-strict_keys--Debug] [SKIPPED]
>> test.py::test[join-strict_keys--Plan] [SKIPPED]
>> test.py::test[join-strict_keys--Results] >> 
test.py::test[window-win_over_few_partitions_other--Debug] [GOOD] >> test.py::test[window-win_over_few_partitions_other--ForceBlocks] >> test.py::test[window-yql-14277-default.txt-Debug] [GOOD] >> test.py::test[window-yql-14277-default.txt-ForceBlocks] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::ReadsetCountLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupAbsentDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:46.846635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:46.846666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.846670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:46.846674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:46.846680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:46.846683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:46.846691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.846763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:46.857617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:46.857638Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:46.860425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:46.861311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:46.861360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:46.862780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:46.862970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:46.863073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.863193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:46.864065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.864333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:46.864346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.864389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:46.864397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.864402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:46.864419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.865701Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:46.883626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:46.883752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.883817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:46.883864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:46.883871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.884643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.884668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:46.884732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.884742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:46.884746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:46.884751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:46.885131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.885143Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:46.885147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:46.885533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.885544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.885549Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.885556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.886091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:46.886496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:46.886546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:46.886715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.886739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:46.886754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.886823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:46.886829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.886855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.886868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:46.887242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:46.887251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.887309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.887315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:46.887406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.887414Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:46.887425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:46.887432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T17:45:46.887437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:46.887442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.887446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:46.887449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:46.887462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:46.887467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:46.887471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:46.887747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.887763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.887768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:46.887773Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:46.887777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.887790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
D TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.634761Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:45:48.634765Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:45:48.634769Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T17:45:48.634774Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:45:48.634777Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:45:48.634780Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:45:48.634790Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:45:48.634794Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T17:45:48.634798Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T17:45:48.634801Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 4], 1 2024-11-21T17:45:48.634935Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:201:2204], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 5 } 2024-11-21T17:45:48.634942Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:45:48.634952Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:45:48.634959Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:45:48.634963Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:45:48.634966Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:45:48.634971Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:45:48.634982Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.635210Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:201:2204], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Version: 1 } 2024-11-21T17:45:48.635221Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:45:48.635230Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 
2024-11-21T17:45:48.635239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:45:48.635242Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:45:48.635246Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 1 2024-11-21T17:45:48.635249Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:45:48.635259Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T17:45:48.635262Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.635538Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.635651Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:45:48.635657Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.635854Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:45:48.635862Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:45:48.635905Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:45:48.635910Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:45:48.635955Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:352:2344], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:48.635959Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:48.635962Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:45:48.635979Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:295:2287], Recipient [6:122:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 103 2024-11-21T17:45:48.635982Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:45:48.635990Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:45:48.636005Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:45:48.636008Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:350:2342] 2024-11-21T17:45:48.636065Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:352:2344], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:48.636069Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 
2024-11-21T17:45:48.636078Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2024-11-21T17:45:48.636135Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:353:2345], Recipient [6:122:2148]: {TEvModifySchemeTransaction txid# 104 TabletId# 72057594046678944} 2024-11-21T17:45:48.636139Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:45:48.636657Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupBackupCollection BackupBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:48.636727Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 104:0, explain: Check failed: path: '/MyRoot/Table1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp:111, at schemeshard: 72057594046678944 2024-11-21T17:45:48.636733Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Table1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp:111, at schemeshard: 72057594046678944 2024-11-21T17:45:48.636777Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.637139Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp:111" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:48.637166Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Table1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp:111, operation: BACKUP, path: /MyRoot/.backups/collections/MyCollection1 2024-11-21T17:45:48.637188Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:45:48.637233Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:45:48.637239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:45:48.637283Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:359:2351], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:48.637288Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:48.637292Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:45:48.637307Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:295:2287], Recipient [6:122:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2024-11-21T17:45:48.637310Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:45:48.637321Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:45:48.637336Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:45:48.637339Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [6:357:2349] 2024-11-21T17:45:48.637354Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:359:2351], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:48.637358Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:48.637361Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:46.972043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:46.972079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.972085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:46.972090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:46.972096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:46.972100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:46.972111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.972202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:46.984587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:46.984615Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:46.987825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:46.988712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:46.988769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:46.990211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:46.990386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:46.990492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.990587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:46.991433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.991728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:46.991740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.991783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:46.991791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.991798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:46.991813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.993161Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:47.011984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:47.012094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.012173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:47.012244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:47.012254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.013134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:47.013176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:47.013241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.013254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:47.013258Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:47.013264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:47.013664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.013675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:47.013680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:47.013984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.013994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.014000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:47.014008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:47.014577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:47.014912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:47.014969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:47.015166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:47.015191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:47.015209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:47.015275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:47.015282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:47.015317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:47.015330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:47.015681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:47.015690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:47.015742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:47.015748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:47.015842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.015850Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:47.015863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:47.015867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:47.015874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:47.015879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:47.015885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:47.015889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:47.015900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:47.015906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:47.015911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:47.016201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:47.016214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:47.016219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:47.016224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:47.016250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:47.016265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 274 } } 2024-11-21T17:45:48.818433Z node 6 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 274 } } 2024-11-21T17:45:48.818436Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.818478Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:201:2204], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 7] Version: 5 } 2024-11-21T17:45:48.818482Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:45:48.818489Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:45:48.818496Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:45:48.818500Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2024-11-21T17:45:48.818504Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 5 2024-11-21T17:45:48.818507Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T17:45:48.818515Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.818566Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:635:2588], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:48.818571Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:45:48.818574Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:45:48.818592Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:201:2204], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2024-11-21T17:45:48.818595Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:45:48.818601Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:45:48.818610Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 
Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:45:48.818613Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2024-11-21T17:45:48.818616Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2024-11-21T17:45:48.818619Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2024-11-21T17:45:48.818625Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2024-11-21T17:45:48.818628Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.818718Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:575:2535], Recipient [6:122:2148]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 575 RawX2: 25769806311 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T17:45:48.818724Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2024-11-21T17:45:48.818732Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 575 RawX2: 25769806311 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T17:45:48.818735Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2024-11-21T17:45:48.818747Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 575 RawX2: 25769806311 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T17:45:48.818751Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:45:48.818758Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 575 RawX2: 25769806311 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T17:45:48.818767Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:48.818770Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T17:45:48.818774Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:45:48.818779Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2024-11-21T17:45:48.818797Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.819171Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.819193Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.819604Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:45:48.819611Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.819629Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T17:45:48.819665Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.819678Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:45:48.819681Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.819690Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:45:48.819693Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.819705Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T17:45:48.819708Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.819712Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2024-11-21T17:45:48.819726Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:575:2535] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2024-11-21T17:45:48.819775Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:122:2148], Recipient [6:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:45:48.819779Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:45:48.819784Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T17:45:48.819789Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2024-11-21T17:45:48.819797Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:45:48.819801Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2024-11-21T17:45:48.819805Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2024-11-21T17:45:48.819810Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2024-11-21T17:45:48.819819Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:295:2287] message: TxId: 106 2024-11-21T17:45:48.819824Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2024-11-21T17:45:48.819831Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T17:45:48.819835Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T17:45:48.819844Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T17:45:48.819848Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1 2024-11-21T17:45:48.819851Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1 2024-11-21T17:45:48.819865Z node 6 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2024-11-21T17:45:48.820165Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:45:48.820180Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:295:2287] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2024-11-21T17:45:48.820210Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:45:48.820215Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:606:2559] 2024-11-21T17:45:48.820275Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:608:2561], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:48.820282Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:48.820285Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 12754, MsgBus: 29178 2024-11-21T17:45:47.235533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790226908630072:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:47.235749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168e/r3tmp/tmp8ohB4u/pdisk_1.dat 2024-11-21T17:45:47.299955Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12754, node 1 2024-11-21T17:45:47.311391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:47.311405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:47.311407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:47.311448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29178 2024-11-21T17:45:47.376758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:47.376789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:47.377823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29178 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:47.409850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.412634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:47.644382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226908630685:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.644442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.651510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.725023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226908630785:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.725060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.725285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226908630790:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.726450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:45:47.728925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:45:47.728988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790226908630792:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 10625, MsgBus: 27462 2024-11-21T17:45:48.217941Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790233469041831:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.220062Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168e/r3tmp/tmp53bZA7/pdisk_1.dat 2024-11-21T17:45:48.234196Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10625, node 2 2024-11-21T17:45:48.252401Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:48.252415Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:48.252416Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:48.252454Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27462 TClient is connected to server localhost:27462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:48.323862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:48.323895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:48.324297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.324772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:48.326480Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:48.564662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790233469042278:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.564728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpStats::JoinNoStatsYql >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> test.py::test[blocks-coalesce_ints--Debug] [GOOD] >> test.py::test[weak_field-weak_field_in_group_by--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Debug] >> test.py::test[blocks-coalesce_ints--Plan] [GOOD] >> test.py::test[blocks-coalesce_ints--Results] >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants |75.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[pg-select_proj_ref_distinct_on_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_qstar-default.txt-Debug] >> test.py::test[pg-tpch-q21-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Debug] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Plan] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] >> test.py::test[bitcast_implicit-div_bitcast-default.txt-Debug] [GOOD] >> test.py::test[bitcast_implicit-div_bitcast-default.txt-Plan] [GOOD] >> test.py::test[bitcast_implicit-div_bitcast-default.txt-Results] >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute |75.1%| [TA] $(B)/ydb/library/yql/tests/sql/dq_file/part17/test-results/pytest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::UnsafeTimestampCastV1 [GOOD] Test command err: Trying to start YDB, gRPC: 63943, MsgBus: 4649 2024-11-21T17:45:47.509515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790228684027298:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:47.509597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168c/r3tmp/tmpfyxJgw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63943, node 1 2024-11-21T17:45:47.564101Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:47.565997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:47.566005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:47.566006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:47.566029Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4649 TClient is connected to server localhost:4649 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:45:47.612111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:47.612141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:47.612951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:47.634756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.638556Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:47.649618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.673480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.722655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.736955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.927866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790228684028691:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.927916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.932249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.945992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.955661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.971049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.985141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.000812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.016920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790232978996491:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.016975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.017148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790232978996498:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.017921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:48.026322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790232978996500:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:48.310766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 1377, MsgBus: 2354 2024-11-21T17:45:48.605113Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790234119702637:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168c/r3tmp/tmpF3n23G/pdisk_1.dat 2024-11-21T17:45:48.611978Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:48.626732Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1377, node 2 2024-11-21T17:45:48.637224Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:48.637236Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:48.637237Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:48.637271Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2354 TClient is connected to server localhost:2354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:45:48.712398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:48.712426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:48.712670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:48.712709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.715648Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.725902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.789594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:48.839619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.881649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.035587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790238414671339:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.035737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.039141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.052732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.063930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.118595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.133295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.145468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.165760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790238414671848:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.165788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.165943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790238414671853:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.166767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:49.173241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790238414671855:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:49.397598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.412339Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790238414672228:2465], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2024-11-21T17:45:49.412760Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWRiOTU2ODQtOWZhMWU0NTMtMjc3Y2YxNC0xNjgzY2My, ActorId: [2:7439790238414672151:2454], ActorState: ExecuteState, TraceId: 01jd7x92k148nfexphmycec2nd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. >> test.py::test[join-strict_keys--Results] [GOOD] >> test.py::test[select-trivial_between-default.txt-Debug] [GOOD] >> test.py::test[select-trivial_between-default.txt-Plan] [GOOD] >> test.py::test[select-trivial_between-default.txt-Results] >> test.py::test[join-yql-14847-off-Debug] [SKIPPED] >> test.py::test[join-yql-14847-off-Plan] [SKIPPED] >> test.py::test[join-yql-14847-off-Results] [SKIPPED] >> test.py::test[join-yql-8980--Debug] >> KqpExplain::SqlIn >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::Decimal+QueryService |75.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |75.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::WaitCAsStateOnAbort >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService >> KqpQuery::RewriteIfPresentToMap >> test.py::test[bitcast_implicit-div_bitcast-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--Debug] >> KqpErrors::ProposeError >> test.py::test[window-yql-14277-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14277-default.txt-Plan] [GOOD] >> test.py::test[window-yql-14277-default.txt-Results] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> test.py::test[blocks-coalesce_ints--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--Debug] >> test.py::test[weak_field-weak_field_long_name--Debug] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Plan] >> test.py::test[pg-select_proj_ref_group_by_qstar-default.txt-Debug] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_qstar-default.txt-Plan] [GOOD] >> test.py::test[pg-select_proj_ref_group_by_qstar-default.txt-Results] >> test.py::test[weak_field-weak_field_long_name--Plan] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] >> test.py::test[window-win_over_few_partitions_other--ForceBlocks] [GOOD] >> test.py::test[window-win_over_few_partitions_other--Plan] [GOOD] >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> KqpStats::RequestUnitForExecute [GOOD] >> test.py::test[window-win_over_few_partitions_other--Results] >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> test.py::test[select-trivial_between-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-many-Debug] >> KqpParams::Decimal+QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] >> KqpExplain::SqlIn [GOOD] Test command err: Trying to start YDB, gRPC: 21974, MsgBus: 23652 2024-11-21T17:45:48.095277Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790230624676619:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.095332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168a/r3tmp/tmpg6y7Z7/pdisk_1.dat 2024-11-21T17:45:48.196607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:48.196644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:48.199585Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:48.202313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21974, node 1 2024-11-21T17:45:48.216391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:48.216405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:48.216406Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:48.216439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23652 TClient is connected to server localhost:23652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:48.339265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.342703Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.361125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.379294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.405161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.418170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
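The "Unsafe conversion integral value to Timestamp" warning (code 1102) and the "Unsafe timestamp cast restricted from SQL v1" compile errors reported above both come from converting a plain integral value to Timestamp, typically when an integer literal is written into a Timestamp column. A minimal YQL sketch of the pattern, using hypothetical table and column names rather than the test's actual query:

    -- implicit integral -> Timestamp conversion: warning 1102, or a compile
    -- error where the unsafe cast is restricted in SQL v1
    UPSERT INTO `/Root/Events` (Key, EventTime)
    VALUES (1, 1637760000000000);

    -- an explicit conversion avoids the unsafe cast
    UPSERT INTO `/Root/Events` (Key, EventTime)
    VALUES (1, CAST(1637760000000000 AS Timestamp));
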
2024-11-21T17:45:48.556421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790230624678009:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.556544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.562951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.571371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.582671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.597067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.608307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.625592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.641063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790230624678526:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.641083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.641241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790230624678531:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.641975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:48.650324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790230624678533:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:48.878414Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790230624678826:2459], status: GENERIC_ERROR, issues:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2024-11-21T17:45:48.878653Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQzMmYxOTQtMmZhZDM5ZDktMTU4MGE5N2QtNjg2MjZmMDI=, ActorId: [1:7439790230624678798:2454], ActorState: ExecuteState, TraceId: 01jd7x922cc4csmy429jaxkgzx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... Trying to start YDB, gRPC: 5533, MsgBus: 1385 2024-11-21T17:45:49.253082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168a/r3tmp/tmpGaoMry/pdisk_1.dat 2024-11-21T17:45:49.260712Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5533, node 2 2024-11-21T17:45:49.280738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:49.280750Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:49.280752Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:49.280790Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1385 TClient is connected to server localhost:1385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:45:49.348506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:49.348543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:49.348938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.351192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:49.361243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.420812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.478913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
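The GENERIC_ERROR with "Unexpected token 'INCORRECT_STMT'" earlier in this test output is a plain parse-time rejection: the text sent to the compiler contains a token the SQL v1 grammar cannot match. A sketch of the kind of input that produces it (the real test text and the reported :2:8 position may differ):

    SELECT 1;
    INCORRECT_STMT;   -- not a recognized statement, rejected by the parser
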
2024-11-21T17:45:49.489581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.568059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790235405460240:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.568092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.571109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.583412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.593739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.600562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.615105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.632198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.661430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790235405460745:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.661470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.661559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790235405460750:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.662342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:49.664176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790235405460752:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 11868, MsgBus: 10995 2024-11-21T17:45:50.162116Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790240420876712:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168a/r3tmp/tmp72DyM9/pdisk_1.dat 2024-11-21T17:45:50.175830Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:50.181411Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11868, node 3 2024-11-21T17:45:50.191099Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:50.191111Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:50.191112Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:50.191142Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10995 TClient is connected to server localhost:10995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:50.262199Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:50.262231Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:50.262863Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:50.263543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.266314Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.272416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.283999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
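The repeated "Resource pool default not found or you don't have access permissions" warnings are emitted while the workload service looks up the default resource pool before TPoolCreatorActor has finished creating it; the "Transaction ... completed, doublechecking" retry is that creation being re-verified once the transaction commits. For reference, a resource pool can also be defined explicitly; a minimal sketch, assuming the CREATE RESOURCE POOL syntax and these setting names are supported by this build:

    -- hypothetical pool name and limits
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
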
2024-11-21T17:45:50.307477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.325241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.562703Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790240420878105:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.562724Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.568402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.577897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.601733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.620533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.631402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.646759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.664696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790240420878611:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.664723Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.664890Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790240420878616:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.665772Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:50.674579Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790240420878618:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Consumed units: 43 Consumed units: 6 >> KqpExplain::SsaProgramInJsonPlan >> KqpErrors::ProposeResultLost_RwTx >> test.py::test[window-win_func_part_by_expr_new-default.txt-Debug] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Plan] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Plan] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] >> test.py::test[pg-select_proj_ref_group_by_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Debug] >> test.py::test[weak_field-weak_field_long_name--Results] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Debug] >> KqpParams::Decimal-QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27318, MsgBus: 23703 2024-11-21T17:45:48.525576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790233878847480:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.525653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001685/r3tmp/tmpl8YtMH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27318, node 1 2024-11-21T17:45:48.589937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:48.589951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:48.589953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:48.589985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:48.590272Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23703 2024-11-21T17:45:48.624121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:48.624150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:48.627430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:45:48.694088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.697205Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:48.713214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:48.781653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.811671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.853192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.948432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790233878848868:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.948474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.960883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.979376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.997225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.007569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.029238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.042525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.059090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790238173816677:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.059107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.059211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790238173816682:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.059957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:49.068430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790238173816684:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 10586, MsgBus: 25464 2024-11-21T17:45:49.604187Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790238649926790:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001685/r3tmp/tmpsQgdPD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10586, node 2 2024-11-21T17:45:49.630231Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:49.634587Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:49.634852Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:49.634860Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:49.634861Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:49.634898Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25464 TClient is connected to server localhost:25464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:49.705656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:49.705687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:49.706703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:49.707398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.715536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.729281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:49.748956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.763620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.952907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790238649928184:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.952986Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.955313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo u ... KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790242944895984:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.088522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.088548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790242944895989:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.089113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:50.097274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790242944895991:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 23786, MsgBus: 65516 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001685/r3tmp/tmp5Nqg5Y/pdisk_1.dat 2024-11-21T17:45:50.511454Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23786, node 3 2024-11-21T17:45:50.532810Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:50.532827Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:50.532829Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:50.532872Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65516 2024-11-21T17:45:50.588560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:50.588584Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:50.596593Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65516 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:50.645418Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.653781Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:50.669070Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.694770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.742121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.775678Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:50.904357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790239225243879:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.904498Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.907163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.924039Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.934829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.951994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.965575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.978197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.068526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790243520211694:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.068563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.068695Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790243520211699:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.069663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:51.073336Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:51.073410Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790243520211701:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:51.362550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.488503Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790243520212179:2490], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2024-11-21T17:45:51.488862Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDE0YmQwMTEtNjdiYmNiYTEtM2I5ZjI4NDYtZjQyY2NlZDU=, ActorId: [3:7439790243520212177:2489], ActorState: ExecuteState, TraceId: 01jd7x94kree63wgmmnxx9ajg2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:45:51.517876Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTIwZTFiNzctMzRmNmZhNWItNGYxN2MwNzEtYWFhYWU2YzQ=, ActorId: [3:7439790243520212183:2492], ActorState: ExecuteState, TraceId: 01jd7x94m2ackt5q0fmhr78acz, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2024-11-21T17:45:51.522512Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790243520212212:2498], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T17:45:51.522891Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGZhMDVkYzAtYTBmY2IwNWYtZGJmNDk0MDYtNWM5ZWRkOTg=, ActorId: [3:7439790243520212210:2497], ActorState: ExecuteState, TraceId: 01jd7x94mz3tf7jeepqwn4e83r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:45:51.527021Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790243520212224:2504], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T17:45:51.527358Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWM0Y2IxOGItYTkxOWI2YjAtZDhiYzJlNTktMjQ0Y2EzMDg=, ActorId: [3:7439790243520212222:2503], ActorState: ExecuteState, TraceId: 01jd7x94n4azajw179ts80e02h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RandomUuid >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] >> KqpLimits::ReplySizeExceeded [GOOD] >> test.py::test[window-yql-14277-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19352, MsgBus: 16955 2024-11-21T17:45:48.456519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790231916612214:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001684/r3tmp/tmp3xRd6D/pdisk_1.dat 2024-11-21T17:45:48.568285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:48.605364Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19352, node 1 2024-11-21T17:45:48.636449Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:48.636464Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:48.636466Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:48.636508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16955 2024-11-21T17:45:48.660712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:48.660747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:48.661604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
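The decimal failures above exercise three separate checks: Decimal(99, ...) is rejected because the precision is outside the supported range, a parameter declared as Decimal(22,9) refuses a bound Decimal(35,10) value, and column writes do not implicitly narrow Decimal(35,10) to Decimal(22,9). A minimal YQL sketch of the same checks, with hypothetical parameter and table names rather than the test's exact statements:

    DECLARE $bad AS Decimal(99, 9);        -- compile error: invalid decimal precision
    DECLARE $value22 AS Decimal(22, 9);    -- binding a Decimal(35,10) value to this parameter fails the type check
    DECLARE $value35 AS Decimal(35, 10);

    -- implicit Decimal(35,10) -> Decimal(22,9) narrowing is rejected; an explicit CAST is required
    UPSERT INTO `/Root/DecimalTable` (Key, Value22)
    VALUES (1, CAST($value35 AS Decimal(22, 9)));
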
2024-11-21T17:45:48.724811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.727657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:48.749216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.787478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.825769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.842583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.030472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790231916613607:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.034511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.043728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.099648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.110304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.117871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.132294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.188763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.207660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790236211581447:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.207696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.207718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790236211581452:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.208442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:49.214424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790236211581454:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 3955, MsgBus: 13015 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001684/r3tmp/tmp1q742w/pdisk_1.dat 2024-11-21T17:45:49.818703Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:49.819787Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3955, node 2 2024-11-21T17:45:49.828799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:49.828813Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:49.828815Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:49.828859Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13015 TClient is connected to server localhost:13015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:49.880691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.885663Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.889813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.908090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:49.908121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:49.909839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:49.948783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.969738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:45:49.987404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.163690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790241149054001:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.163717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.173006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... ICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790241149054507:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.266334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:50.270056Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:50.270157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790241149054509:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 6775, MsgBus: 9916 2024-11-21T17:45:50.844373Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790239578954545:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:50.846483Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001684/r3tmp/tmpcIfkAW/pdisk_1.dat 2024-11-21T17:45:50.882596Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6775, node 3 2024-11-21T17:45:50.908927Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:50.908940Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:50.908942Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:50.908988Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9916 2024-11-21T17:45:50.940631Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:50.940682Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:50.944588Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:51.012150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.013199Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:51.081389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.109873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:51.150694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.171169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.248756Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790243873923221:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.248840Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.251407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.268613Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.284379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.300762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.310398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.326968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.359934Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790243873923725:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.359977Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.360359Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790243873923730:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.361172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:51.364190Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:51.364267Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790243873923732:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:51.894327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:45:52.149854Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790248168891571:2486], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
<main>:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2024-11-21T17:45:52.150360Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTIwOGU2Mi0xMzI4Yzc1NC1kMWIwNGRjYS02MjA2MWVjMw==, ActorId: [3:7439790243873924059:2454], ActorState: ExecuteState, TraceId: 01jd7x958jewm695c6wfe3fpk0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:45:52.167726Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTIwOGU2Mi0xMzI4Yzc1NC1kMWIwNGRjYS02MjA2MWVjMw==, ActorId: [3:7439790243873924059:2454], ActorState: ExecuteState, TraceId: 01jd7x958q291sd28kn4q1vsr6, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2024-11-21T17:45:52.172546Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790248168891598:2492], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:7:29: Error: At function: KiWriteTable!
<main>:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T17:45:52.172962Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTIwOGU2Mi0xMzI4Yzc1NC1kMWIwNGRjYS02MjA2MWVjMw==, ActorId: [3:7439790243873924059:2454], ActorState: ExecuteState, TraceId: 01jd7x9599dx3ne1y21886m4fx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:45:52.180961Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790248168891607:2496], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:29: Error: At function: KiWriteTable!
<main>:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T17:45:52.181471Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTIwOGU2Mi0xMzI4Yzc1NC1kMWIwNGRjYS02MjA2MWVjMw==, ActorId: [3:7439790243873924059:2454], ActorState: ExecuteState, TraceId: 01jd7x959ef94mjvevdc582qnk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> test.py::test[blocks-add_uint64_opt--Debug] [GOOD] >> test.py::test[blocks-add_uint64_opt--Plan] [GOOD] >> test.py::test[blocks-add_uint64_opt--Results] >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> test.py::test[select-trivial_where-many-Debug] [GOOD] >> test.py::test[select-trivial_where-many-Plan] [GOOD] >> test.py::test[select-trivial_where-many-Results] >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpLimits::AffectedShardsLimit >> CompressExecutor::TestReorderedExecutor [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 19838, MsgBus: 30837 2024-11-21T17:45:47.064626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790226505001674:2124];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001690/r3tmp/tmpu1A1Xi/pdisk_1.dat 2024-11-21T17:45:47.098975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:47.132473Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19838, node 1 2024-11-21T17:45:47.144022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:47.144035Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:47.144038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:47.144075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30837 2024-11-21T17:45:47.196845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:47.196875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:47.197869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30837 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:47.254850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.260681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:47.276905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.309266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.347616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.385330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.551355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226505003132:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.553701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.557950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.565825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.580689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.601074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.619184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.631215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.652913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226505003638:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.652930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.652994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226505003643:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.653807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:47.656617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:47.656781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790226505003645:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:47.877060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.060493Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzBmNjE1MDYtOTBiZjE5MGUtMTkwYWM5NTctZTI4YzNhY2E=, ActorId: [1:7439790230799972158:2561], ActorState: ExecuteState, TraceId: 01jd7x921g2zppa1ctdft2vnea, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001703 > 50331648), code: 2013 Trying to start YDB, gRPC: 27502, MsgBus: 5066 2024-11-21T17:45:49.386932Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790236973299935:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:49.386952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001690/r3tmp/tmpuJBCOL/pdisk_1.dat 2024-11-21T17:45:49.400495Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27502, node 2 2024-11-21T17:45:49.422492Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:49.422505Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:49.422506Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:49.422542Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5066 TClient is connected to server localhost:5066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:49.488470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:49.488506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:49.489298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.489708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:49.501036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.511006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:49.529251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.541430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.733005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790236973301489:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.733047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.738877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.758397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.769654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.785335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.797011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.823127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.837373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790236973301986:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.837404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.837528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790236973301991:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.838347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:49.845403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790236973301993:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:50.054769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 16940, MsgBus: 31325 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001690/r3tmp/tmpKPpcGA/pdisk_1.dat 2024-11-21T17:45:50.400873Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16940, node 3 2024-11-21T17:45:50.411373Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:50.411388Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:50.411391Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:50.411432Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31325 TClient is connected to server localhost:31325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:50.485451Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:50.485479Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:50.485822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.486466Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:50.487874Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:50.491813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:50.553479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.583391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:50.596720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.792386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790240728818533:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.792504Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.798054Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.855866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.870054Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.889842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.909534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.933815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.953832Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790240728819052:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.953863Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.953996Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790240728819057:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.954797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:50.966212Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790240728819059:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:51.260678Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:45:52.931699Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjA1MDczYWMtNGE0MTZlMGUtNGIzMDkxNzMtYjE1NmM2OGQ=, ActorId: [3:7439790245023786655:2454], ActorState: ExecuteState, TraceId: 01jd7x95y5chnk74cqp22f89zj, Create QueryResponse for error on request, msg: >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::RowsLimit >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> KqpErrors::ResolveTableError >> test.py::test[agg_phases-min_by-default.txt-Results] [GOOD] >> test.py::test[agg_phases-sum_null-default.txt-Debug] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-Debug] >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> KqpLimits::QSReplySizeEnsureMemoryLimits >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--Results] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q21-default.txt-Results] >> test.py::test[select-trivial_where-many-Results] [GOOD] >> test.py::test[select-trivial_where-one-Debug] >> test.py::test[pg-select_qstarref2-default.txt-Debug] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Plan] [GOOD] >> test.py::test[pg-select_qstarref2-default.txt-Results] >> KqpStats::SysViewClientLost >> KqpQuery::RowsLimit [GOOD] >> KqpLimits::AffectedShardsLimit [GOOD] |75.2%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/dq_file/part17/test-results/pytest/{meta.json ... 
results_accumulator.log} |75.2%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::QueryCancelWrite |75.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |75.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |75.2%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> test.py::test[blocks-combine_all_decimal--Debug] [GOOD] >> test.py::test[aggregate-group_by_gs_and_having-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_only_distinct--Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only_distinct--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Debug] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Plan] [SKIPPED] >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] |75.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::RowsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 14340, MsgBus: 8072 2024-11-21T17:45:50.931417Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790241489026995:2265];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:50.931589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00167b/r3tmp/tmp8rtJBv/pdisk_1.dat 2024-11-21T17:45:50.981706Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14340, node 1 2024-11-21T17:45:51.012429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:51.012439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:51.012441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:51.012476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8072 2024-11-21T17:45:51.033160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:51.033200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:51.034139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8072 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:51.207460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.216476Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:51.241054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.312970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.339477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.361618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.611328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790245783995614:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.615741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.627648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.647849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.665306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.681491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.703811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.730700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.796492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790245783996143:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.796544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.796666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790245783996148:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.801342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:51.896379Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:51.896502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790245783996150:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 8611, MsgBus: 2448 2024-11-21T17:45:52.833396Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790247625759109:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:52.834683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00167b/r3tmp/tmp6zoIRA/pdisk_1.dat 2024-11-21T17:45:52.854608Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8611, node 2 2024-11-21T17:45:52.862045Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:52.862060Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:52.862063Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:52.862107Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2448 TClient is connected to server localhost:2448 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:52.938402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:52.938432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:52.938734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:52.940163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:52.940500Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:52.943295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:52.954586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:52.969221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:52.978979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.180154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790251920727827:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:53.180187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:53.186546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.201426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.215678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.227034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.246933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.259880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.282208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790251920728319:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:53.282235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:53.282362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790251920728324:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:53.283176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:53.290350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790251920728326:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 25909, MsgBus: 23626 2024-11-21T17:45:53.776210Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790254189224758:2137];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:53.776266Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00167b/r3tmp/tmp1zTBHN/pdisk_1.dat 2024-11-21T17:45:53.786853Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25909, node 3 2024-11-21T17:45:53.793952Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:53.793965Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:53.793967Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:53.794003Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23626 TClient is connected to server localhost:23626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:53.876514Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.876546Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:53.884028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:53.884343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.889185Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:53.922344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.933374Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:53.963089Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.974423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.105360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790258484193492:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.105404Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.108449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.115014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.122532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.136529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.192113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.199881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.221458Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790258484194005:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.221496Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.221520Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790258484194010:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.222263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:54.226701Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790258484194012:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> test.py::test[aggregate-group_by_hop_static_list_key-default.txt-Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Debug] >> test.py::test[blocks-combine_all_decimal--Plan] [GOOD] >> test.py::test[blocks-combine_all_decimal--Results] >> KqpExplain::LimitOffset |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |75.2%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 25814, MsgBus: 29483 2024-11-21T17:45:50.419349Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790239245287823:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:50.419411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001681/r3tmp/tmppiA7Tk/pdisk_1.dat 2024-11-21T17:45:50.467980Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25814, node 1 2024-11-21T17:45:50.480865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:50.480878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:50.480880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:50.480928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29483 2024-11-21T17:45:50.520456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:50.520485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:50.524578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T17:45:50.572557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.575666Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.585240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.684532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.710747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.723078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:50.904477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790239245289220:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.909289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.921739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.933747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.952157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.970095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.988510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.007154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.035151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790243540257033:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.035177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.035320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790243540257038:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:51.036203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:51.038849Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:51.038945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790243540257040:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } waiting... 2024-11-21T17:45:51.372835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.510888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:45:51.533658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key [1, 4)","Key [42, 42]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 63831, MsgBus: 27653 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001681/r3tmp/tmpVR0fma/pdisk_1.dat 2024-11-21T17:45:52.160465Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:52.166422Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63831, node 2 2024-11-21T17:45:52.198286Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:52.198297Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:52.198299Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:52.198337Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27653 2024-11-21T17:45:52.256607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:52.256636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:52.261549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server 
localhost:27653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricate ... 2024-11-21T17:45:53.356475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:45:53.356493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:45:53.356498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:45:53.356509Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:45:53.356512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"Value \u003E 0","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Value \u003E 0","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 7904, MsgBus: 4967 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001681/r3tmp/tmphzHzrC/pdisk_1.dat 2024-11-21T17:45:53.753328Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:53.753541Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7904, node 3 2024-11-21T17:45:53.762584Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:53.762595Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:53.762597Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:53.762635Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4967 TClient is connected to server localhost:4967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:53.844668Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.844695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:53.845026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:53.846754Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:53.849037Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:53.882103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:53.896008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.913916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:53.927571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.033165Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790258883566054:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.033217Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.035583Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.043584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.054405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.067213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.080732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.095372Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.110136Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790258883566557:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.110156Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.110161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790258883566562:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.110928Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:54.114586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790258883566564:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:54.305212Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.343889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.468044Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7439790258883568535:2579] TxId: 281474976715674. Ctx: { TraceId: 01jd7x97fxcjk1qk7zvnztx29k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWMyMDNhZmUtODgzZGY1NDAtYWEzMWI0NGItMTJiNDczMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20 2024-11-21T17:45:54.469511Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWMyMDNhZmUtODgzZGY1NDAtYWEzMWI0NGItMTJiNDczMTY=, ActorId: [3:7439790258883568359:2579], ActorState: ExecuteState, TraceId: 01jd7x97fxcjk1qk7zvnztx29k, Create QueryResponse for error on request, msg:
: Error: Affected too many shards: 0, code: 2029 >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> test.py::test[join-yql-8980--Debug] [GOOD] >> test.py::test[join-yql-8980--Plan] [GOOD] >> test.py::test[join-yql-8980--Results] >> KqpLimits::QSReplySizeEnsureMemoryLimits [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> test.py::test[agg_phases-sum_null-default.txt-Debug] [GOOD] >> test.py::test[agg_phases-sum_null-default.txt-Plan] [GOOD] >> test.py::test[agg_phases-sum_null-default.txt-Results] >> KqpErrors::ProposeError [GOOD] >> KqpQuery::QueryCancelWrite [GOOD] >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] >> TSchemeShardSplitBySizeTest::MergeIndexTableShards [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by--Debug] >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> test.py::test[pg-select_qstarref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref2-default.txt-Debug] >> KqpErrors::ProposeResultLost_RwTx [GOOD] >> TSchemeShardSplitByLoad::TableSplitsUpToMaxPartitionsCount [GOOD] >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MergeConnection ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::MergeIndexTableShards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:45.726740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:45.726767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:45.726773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:45.726778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:45.726794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:45.726798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:45.726808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:45.726905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:45.738000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:45.738026Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:45.740946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:45.741844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:45.741878Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:45.743310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:45.743519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:45.743628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.743711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:45.744737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.744999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:45.745011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.745048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:45.745055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:45.745061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:45.745076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.746394Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:45.759165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:45.759238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.759290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:45.759346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:45.759354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.760007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.760037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:45.760085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.760093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T17:45:45.760096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:45.760099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:45.760412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.760419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:45.760422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:45.760685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.760691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.760695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.760699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:45.761095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:45.761386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:45.761431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:45.761602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.761627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:45.761637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.761695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:45.761703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.761728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:45.761741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:45.762255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:45.762264Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:45.762306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.762311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:45.762397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.762405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:45.762417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:45.762421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:45.762427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:45.762432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:45.762436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:45.762440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:45.762450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:45.762455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:45.762460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:45.762784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:45.762803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:45.762808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:45.762813Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:45.762819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:45.762836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ersion: 6 PathOwnerId: 72057594046678944, cookie: 281474976710659 2024-11-21T17:45:55.515340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 281474976710659 2024-11-21T17:45:55.515345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710659 2024-11-21T17:45:55.515349Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710659, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2024-11-21T17:45:55.515354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T17:45:55.515374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710659, ready parts: 0/1, is published: true 2024-11-21T17:45:55.516626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710659:0 from tablet: 72057594046678944 to tablet: 72075186233409551 cookie: 72057594046678944:6 msg type: 269553158 2024-11-21T17:45:55.516648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710659:0 from tablet: 72057594046678944 to tablet: 72075186233409552 cookie: 72057594046678944:7 msg type: 269553158 2024-11-21T17:45:55.517100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710659 2024-11-21T17:45:55.517627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710659:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710659 TabletId: 72075186233409551 2024-11-21T17:45:55.517640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710659:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T17:45:55.517684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710659:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710659 TabletId: 72075186233409552 2024-11-21T17:45:55.517688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710659:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409552, at schemeshard: 72057594046678944 2024-11-21T17:45:55.517702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T17:45:55.517707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2024-11-21T17:45:55.517714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710659, ready parts: 1/1, is published: true 2024-11-21T17:45:55.517719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2024-11-21T17:45:55.517725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T17:45:55.517730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710659:0 2024-11-21T17:45:55.517765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2024-11-21T17:45:55.518612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 281474976710659:0, at schemeshard: 72057594046678944 2024-11-21T17:45:55.518645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710659:0, at schemeshard: 72057594046678944 2024-11-21T17:45:55.518649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976710659:0 2024-11-21T17:45:55.518792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 758 RawX2: 4294969970 } TabletId: 72075186233409551 State: 4 2024-11-21T17:45:55.518803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409551, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:45:55.518844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 814 RawX2: 4294970016 } TabletId: 72075186233409552 State: 4 2024-11-21T17:45:55.518850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:45:55.519177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:45:55.519205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:7 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:45:55.519283Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2024-11-21T17:45:55.580919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:45:55.581061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 Forgetting tablet 72075186233409551 2024-11-21T17:45:55.581860Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186233409552 Forgetting tablet 72075186233409552 2024-11-21T17:45:55.582493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2024-11-21T17:45:55.582570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:45:55.596620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:45:55.596647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:45:55.596796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T17:45:55.596805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 2024-11-21T17:45:55.596989Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2024-11-21T17:45:55.597065Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue/indexImplTable" took 88us result status StatusSuccess 2024-11-21T17:45:55.597324Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 4 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } 
TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeError [GOOD] Test command err: 2024-11-21T17:45:52.795422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:52.795556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:52.795590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:45:52.795711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:52.795762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:52.795788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f08/r3tmp/tmpWK8Aic/pdisk_1.dat 2024-11-21T17:45:52.903052Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:53.004397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.093852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.093889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:53.095091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.095109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:53.113100Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:45:53.113284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:53.113450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:53.514903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.140520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1518:2922], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.140552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1528:2927], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.140564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.142031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:45:54.796331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1532:2930], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:45:54.986627Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T17:45:54.986652Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T17:45:54.986672Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:54.986678Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T17:45:54.986688Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:45:54.987300Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T17:45:54.989007Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.455668s, cancelAfter: (empty maybe) 2024-11-21T17:45:54.989025Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2024-11-21T17:45:54.989035Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:54.989041Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T17:45:54.989050Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:45:54.989167Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2024-11-21T17:45:54.989373Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1708:2920] TxId: 0. 
Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T17:45:54.989391Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2024-11-21T17:45:54.989402Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T17:45:54.989423Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:54.989478Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2024-11-21T17:45:54.989517Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T17:45:54.989537Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T17:45:54.989595Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2024-11-21T17:45:54.989606Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T17:45:54.989702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:54.989710Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jd7x976v5qfesvn31p7v22nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3MGVlMzEtMjgzNzFhNS1hNDYwMTJiMy1hMzNmODQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localCompu ... tNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:55.393298Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T17:45:55.393505Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1953 RawX2: 4294970482 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\t\000\002\n\n" Rows: 1 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\005\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "default" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=" } RequestContext { key: "TraceId" value: "01jd7x98dpemzf5g5b5570ppp7" } EnableSpilling: false 2024-11-21T17:45:55.393536Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2024-11-21T17:45:55.393555Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T17:45:55.393570Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:45:55.393575Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1953:3186] TxId: 281474976715683. 
Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T17:45:55.393581Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T17:45:55.393589Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T17:45:55.393595Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T17:45:55.406711Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: RESPONSE_DATA, error: 2024-11-21T17:45:55.406754Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T17:45:55.406805Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: GENERIC_ERROR Issues { message: "Error executing transaction: transaction failed." severity: 1 } Result { Stats { CpuTimeUs: 92 } } , to ActorId: [1:1943:3186] 2024-11-21T17:45:55.406822Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2024-11-21T17:45:55.406851Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
terminate execution. 2024-11-21T17:45:55.406857Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1953:3186] TxId: 281474976715683. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T17:45:55.406901Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, ActorId: [1:1943:3186], ActorState: ExecuteState, TraceId: 01jd7x98dpemzf5g5b5570ppp7, Create QueryResponse for error on request, msg: 2024-11-21T17:45:55.407050Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1958:3186] TxId: 0. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T17:45:55.407086Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1958:3186] TxId: 281474976715684. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T17:45:55.407155Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Resolved key sets: 0 2024-11-21T17:45:55.407183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:55.407199Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T17:45:55.407208Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1958:3186] TxId: 281474976715684. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:45:55.407213Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1958:3186] TxId: 281474976715684. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T17:45:55.407225Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1958:3186] TxId: 281474976715684. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:45:55.407229Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1958:3186] TxId: 281474976715684. 
Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T17:45:55.407237Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1958:3186] TxId: 281474976715684. Ctx: { TraceId: 01jd7x98dpemzf5g5b5570ppp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhODc3NmQtNjQ4YmYwMGYtNWEzYTBlN2UtMWVjOWQwMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWrite [GOOD] Test command err: Trying to start YDB, gRPC: 29875, MsgBus: 64003 2024-11-21T17:45:48.322774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790231139904911:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.322839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001686/r3tmp/tmpZlG2hB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29875, node 1 2024-11-21T17:45:48.388425Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:48.391632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:48.391636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:48.391638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:48.391672Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64003 2024-11-21T17:45:48.424435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:48.424475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:48.432702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:45:48.532321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.535139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:48.585268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.614931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.639023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.652369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:48.816491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790231139906312:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.820832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.835579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.848008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.864140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.881756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.894922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.917043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.935507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790231139906828:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.935543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.935620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790231139906833:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:48.936532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:48.942783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790231139906835:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:53.330763Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790231139904911:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:53.330813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28048, MsgBus: 23401 2024-11-21T17:45:53.563295Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:53.573391Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:53.574980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.574995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001686/r3tmp/tmpvee8Yk/pdisk_1.dat 2024-11-21T17:45:53.577787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28048, node 2 2024-11-21T17:45:53.590255Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:53.590269Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:53.590271Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:53.590329Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23401 TClient is connected to server localhost:23401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:53.633560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.635359Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:53.654340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:53.667218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.691406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.703367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.858921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790253845859392:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:5 ... ARN: Access denied: self# [2:7439790258140827967:3676], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.563737Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439790258140827967:3676], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.564212Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790258140827964:2599], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:54.564765Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzUyYzBjYjAtZTEwM2E3NTktN2FmYzJlMGItNzAyMTZmMzI=, ActorId: [2:7439790258140827945:2596], ActorState: ExecuteState, TraceId: 01jd7x97m13bdqkqyc552jheh8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:54.566501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710695:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.576052Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439790258140827996:3690], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.576066Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439790258140827996:3690], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.576474Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790258140827993:2609], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:54.576694Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDA2NDRkN2MtOGRiOWVjYmUtMWI4YThiMWQtZDBmZTJlMWE=, ActorId: [2:7439790258140827989:2607], ActorState: ExecuteState, TraceId: 01jd7x97meadedq9yc47cq6per, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:54.588393Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439790258140828013:3694], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.588408Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439790258140828013:3694], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.588846Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790258140828010:2618], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:54.588952Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTk5OTE2ODYtZDI0YTllMzQtMzI0YmExOGMtN2I5ZGQzMzg=, ActorId: [2:7439790258140828006:2616], ActorState: ExecuteState, TraceId: 01jd7x97mrbmynvrep394xwywy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:54.601335Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439790258140828031:3699], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.601350Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439790258140828031:3699], for# user0@builtin, access# DescribeSchema 2024-11-21T17:45:54.601809Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790258140828028:2627], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:54.601909Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjM1Mjk1NjktZmI0NTc0ZS02MWQ3YWVhZC0xYjUxZjVl, ActorId: [2:7439790258140828024:2625], ActorState: ExecuteState, TraceId: 01jd7x97n65ggt7nb48sbpsvnv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 18520, MsgBus: 5932 2024-11-21T17:45:54.915382Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790258111150329:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:54.915669Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001686/r3tmp/tmp0ATOkC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18520, node 3 2024-11-21T17:45:54.947468Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:54.947696Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:54.947700Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:54.947701Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:54.947749Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5932 TClient is connected to server localhost:5932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:55.015952Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:55.015983Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:55.018280Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:55.018566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:55.023779Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:55.029478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.041578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:55.064575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.120898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.280203Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790262406119165:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.280255Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.287518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.300404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.315015Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.332248Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.348986Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.361689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.384771Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790262406119671:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.384791Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.384889Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790262406119676:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.385611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:55.389476Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:55.389577Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790262406119678:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |75.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization >> test.py::test[select-trivial_where-one-Debug] [GOOD] >> test.py::test[select-trivial_where-one-Plan] [GOOD] >> test.py::test[select-trivial_where-one-Results] >> KqpStats::JoinStatsBasicScan >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitByLoad::TableSplitsUpToMaxPartitionsCount [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:45.787900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:45.787929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:45.787934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:45.787939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:45.787951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:45.787955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:45.787964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:45.788055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:45.799139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:45.799166Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:45.802242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:45.803098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:45.803128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:45.804546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:45.804821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:45.804923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T17:45:45.804983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:45.805971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.806230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:45.806242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.806280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:45.806287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:45.806293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:45.806306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.807702Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:45.826617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:45.826724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.826780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:45.826855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:45.826863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.827565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.827596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:45.827648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.827657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:45.827662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:45.827667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:45.828147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.828160Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:45.828165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:45.828595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.828609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.828615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.828621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:45.829228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:45.829629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:45.829666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:45.829790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.829809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:45.829817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.829862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:45.829866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.829888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:45.829897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:45.830202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:45.830207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:45.830242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.830247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:45.830312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.830316Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:45.830324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:45.830327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:45.830331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:45.830335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:45.830337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:45.830340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:45.830347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:45.830351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:45.830354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:45.830602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:45.830614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:45.830617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:45.830620Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:45.830623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:45.830634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
sVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 5 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 5 SplitByLoadSettings { Enabled: true CpuPercentageThreshold: 1 } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 62500 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 125000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 250000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 500000 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000$\364\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000H\350\001\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409554 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\220\320\003\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000 \241\007\000\000\000\000\000" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 5 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 5000000 Memory: 429112 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TEST table final state: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 5 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy 
{ Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 5 SplitByLoadSettings { Enabled: true CpuPercentageThreshold: 1 } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 62500 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 125000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 250000 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint64: 500000 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000$\364\000\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000H\350\001\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409554 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\220\320\003\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000 \241\007\000\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 5 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 5000000 Memory: 429112 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx [GOOD] Test command err: 2024-11-21T17:45:53.177921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:53.178023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:53.178050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:45:53.178155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:53.178186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:53.178205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e34/r3tmp/tmpGgMizN/pdisk_1.dat 2024-11-21T17:45:53.352716Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:53.482191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.616030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.616072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:53.625787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.625828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:53.638406Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:45:53.638554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:53.638677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:54.012187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.699110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1518:2922], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.699138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1528:2927], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.699149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.700757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:45:55.363472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1532:2930], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:45:55.565618Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T17:45:55.565642Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T17:45:55.565661Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:55.565668Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T17:45:55.565677Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:45:55.566289Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T17:45:55.567665Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.450454s, cancelAfter: (empty maybe) 2024-11-21T17:45:55.567682Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2024-11-21T17:45:55.567690Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:55.567698Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T17:45:55.567706Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:45:55.567821Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2024-11-21T17:45:55.567925Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1708:2920] TxId: 0. 
Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T17:45:55.567942Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2024-11-21T17:45:55.567952Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T17:45:55.567971Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:55.568028Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2024-11-21T17:45:55.568066Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T17:45:55.568086Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T17:45:55.568143Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2024-11-21T17:45:55.568155Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1708:2920] TxId: 281474976715660. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T17:45:55.568371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:55.568381Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jd7x97ra865se4k4y31rh8f0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI4NGRhOTAtYTZmODE0ZjQtMzY5YTMzYjgtZjEyM2Q3ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localCompu ... : { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T17:45:55.599248Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Preparing 2024-11-21T17:45:55.599251Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037889 not finished yet: Preparing 2024-11-21T17:45:55.599255Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037890 not finished yet: Preparing 2024-11-21T17:45:55.599259Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037891 not finished yet: Preparing 2024-11-21T17:45:55.599267Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 4 datashard(s): DS 72075186224037888 (Preparing), DS 72075186224037889 (Preparing), DS 72075186224037890 (Preparing), DS 72075186224037891 (Preparing), 2024-11-21T17:45:55.599272Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. 
Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, not immediate tx, become PrepareState 2024-11-21T17:45:55.600073Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shard 72075186224037889 propose error, notDelivered: 0, notPrepared: 0, wasRestart: 0 2024-11-21T17:45:55.600088Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037888 2024-11-21T17:45:55.600095Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037889 2024-11-21T17:45:55.600101Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037890 2024-11-21T17:45:55.600106Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037891 2024-11-21T17:45:55.601330Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T17:45:55.601348Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 2, does not have the CA id yet or is already complete 2024-11-21T17:45:55.601352Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 3, does not have the CA id yet or is already complete 2024-11-21T17:45:55.601356Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1732:3037] TxId: 281474976715661. 
Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 4, does not have the CA id yet or is already complete 2024-11-21T17:45:55.602258Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: UNDETERMINED Issues { message: "State of operation is unknown." issue_code: 2026 severity: 1 issues { message: "Tx state unknown for shard 72075186224037889, txid 281474976715661" issue_code: 200506 severity: 1 } } Result { Stats { } } , to ActorId: [1:1717:3037] 2024-11-21T17:45:55.602324Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2024-11-21T17:45:55.602353Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:45:55.602360Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1732:3037] TxId: 281474976715661. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T17:45:55.602781Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, ActorId: [1:1717:3037], ActorState: ExecuteState, TraceId: 01jd7x98m25fvjekepqkx5q6aa, Create QueryResponse for error on request, msg: 2024-11-21T17:45:55.603148Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1740:3037] TxId: 0. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T17:45:55.603173Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1740:3037] TxId: 281474976715662. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T17:45:55.603271Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Resolved key sets: 0 2024-11-21T17:45:55.603298Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:45:55.603307Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. 
Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T17:45:55.603316Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1740:3037] TxId: 281474976715662. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:45:55.603321Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1740:3037] TxId: 281474976715662. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T17:45:55.603339Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1740:3037] TxId: 281474976715662. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:45:55.603343Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1740:3037] TxId: 281474976715662. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T17:45:55.603351Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1740:3037] TxId: 281474976715662. Ctx: { TraceId: 01jd7x98m25fvjekepqkx5q6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdkNTIyZjQtM2Y5OTUxMGQtOTFjYWE4NDYtYzEyNmM3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> KqpErrors::ResolveTableError [GOOD] >> test.py::test[window-current/ansi_current_mixed--Debug] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Plan] [GOOD] >> test.py::test[window-current/ansi_current_mixed--Results] |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> test.py::test[blocks-combine_all_decimal--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2024-11-21T17:45:54.921378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:54.921541Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dab/r3tmp/tmprk0Kjb/pdisk_1.dat 2024-11-21T17:45:55.050081Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:55.150873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.221692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:55.221731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:55.228115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:55.228167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:55.241571Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:45:55.241730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:55.241830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:55.600942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.229336Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T17:45:56.229376Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Begin literal execution, txs: 1 2024-11-21T17:45:56.229395Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:56.229402Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T17:45:56.229412Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:45:56.230122Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T17:45:56.231610Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 300.000000s, cancelAfter: (empty maybe) 2024-11-21T17:45:56.231625Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T17:45:56.231633Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:56.231639Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T17:45:56.231647Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:45:56.231778Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T17:45:56.231920Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 0. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Bootstrap done, become ReadyState 2024-11-21T17:45:56.231935Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Executing physical tx, type: 2, stages: 1 2024-11-21T17:45:56.231947Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2024-11-21T17:45:56.231967Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:45:56.232032Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2024-11-21T17:45:56.232073Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T17:45:56.232093Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T17:45:56.232151Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] will be executed on 1 shards. 2024-11-21T17:45:56.232164Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T17:45:56.232280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2024-11-21T17:45:56.232290Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T17:45:56.233706Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1476 RawX2: 4294970201 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\010\000\000\000\000\006\002\002\004\004\006\006" Rows: 3 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\001\000\000\000" KeyPoints: "\001\000\004\000\000\000\002\000\000\000" KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=" } RequestContext { key: "TraceId" value: "01jd7x996y46xnj0d1s7bqwft9" } EnableSpilling: false 2024-11-21T17:45:56.233792Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2024-11-21T17:45:56.233817Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T17:45:56.233837Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1476:2905] TxId: 281474976715658. 
Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:45:56.233841Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2024-11-21T17:45:56.233849Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T17:45:56.233857Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T17:45:56.233862Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T17:45:56.267493Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T17:45:56.267548Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2024-11-21T17:45:56.267552Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2024-11-21T17:45:56.267560Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd7x996y46xnj0d1s7bqwft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRhNTIxMWMtYmE2Y2ZjYzItZThjYzJlMDgtOTkwMmM5OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T17:45:56.270800Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1491:2924], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2024-11-21T17:45:56.271135Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWFlOGI4Y2UtZDU1ZTZiNGItMzRlNDk2YjUtM2Y2ZTM3ODY=, ActorId: [1:1489:2922], ActorState: ExecuteState, TraceId: 01jd7x999capn73yc94fhxyzvr, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: >> test.py::test[select-trivial_where-one-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Debug] >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig >> KqpStats::JoinStatsBasicScan [GOOD] |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[pg-select_starref2-default.txt-Debug] [GOOD] >> test.py::test[pg-select_starref2-default.txt-Plan] [GOOD] >> test.py::test[pg-select_starref2-default.txt-Results] |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |75.3%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 30786, MsgBus: 25373 2024-11-21T17:45:49.543445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790238964971010:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:49.543492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001682/r3tmp/tmpYmRkF2/pdisk_1.dat 2024-11-21T17:45:49.643138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:49.646284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:49.646307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:49.648442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30786, node 1 2024-11-21T17:45:49.677568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:49.677582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:49.677583Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:49.677627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25373 TClient is connected to server localhost:25373 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:49.761200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.780375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:49.792626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:49.869942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.904167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:49.923654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.980440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790238964972338:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.980499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.985054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:49.995359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.008905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.021459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.035424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.051188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:50.112844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790243259940153:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.112880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.112989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790243259940158:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:50.113724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:50.117992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790243259940160:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. Trying to start YDB, gRPC: 15626, MsgBus: 6630 2024-11-21T17:45:53.663110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790252167382305:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:53.663176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001682/r3tmp/tmp6WstAE/pdisk_1.dat 2024-11-21T17:45:53.742727Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15626, node 1 2024-11-21T17:45:53.765925Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:53.765943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:53.765944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:53.765984Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6630 2024-11-21T17:45:53.805529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:53.805563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:53.806503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6630 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:53.824397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.827285Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:53.837860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:53.903704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.965300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:53.980536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.053656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790256462351006:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.053707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.091352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.097801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.108065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.114957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.169862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.179343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.197492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790256462351523:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.197524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.197564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790256462351528:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.198290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:54.206386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790256462351530:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. Trying to start YDB, gRPC: 7760, MsgBus: 13588 2024-11-21T17:45:56.385865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790267412462598:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:56.386012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001682/r3tmp/tmp6FV5zX/pdisk_1.dat 2024-11-21T17:45:56.449485Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7760, node 1 2024-11-21T17:45:56.459145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:56.459154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:56.459155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:56.459192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13588 2024-11-21T17:45:56.487075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:56.487106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:56.488167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:56.527960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:56.538689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:56.601336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:56.618233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:56.629087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:56.692785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790267412464164:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.692808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.723516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.729266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.739693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.746650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.753450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.760546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.769064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790267412464657:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.769076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790267412464662:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.769082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.769532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:56.774010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790267412464664:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:57.205685Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211157082, txId: 281474976715671] shutting down >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::ReadHuge >> KqpExplain::IdxFullscan [GOOD] |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall >> test.py::test[join-yql-8980--Results] [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 11937, MsgBus: 9273 2024-11-21T17:45:55.309832Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790262172050338:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:55.312418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00166a/r3tmp/tmpo0sChk/pdisk_1.dat 2024-11-21T17:45:55.366448Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11937, node 1 2024-11-21T17:45:55.387512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:55.387521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:55.387523Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:55.387563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9273 2024-11-21T17:45:55.400951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:55.400976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:55.402023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:45:55.440351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.442861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:55.510836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.537069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.562867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.580715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.656325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790262172051707:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.656353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.678797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.684596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.698143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.711191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.717664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.725297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.733909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790262172052209:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.733924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790262172052214:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.733928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.734444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:55.738082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790262172052216:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 10879, MsgBus: 16885 2024-11-21T17:45:56.230711Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790268248881358:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:56.230950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00166a/r3tmp/tmpGJozGA/pdisk_1.dat 2024-11-21T17:45:56.241624Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10879, node 2 2024-11-21T17:45:56.252519Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:56.252533Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:56.252535Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:56.252567Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16885 TClient is connected to server localhost:16885 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:56.332492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Un ... NodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 31495, MsgBus: 31922 2024-11-21T17:45:56.944840Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790265681951026:2061];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:56.944959Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00166a/r3tmp/tmpYIrbLy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31495, node 3 2024-11-21T17:45:56.964742Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:56.966538Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:56.966564Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:56.966566Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:56.966608Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31922 TClient is connected to server localhost:31922 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:57.047024Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:57.047054Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:57.047341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.048108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:57.049105Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:57.057084Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:57.072708Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:57.089459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:57.099096Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:57.228181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790269976919844:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:57.228245Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:57.232894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.239315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.251812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.266268Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.279507Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.286293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.294365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790269976920359:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:57.294393Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:57.294404Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790269976920364:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:57.294971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:57.299342Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790269976920366:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:57.496121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:57.536577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:45:57.547297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["id"],"Node Type":"TableLookupJoin","PlanNodeId":3,"Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["id"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} >> test.py::test[agg_phases-sum_null-default.txt-Results] [GOOD] >> 
test.py::test[aggr_factory-count-default.txt-Debug] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Debug] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Plan] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> test.py::test[window-current/ansi_current_mixed--Results] [GOOD] >> test.py::test[window-current/session_extended--Debug] >> test.py::test[blocks-combine_all_min_filter--Debug] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Plan] >> test.py::test[window-yql-15636-default.txt-Debug] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Plan] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> test.py::test[window-yql-15636-default.txt-Plan] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] >> test.py::test[pg-select_starref2-default.txt-Results] [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> test.py::test[pg-select_unionall_self-default.txt-Debug] >> EraseRowsTests::ConditionalEraseRowsShouldErase ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] Test command err: 2024-11-21T17:45:00.384490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:00.384515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:00.384519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:00.384522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:00.384533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:00.384536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:00.384544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:00.384609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:00.386974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:00.386993Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:00.389449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:00.389578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:00.389596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:45:00.390333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:00.390398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:00.390450Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.390535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:00.391086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:00.391432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:00.391444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:00.391473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:00.391480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:00.391486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:00.391520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.489668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:45:00.489778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.489853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:45:00.489898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:45:00.489906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.490951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.490986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:45:00.491046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.491058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:45:00.491063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:00.491068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:00.491532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.491544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:45:00.491549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:00.491883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.491893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.491900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:00.491908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:00.492642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:00.493237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:00.493303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:45:00.493563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.493584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:45:00.493590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:01.050925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:01.050998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 657 RawX2: 4294969527 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:01.051011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:01.051526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:01.051535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:01.051566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:01.051577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:01.056658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:01.056679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:01.056724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 
2024-11-21T17:45:01.056728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:666:2238], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:01.056810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:01.056820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:01.056833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:01.056837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:01.056843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:01.056850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:01.056855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:01.056858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:01.056870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:01.056877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:01.056880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:01.058607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:01.058625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:01.058629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:01.058634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:01.058638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:01.058658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:01.058662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:521:2138] 2024-11-21T ... 
ode 123 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:58.023175Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [123:237:2227], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:58.023247Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:58.023254Z node 123 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:58.023265Z node 123 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:58.023268Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:58.023272Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:58.023276Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:58.023280Z node 123 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:58.023284Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:58.023295Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:58.023302Z node 123 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:58.023306Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:58.023383Z node 123 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:58.023390Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:58.023393Z node 123 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:58.023396Z node 123 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:58.023403Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:58.023410Z node 123 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:58.023412Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [123:95:2130] 2024-11-21T17:45:58.023968Z node 123 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 1 2024-11-21T17:45:58.024027Z node 123 :TX_PROXY DEBUG: actor# [123:288:2270] Bootstrap 2024-11-21T17:45:58.024988Z node 123 :TX_PROXY DEBUG: actor# [123:288:2270] Become StateWork (SchemeCache [123:293:2275]) 2024-11-21T17:45:58.025160Z node 123 :TX_PROXY DEBUG: actor# [123:288:2270] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 
2024-11-21T17:45:58.025646Z node 123 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:45:58.026976Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:45:58.027434Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:45:58.027617Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:45:58.027854Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:45:58.028303Z node 123 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:45:58.028312Z node 123 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:45:58.028344Z node 123 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:45:58.029629Z node 123 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:45:58.029667Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:45:58.029695Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:45:58.029727Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:45:58.029741Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:45:58.029863Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:45:58.050984Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:45:58.051031Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:45:58.062016Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:45:58.062068Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:45:58.062085Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:45:58.062096Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:45:58.062124Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:45:58.062132Z node 123 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:45:58.062138Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:45:58.062146Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:45:58.073166Z node 123 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:45:58.073261Z node 123 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:45:58.073468Z node 123 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:45:58.073480Z node 123 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:45:58.073526Z node 123 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:45:58.073698Z node 123 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001ddb/r3tmp/tmpAppBIS/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:45:58.073769Z node 123 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 123:1 Path# /home/runner/.ya/build/build_root/6fwh/001ddb/r3tmp/tmpAppBIS/pdisk_1.dat 2024-11-21T17:45:58.085051Z node 123 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:45:58.085130Z node 123 :CONFIGS_DISPATCHER DEBUG: TConfigsDispatcher Bootstrap 2024-11-21T17:45:58.085200Z node 123 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:45:58.085225Z node 123 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:45:58.085256Z node 123 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:45:58.085299Z node 123 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:45:58.085331Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:379:2337], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:58.085339Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:58.085433Z node 123 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:45:58.085444Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:45:58.085449Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:45:58.085466Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[123:386:2341] 2024-11-21T17:45:58.085495Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:381:2335], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:58.085500Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 
2024-11-21T17:45:58.085881Z node 123 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:45:58.085889Z node 123 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [123:379:2337] 2024-11-21T17:45:58.086055Z node 123 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[123:386:2341] 2024-11-21T17:45:58.086063Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:45:58.086069Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:45:58.086072Z node 123 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:45:58.088825Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:412:2348], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:58.088846Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:58.104116Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, received event# 273481728, Sender [123:436:2374], Recipient [123:378:2336]: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest 2024-11-21T17:45:58.104144Z node 123 :CONFIGS_DISPATCHER TRACE: StateInit, processing event TEvConfigsDispatcher::TEvSetConfigSubscriptionRequest >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::ReadSmall [FAIL] >> DataShardVolatile::DistributedWrite >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks+StreamLookup >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] >> test.py::test[aggregate-compare_by--Debug] [GOOD] >> test.py::test[aggregate-compare_by--Plan] >> test.py::test[aggregate-compare_by--Plan] [GOOD] >> test.py::test[aggregate-compare_by--Results] >> TKeyValueTracingTest::WriteSmall [FAIL] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Debug] >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> test.py::test[pg-select_unionall_self-default.txt-Debug] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-Plan] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-Results] >> test.py::test[pg-tpch-q21-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Analyze] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 
2024-11-21T17:45:03.552374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790038522937761:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpgnmiei/pdisk_1.dat 2024-11-21T17:45:03.586060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:03.606620Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:03.607992Z node 1 :HTTP ERROR: (#30,[::1]:5317) connection closed with error: Connection refused 2024-11-21T17:45:03.610220Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:03.682825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:03.682857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:03.688269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:05.925432Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790049758919240:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:05.927199Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpiNqJos/pdisk_1.dat 2024-11-21T17:45:06.006923Z node 2 :HTTP ERROR: (#32,[::1]:1912) connection closed with error: Connection refused 2024-11-21T17:45:06.018107Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:06.029737Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:06.072810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:06.072838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:06.076938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:08.510322Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790059671494969:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:08.510336Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpqCvwTg/pdisk_1.dat 2024-11-21T17:45:08.573115Z node 3 :HTTP ERROR: (#30,[::1]:17868) connection closed with error: Connection refused 2024-11-21T17:45:08.574967Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:08.575018Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:08.620655Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:08.620677Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T17:45:08.624486Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:11.159166Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790073795843348:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:11.159196Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpR24qHo/pdisk_1.dat 2024-11-21T17:45:11.200225Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:11.209998Z node 4 :HTTP ERROR: (#32,[::1]:20727) connection closed with error: Connection refused 2024-11-21T17:45:11.214159Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:11.268322Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:11.268347Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:11.270490Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:13.864851Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439790080799976974:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:13.864874Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpMLllSP/pdisk_1.dat 2024-11-21T17:45:13.932146Z node 5 :HTTP ERROR: (#34,[::1]:25611) connection closed with error: Connection refused 2024-11-21T17:45:13.935319Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:13.941294Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:13.968469Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:13.968496Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:13.971518Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:16.309180Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439790095198041540:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:16.310244Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpwqhVkU/pdisk_1.dat 2024-11-21T17:45:16.369365Z node 6 :HTTP ERROR: (#36,[::1]:17221) connection closed with error: Connection refused 2024-11-21T17:45:16.376396Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:16.380869Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:16.421298Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T17:45:16.421319Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:16.421986Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:18.735620Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439790104694404637:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:18.737994Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmp0u9Dxh/pdisk_1.dat 2024-11-21T17:45:18.764735Z node 7 :HTTP ERROR: (#38,[::1]:26287) connection closed with error: Connection refused 2024-11-21T17:45:18.771546Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:18.772777Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:18.837493Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:18.837517Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:18.838748Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmp3AVuLW/pdisk_1.dat 2024-11-21T17:45:21.112325Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:21.131772Z node 8 :HTTP ERROR: (#40,[::1]:21270) connection closed with error: Connection refused 2024-11-21T17:45:21.131997Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:21.136956Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:21.216604Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:21.216633Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:21.220617Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:23.481574Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439790124321250967:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:23.481635Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpsYZ8LQ/pdisk_1.dat 2024-11-21T17:45:23.492600Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:23.495370Z node 9 :HTTP ERROR: (#42,[::1]:24928) connection closed with error: Connection refused 2024-11-21T17:45:23.495453Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:23.584448Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:23.584477Z node 9 :HIVE WARN: HIVE#72057594037968897 
Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:23.585580Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpSbRUI2/pdisk_1.dat 2024-11-21T17:45:26.280846Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:26.305082Z node 10 :HTTP ERROR: (#30,[::1]:16914) connection closed with error: Connection refused 2024-11-21T17:45:26.316642Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:26.317452Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:26.372561Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:26.372591Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:26.376612Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:28.684236Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=und ... e_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmp6YZHRQ/pdisk_1.dat 2024-11-21T17:45:33.773383Z node 13 :HTTP ERROR: (#36,[::1]:3981) connection closed with error: Connection refused 2024-11-21T17:45:33.792630Z node 13 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:33.794031Z node 13 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:33.829266Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:33.829286Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:33.836472Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:36.070630Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7439790181114141717:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpzOsrn9/pdisk_1.dat 2024-11-21T17:45:36.079146Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:36.091260Z node 14 :HTTP ERROR: (#38,[::1]:63483) connection closed with error: Connection refused 2024-11-21T17:45:36.091449Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:36.096516Z node 14 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:36.174242Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:36.174288Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:36.175117Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmp9yAxTc/pdisk_1.dat 2024-11-21T17:45:38.559527Z 
node 15 :HTTP ERROR: (#40,[::1]:7821) connection closed with error: Connection refused 2024-11-21T17:45:38.559620Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:38.560577Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:38.560879Z node 15 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:38.652496Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:38.652534Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:38.656687Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmphedWlw/pdisk_1.dat 2024-11-21T17:45:40.918045Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:40.924147Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:40.927828Z node 16 :HTTP ERROR: (#42,[::1]:15794) connection closed with error: Connection refused 2024-11-21T17:45:40.928733Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:41.008713Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:41.008752Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:41.012612Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpVeZe0l/pdisk_1.dat 2024-11-21T17:45:43.299083Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:43.301959Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:43.304514Z node 17 :HTTP ERROR: (#30,[::1]:17955) connection closed with error: Connection refused 2024-11-21T17:45:43.312338Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:43.378435Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:43.378463Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:43.379452Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:45.631141Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7439790220256859507:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmp6awTzs/pdisk_1.dat 2024-11-21T17:45:45.633445Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:45.641367Z node 18 :HTTP ERROR: 
(#32,[::1]:7046) connection closed with error: Connection refused 2024-11-21T17:45:45.641455Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:45.644674Z node 18 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:45.732492Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:45.732521Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:45.733580Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpLmB5Qt/pdisk_1.dat 2024-11-21T17:45:48.038660Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:48.049062Z node 19 :HTTP ERROR: (#34,[::1]:18904) connection closed with error: Connection refused 2024-11-21T17:45:48.050833Z node 19 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:48.051017Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:48.119798Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:48.119839Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:48.128700Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpCtgwud/pdisk_1.dat 2024-11-21T17:45:50.373543Z node 20 :HTTP ERROR: (#36,[::1]:27801) connection closed with error: Connection refused 2024-11-21T17:45:50.374801Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:50.375153Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:50.378447Z node 20 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:50.454208Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:50.454240Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:50.455635Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpm00E6o/pdisk_1.dat 2024-11-21T17:45:52.766214Z node 21 :HTTP ERROR: (#35,[::1]:15340) connection closed with error: Connection refused 2024-11-21T17:45:52.766338Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:52.776575Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:52.779219Z node 21 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:52.829080Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:52.829121Z node 21 :HIVE WARN: 
HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:52.829917Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:55.069628Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7439790262029188021:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:55.072585Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmp14gzMo/pdisk_1.dat 2024-11-21T17:45:55.106205Z node 22 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:55.106510Z node 22 :HTTP ERROR: (#40,[::1]:27463) connection closed with error: Connection refused 2024-11-21T17:45:55.106612Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:55.181003Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:55.181036Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:55.182052Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001db1/r3tmp/tmpcZqztY/pdisk_1.dat 2024-11-21T17:45:57.350254Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7439790271299928146:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:57.357011Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:57.365049Z node 23 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:57.369281Z node 23 :HTTP ERROR: (#39,[::1]:20190) connection closed with error: Connection refused 2024-11-21T17:45:57.369368Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T17:45:57.454797Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:57.454860Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:57.455888Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> test.py::test[aggr_factory-count-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Results] >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> 
test.py::test[pg-select_unionall_self-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_expr_partition-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBE58B59) TestOneRead(TBasicString>, TBasicString>)+2032 (0xBBE26E0) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+157 (0xBBE577D) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBBEC6E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBE5AB0E) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBBEBE8A) NUnitTest::TTestFactory::Execute()+803 (0xBE5B283) NUnitTest::RunMain(int, char**)+3005 (0xBE6AB9D) ??+0 (0x7F039231DD90) __libc_start_main+128 (0x7F039231DE40) _start+41 (0xB04F029) >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks+StreamLookup [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks-StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBE58B59) TestOneRead(TBasicString>, TBasicString>)+2032 (0xBBE26E0) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+157 (0xBBE55ED) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBBEC6E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBE5AB0E) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBBEBE8A) NUnitTest::TTestFactory::Execute()+803 (0xBE5B283) NUnitTest::RunMain(int, char**)+3005 (0xBE6AB9D) ??+0 (0x7F36C9E29D90) __libc_start_main+128 (0x7F36C9E29E40) _start+41 (0xB04F029) >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Debug] >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> test.py::test[simple_columns-simple_columns_join_subreq_same_key_by_all-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Debug] >> test.py::test[blocks-combine_all_min_filter_opt--Debug] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Plan] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2024-11-21T17:45:59.298314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:59.298852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:59.298873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002497/r3tmp/tmpBA9o0k/pdisk_1.dat 2024-11-21T17:45:59.408736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.425958Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:59.468438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:59.468475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:59.478953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:59.583528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.598649Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:45:59.598729Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:59.607210Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:59.607247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:45:59.607397Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:45:59.607406Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:45:59.607413Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:45:59.607451Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:45:59.611184Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:45:59.611245Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:45:59.611266Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:45:59.611272Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.611276Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:45:59.611281Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.611496Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:45:59.611513Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:45:59.611525Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:45:59.611533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.611539Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.611547Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:45:59.611552Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.611583Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:45:59.611628Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:45:59.611646Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:45:59.611931Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.622213Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:45:59.622251Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:45:59.798846Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:45:59.799662Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:45:59.799682Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.799816Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.799827Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:45:59.799839Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:45:59.799920Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:45:59.799957Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:45:59.800118Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.800133Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:45:59.800508Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:45:59.800635Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.800890Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:45:59.800898Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.801008Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:45:59.801013Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:45:59.801020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.801168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.801174Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.801178Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:45:59.801193Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:45:59.801201Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:45:59.801209Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.801697Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.802086Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:45:59.802112Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:45:59.802117Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:45:59.803420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.803436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.803443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.804020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:45:59.804983Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.004773Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.005078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:00.090413Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9cqv0tg43cfgy7ndzp2d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjEyYTc2MDMtOTU1OTUzNzMtZjI3ZDI3MzUtOWE2NDcxMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:00.091429Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:00.091532Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.102306Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.102363Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.103413Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:00.119343Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7x9d18dj3rrgam2ag5qbtb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjhhNDVjNS0yM2E5ZjY4ZS0zNmFlMWZhZi01MjgyODc3Mg= ... EBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.141903Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.141961Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:00.141967Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:00.141975Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:00.142022Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:00.142030Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:00.142196Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2024-11-21T17:46:00.142246Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:00.142262Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2024-11-21T17:46:00.142266Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 0 2024-11-21T17:46:00.142286Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:00.142290Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2024-11-21T17:46:00.142344Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:00.142348Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:00.142354Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:00.142375Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:46:00.142383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:00.142390Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.559140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:00.559175Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:00.559194Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002497/r3tmp/tmpzAhuN4/pdisk_1.dat 2024-11-21T17:46:00.638417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:00.652848Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:00.694689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:00.694724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:00.705288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:00.809124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:00.821663Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T17:46:00.821725Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:00.829408Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:00.829441Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:00.829555Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:00.829561Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:00.829566Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:00.829598Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:00.829606Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:00.829620Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:00.829630Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T17:46:00.829633Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:00.829637Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:00.829639Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.829761Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:00.829768Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:00.829776Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# 
[0:0:0] 2024-11-21T17:46:00.829782Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:00.829786Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:00.829793Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:00.829797Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:00.829820Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.829864Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:00.829877Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:00.830081Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.840311Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.840344Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:01.016335Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:665:2557], serverId# [2:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:01.016584Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:01.016599Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.016833Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.016847Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:01.016859Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:01.016934Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:01.016974Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:01.017044Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.017060Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:01.017171Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:01.017279Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:01.017615Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:01.017624Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.017812Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:01.017820Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:01.017828Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.018052Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.018063Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:01.018069Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:01.018087Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:01.018099Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:01.018110Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.018232Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:01.018546Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:01.018558Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:01.018728Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:01.019632Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:701:2585], serverId# [2:702:2586], sessionId# [0:0:0] 2024-11-21T17:46:01.019662Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2024-11-21T17:46:01.019691Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:701:2585], serverId# [2:702:2586], sessionId# [0:0:0] >> test.py::test[aggr_factory-count-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi_list_nulls-default.txt-Debug] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBE58B59) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+1945 (0xBBE03A9) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+138 (0xBBE52FA) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBBEC6E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBE5AB0E) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBBEBE8A) NUnitTest::TTestFactory::Execute()+803 (0xBE5B283) NUnitTest::RunMain(int, char**)+3005 (0xBE6AB9D) ??+0 (0x7FA972B6AD90) 
__libc_start_main+128 (0x7FA972B6AE40) _start+41 (0xB04F029) >> test.py::test[pg-select_win_expr_partition-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_expr_partition-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_expr_partition-default.txt-Results] >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> KqpStats::SysViewClientLost [GOOD] >> KqpStats::SysViewCancelled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: 2024-11-21T17:45:59.310656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:59.311023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:59.311039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002476/r3tmp/tmppWJ9Qh/pdisk_1.dat 2024-11-21T17:45:59.413538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.430290Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:59.472798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:59.472836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:59.484768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:59.588773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.603385Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:45:59.603460Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:59.609941Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:59.609976Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:45:59.610088Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:45:59.610093Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:45:59.610097Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:45:59.610128Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:45:59.612462Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:45:59.612514Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:45:59.612531Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:45:59.612536Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.612540Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:45:59.612545Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.612727Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:45:59.612738Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:45:59.612747Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:45:59.612752Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.612757Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.612762Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:45:59.612766Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.612790Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:45:59.612830Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:45:59.612843Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:45:59.613044Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.623341Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:45:59.623384Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:45:59.800204Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:45:59.800855Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:45:59.800868Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.800943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.800948Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:45:59.800954Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:45:59.800991Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:45:59.801010Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:45:59.801098Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.801107Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:45:59.801389Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:45:59.801480Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.801688Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:45:59.801694Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.801788Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:45:59.801797Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:45:59.801804Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.801957Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.801963Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.801967Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:45:59.801976Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:45:59.801982Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:45:59.801987Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.802414Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.802664Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:45:59.802683Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:45:59.802687Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:45:59.803955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.803973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.803979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.804722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:45:59.805552Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.003382Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.003796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:00.081653Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9cqv5pryt4p119wawdgt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE2ZjY3YWItZGMyM2QyOTMtYzUyNWRhNjYtOTZmZGQ5MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:00.082594Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:00.082692Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.094075Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.094128Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.095150Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:00.095407Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:00.105801Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... X_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:00.807647Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:00.807652Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:00.807656Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:00.807663Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:00.807666Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:00.807693Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.807738Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:00.807751Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:00.807950Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.818242Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.818289Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:00.993057Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:665:2557], serverId# [2:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:00.993311Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:00.993324Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.993554Z node 2 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:00.993565Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:00.993576Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:00.993649Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:00.993685Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:00.993746Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:00.993760Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:00.993866Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:00.993949Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:00.994252Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:00.994289Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.994498Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:00.994508Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:00.994517Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:00.994728Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:00.994737Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:00.994742Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:00.994761Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:00.994771Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:00.994781Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.994891Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.995169Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:00.995177Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:00.995321Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:00.997035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:699:2583], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:00.997064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:710:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:00.997145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:00.997930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:00.999308Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:01.185810Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:01.186256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:01.231072Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9dx45pxfn5z1m4wk53ev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmZhNmM1ZDgtNjhiMGQ2NWEtOTc0NDdmNTEtZDM2OTdhODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:01.231195Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:814:2652], serverId# [2:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:01.231248Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:01.241890Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:01.241948Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.243014Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:822:2659], serverId# [2:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:01.243334Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:01.255427Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:01.255459Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.255558Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:01.255566Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:01.255605Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:822:2659], serverId# [2:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:01.255626Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.255634Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:01.255644Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:01.255658Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.255910Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:01.256008Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:01.256043Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.256046Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:01.256053Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:01.256088Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:01.256093Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.256198Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T17:46:01.256282Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:01.256301Z 
node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T17:46:01.256305Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T17:46:01.256333Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:01.256336Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T17:46:01.256379Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.256382Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:01.256385Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:01.256409Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:01.256413Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.256419Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> EraseRowsTests::EraseRowsShouldSuccess >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-ForceBlocks] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose |75.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[window-yql-14277-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by--Results] [GOOD] >> test.py::test[aggregate-compare_tuple--Debug] >> test.py::test[blocks-combine_all_min_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_max--Debug] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks-StreamLookup [GOOD] >> test.py::test[aggr_factory-multi_list_nulls-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-multi_list_nulls-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-multi_list_nulls-default.txt-Results] >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout >> test.py::test[window-current/session_extended--Debug] [GOOD] >> test.py::test[window-current/session_extended--Plan] [GOOD] >> test.py::test[window-current/session_extended--Results] >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 |75.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[join-yql-8980--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks-StreamLookup [GOOD] Test command err: 2024-11-21T17:45:58.980839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:58.981401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:58.981435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00254b/r3tmp/tmplSjxX2/pdisk_1.dat 2024-11-21T17:45:59.081842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.101208Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:59.143671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:59.143702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:59.154236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:59.266532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.285032Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:654:2553] 2024-11-21T17:45:59.285133Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:59.293119Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:59.293189Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:45:59.293388Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:45:59.293397Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:45:59.293406Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:45:59.293457Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:45:59.296929Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:45:59.297023Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:45:59.297051Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2571] 2024-11-21T17:45:59.297056Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.297062Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:45:59.297068Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.297537Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:45:59.297570Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:45:59.297670Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2024-11-21T17:45:59.297709Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:59.299017Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:642:2547], serverId# [1:667:2559], sessionId# [0:0:0] 2024-11-21T17:45:59.299087Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.299095Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.299106Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:45:59.299115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.299201Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:45:59.299261Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:45:59.299282Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:45:59.299678Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:59.299734Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:45:59.299869Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:45:59.299878Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:45:59.299884Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:45:59.299927Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:45:59.299936Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:45:59.299952Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:45:59.299966Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:703:2581] 2024-11-21T17:45:59.299970Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:45:59.299974Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:45:59.299979Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:45:59.300064Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:662:2557] 2024-11-21T17:45:59.300107Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:59.301362Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:45:59.301389Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:45:59.301486Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:45:59.301494Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.301503Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:45:59.301509Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:45:59.301643Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:59.301664Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 
2024-11-21T17:45:59.301777Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2024-11-21T17:45:59.301785Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2024-11-21T17:45:59.301791Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2024-11-21T17:45:59.301825Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:45:59.301832Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2024-11-21T17:45:59.301846Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:45:59.301861Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:712:2584] 2024-11-21T17:45:59.301865Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:45:59.301870Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2024-11-21T17:45:59.301875Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:45:59.301943Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2024-11-21T17:45:59.301951Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2024-11-21T17:45:59.301967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:45:59.301972Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.301977Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2024-11-21T17:45:59.301981Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:45:59.302079Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.312426Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:45:59.312476Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:45:59.354170Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:644:2549], serverId# [1:719:2589], sessionId# [0:0:0] 2024-11-21T17:45:59.354239Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:45:59.354315Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:45:59.354342Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:45:59.354500Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:45:59.354514Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:645:2550], serverId# [1:720:2590], sessionId# [0:0:0] 2024-11-21T17:45:59.354536Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2024-11-21T17:45:59.354550Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 
2024-11-21T17:45:59.354558Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2024-11-21T17:45:59.354596Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:45:59.364918Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:45:59.364970Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:45:59.365117Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2024-11-21T17:45:59.365128Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2024-11-21T17:45:59.509174Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2606], serverId# [1:740:2610], sessionId# [0:0:0] 2024-11-21T17:45:59.509279Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:738:2608], serverId# [1:741:2611], sessionId# [0:0:0] 2024-11-21T17:45:59.509964Z node 1 :TX_DAT ... nd 0 read sets to remove in 72075186224037888 2024-11-21T17:46:01.691466Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:01.691484Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.691490Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:01.691498Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:01.691504Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.691529Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:01.691581Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:01.691597Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:01.691891Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:01.702216Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:01.702262Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:01.895257Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:01.895425Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:01.895435Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.895736Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.895746Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 
immediate 0 planned 1 2024-11-21T17:46:01.895756Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:01.895818Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:01.895867Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:01.895932Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.895943Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:01.896053Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:01.896131Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:01.896574Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:01.896582Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.896703Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:01.896709Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:01.896715Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.899159Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.899172Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:01.899177Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:01.899196Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:01.899205Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:01.899214Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.899488Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:01.899774Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:01.899870Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:01.899878Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:01.904490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:01.904531Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:01.904584Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:01.905370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:01.906279Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.107711Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.108388Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:02.163017Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9esg03x4qaq2f387y9rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjQ2NWM3MDEtZDAyNDNkNGUtOTg0MmM5NzUtNDlmNDZmOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:02.163174Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:02.163237Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.173856Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.173906Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.195067Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7x9f1z4g9c0cwhpqge003f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTRlMjA5YzctZjA0ZGMwOGItYzk5OTUxMDItNTA0Y2UxMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:02.195526Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2024-11-21T17:46:02.196606Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:853:2683], serverId# [3:854:2684], sessionId# [0:0:0] 2024-11-21T17:46:02.196847Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:02.207239Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:02.207268Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.207280Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2024-11-21T17:46:02.207437Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2024-11-21T17:46:02.207442Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.207483Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:02.207489Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2024-11-21T17:46:02.207565Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.207573Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.207580Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.207588Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.207602Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:853:2683], serverId# [3:854:2684], sessionId# [0:0:0] 2024-11-21T17:46:02.214350Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7x9f2z2vt884jwt64axczh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTRlMjA5YzctZjA0ZGMwOGItYzk5OTUxMDItNTA0Y2UxMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:46:02.214462Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.224850Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.224888Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.225095Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTRlMjA5YzctZjA0ZGMwOGItYzk5OTUxMDItNTA0Y2UxMGM=, ActorId: [3:821:2658], ActorState: ExecuteState, TraceId: 01jd7x9f2z2vt884jwt64axczh, Create QueryResponse for error on request, msg: 2024-11-21T17:46:02.225258Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7x9f2z2vt884jwt64axczh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTRlMjA5YzctZjA0ZGMwOGItYzk5OTUxMDItNTA0Y2UxMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:02.225337Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.225423Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.225428Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> test.py::test[pg-select_win_expr_partition-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_frame-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: 2024-11-21T17:45:59.324674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:59.325165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:59.325188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00245a/r3tmp/tmpfReUhv/pdisk_1.dat 2024-11-21T17:45:59.428833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.448112Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:59.490434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:59.490463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:59.500943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:59.604942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.618499Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:45:59.618554Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:59.623106Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:59.623129Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:45:59.623220Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:45:59.623226Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:45:59.623230Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:45:59.623256Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:45:59.625361Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:45:59.625407Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:45:59.625422Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:45:59.625426Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.625428Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:45:59.625432Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.625595Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:45:59.625604Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:45:59.625613Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:45:59.625618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.625622Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.625629Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:45:59.625632Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.625655Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:45:59.625693Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:45:59.625704Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:45:59.625902Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.636206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:45:59.636277Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:45:59.811521Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:45:59.812174Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:45:59.812192Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.812842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.812857Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:45:59.812867Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:45:59.812934Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:45:59.812967Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:45:59.813123Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.813136Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:45:59.813502Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:45:59.813603Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.813870Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:45:59.813877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.813969Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:45:59.813974Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:45:59.813979Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.814133Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.814139Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.814143Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:45:59.814156Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:45:59.814163Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:45:59.814169Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.814624Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.814971Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:45:59.814997Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:45:59.815003Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:45:59.816674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.816697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.816707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.817516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:45:59.818628Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.006931Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.007361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:00.084894Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9cr837frqke845w24657, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y5NjY5MWEtOGZjYTQyMGUtMTMyMTllNjgtZjBjOWUyODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:00.085925Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:00.086006Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.096724Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.096777Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.097723Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:00.097986Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:00.108433Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:02.099992Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:02.100010Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.100016Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.100025Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.100031Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.100056Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.100114Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:02.100131Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:02.108571Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.119071Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.119130Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:02.294361Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:02.294588Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:02.294600Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.294933Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.294947Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:02.294958Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:02.295035Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:02.295077Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:02.295161Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.295180Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:02.295319Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:02.295408Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.295779Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:02.295789Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.295952Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:02.295960Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:02.295968Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.296160Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.296170Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:02.296176Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:02.296194Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:02.296206Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:02.296219Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.296520Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.296905Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:02.297027Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:02.297037Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:02.298585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.298606Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.298668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.299477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:02.300408Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.489159Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.489830Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:02.541481Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9f5ta1f2gyxt4bfbd5ss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmQzYjEwNTgtODNkNzRiZmYtNGI3YjFjMjEtMTljNmYwNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:02.541658Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:02.541726Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.552508Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.552566Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.553629Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:02.554034Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:02.564522Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:02.564554Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.564626Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:02.564634Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:02.564694Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.564703Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.564713Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.564729Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.564775Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:02.565023Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.565111Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.565150Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.565155Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:02.565162Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:02.565222Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:02.565245Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.565388Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T17:46:02.565475Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:02.565497Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T17:46:02.565503Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T17:46:02.565533Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:02.565538Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T17:46:02.565597Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.565602Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:02.565608Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:02.565634Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.565642Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.565648Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> test.py::test[pg_catalog-pg_stat_activity-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Results] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/pytest >> test.py::test[window-win_over_few_partitions_other--Results] [GOOD] Test command err: 127.0.0.1 - - [21/Nov/2024 17:43:18] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 17:43:19] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 17:43:20] "GET /nested_library.sql.txt HTTP/1.1" 200 - 127.0.0.1 - - [21/Nov/2024 17:43:21] "GET /nested_library.sql.txt HTTP/1.1" 200 - |75.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[blocks-add_uint64_opt--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: 2024-11-21T17:45:59.350677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:59.351175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:59.351204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00240d/r3tmp/tmp4xeW3P/pdisk_1.dat 2024-11-21T17:45:59.457306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.476359Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:59.519011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:59.519040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:59.529729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:59.634003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.648906Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:45:59.649020Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:59.656801Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:59.656854Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:45:59.657021Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:45:59.657031Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:45:59.657039Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:45:59.657092Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:45:59.660458Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:45:59.660569Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:45:59.660603Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:45:59.660609Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.660614Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:45:59.660620Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.660942Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:45:59.660973Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:45:59.660989Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:45:59.660997Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.661004Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.661016Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:45:59.661022Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.661063Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:45:59.661128Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:45:59.661149Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:45:59.661505Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.671848Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:45:59.671895Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:45:59.847775Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:45:59.848680Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:45:59.848708Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.848857Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.848868Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:45:59.848880Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:45:59.848962Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:45:59.848999Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:45:59.849177Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:45:59.849196Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:45:59.849588Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:45:59.849728Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:45:59.850099Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:45:59.850113Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.850254Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:45:59.850262Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:45:59.850271Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.850511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:45:59.850522Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:45:59.850527Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:45:59.850546Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:45:59.850557Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:45:59.850567Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:45:59.851256Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:45:59.851665Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:45:59.851695Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:45:59.851702Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:45:59.853746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.853771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.853782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:59.854698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:45:59.855699Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.045561Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.046099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:00.136650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9csd51038r0d07ajzzy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFlMWI3MTgtZWVmNmE5MS1lNzk4ZGNkMC03ZDQzZDJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:00.137853Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:00.137953Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.148846Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.148914Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.150179Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:00.150517Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:00.161020Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:02.213846Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:02.213864Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.213870Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.213879Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.213884Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.213911Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.213970Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:02.213988Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:02.214286Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.224815Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.224853Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:02.398960Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:02.399133Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:02.399142Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.399353Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.399360Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:02.399369Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:02.399427Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:02.399456Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:02.399513Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.399523Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:02.399621Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:02.399694Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.399985Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:02.399992Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.400124Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:02.400130Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:02.400136Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.400295Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.400303Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:02.400307Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:02.400321Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:02.400329Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:02.400337Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.400539Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.400742Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:02.400806Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:02.400810Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:02.401942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.401961Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.402001Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.402630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:02.403235Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.597149Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.597580Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:02.644927Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9f91epvwsw2rkw5hxf7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2Q0MjIwMzMtNDUwZjA0ZjctNjMzOTdlMTYtNGQxYmRjMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:02.645058Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:02.645117Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.655780Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.655836Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.656982Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:02.657259Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:02.667619Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:02.667643Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.667711Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:02.667718Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:02.667763Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.667769Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.667776Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.667784Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.667817Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:02.668097Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.668159Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.668190Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.668195Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:02.668201Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:02.668245Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:02.668253Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.668360Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T17:46:02.668406Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:02.668423Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T17:46:02.668428Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T17:46:02.668452Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:02.668456Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T17:46:02.668503Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.668508Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:02.668515Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:02.668535Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.668541Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.668547Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerLegacy >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> KqpParams::BadParameterType [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> test.py::test[aggr_factory-multi_list_nulls-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Debug] >> test.py::test[pg_catalog-pg_stat_activity-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Analyze] >> test.py::test[simple_columns-simple_columns_subreq-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::BadParameterType [GOOD] Test command err: Trying to start YDB, gRPC: 9939, MsgBus: 62992 2024-11-21T17:45:51.368205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:51.368777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:51.368808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001679/r3tmp/tmpBp5BZK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9939, node 1 TClient is connected to server localhost:62992 2024-11-21T17:45:51.698916Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:51.699771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:51.699787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:51.699792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:51.699891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:45:51.803871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:51.803908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:51.814517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:51.886930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:51.929615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:52.229250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:52.647773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:52.932988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:53.289270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1727:3348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:53.289307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:53.292882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.534621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:53.832667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.083855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.352252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.610024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.905512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2298:3791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.905547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.905601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2303:3796], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.906651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:55.081410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2305:3798], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:55.368019Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2605:4015] TxId: 281474976715671. Ctx: { TraceId: 01jd7x98d62z1a05cjv0krbp2b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZkYWFjZDMtMjAyMWE2ZDYtZWZhZGRlMzYtODQ5MzU5YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2024-11-21T17:45:55.369712Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2613:4056], TxId: 281474976715671, task: 3. Ctx: { TraceId : 01jd7x98d62z1a05cjv0krbp2b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YTZkYWFjZDMtMjAyMWE2ZDYtZWZhZGRlMzYtODQ5MzU5YTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:55.369837Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2611:4054], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YTZkYWFjZDMtMjAyMWE2ZDYtZWZhZGRlMzYtODQ5MzU5YTc=. TraceId : 01jd7x98d62z1a05cjv0krbp2b. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:55.369878Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2612:4055], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YTZkYWFjZDMtMjAyMWE2ZDYtZWZhZGRlMzYtODQ5MzU5YTc=. CustomerSuppliedId : . TraceId : 01jd7x98d62z1a05cjv0krbp2b. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2605:4015], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:55.371341Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTZkYWFjZDMtMjAyMWE2ZDYtZWZhZGRlMzYtODQ5MzU5YTc=, ActorId: [1:2569:4015], ActorState: ExecuteState, TraceId: 01jd7x98d62z1a05cjv0krbp2b, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 30134, MsgBus: 29666 2024-11-21T17:45:56.047206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:56.047255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:45:56.047283Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001679/r3tmp/tmpYUwLSg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30134, node 2 TClient is connected to server localhost:29666 TClient is connected to server localhost:29666 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:56.193349Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:56.193376Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:56.193381Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-1 ... : 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:58.402999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:58.639660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:58.931114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2294:3787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:58.931163Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:58.931211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2299:3792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:58.932279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:59.101560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2301:3794], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:59.317304Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:2603:4011] TxId: 281474976715671. Ctx: { TraceId: 01jd7x9c8m3j5abdnhbdwmj0ec, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzA3ZGMyZDQtMjY2NzE5ZWYtNGY3MTI2M2EtYWVhMTMzZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2024-11-21T17:45:59.317454Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2611:4054], TxId: 281474976715671, task: 3. Ctx: { TraceId : 01jd7x9c8m3j5abdnhbdwmj0ec. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzA3ZGMyZDQtMjY2NzE5ZWYtNGY3MTI2M2EtYWVhMTMzZDE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2603:4011], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:59.317511Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2609:4052], TxId: 281474976715671, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzA3ZGMyZDQtMjY2NzE5ZWYtNGY3MTI2M2EtYWVhMTMzZDE=. TraceId : 01jd7x9c8m3j5abdnhbdwmj0ec. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2603:4011], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:59.317541Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2610:4053], TxId: 281474976715671, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd7x9c8m3j5abdnhbdwmj0ec. SessionId : ydb://session/3?node_id=2&id=YzA3ZGMyZDQtMjY2NzE5ZWYtNGY3MTI2M2EtYWVhMTMzZDE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2603:4011], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:01.938487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:46:01.938513Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:02.255438Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzA3ZGMyZDQtMjY2NzE5ZWYtNGY3MTI2M2EtYWVhMTMzZDE=, ActorId: [2:2565:4011], ActorState: ExecuteState, TraceId: 01jd7x9c8m3j5abdnhbdwmj0ec, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16068, MsgBus: 4348 2024-11-21T17:46:02.521700Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790293172296335:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:02.521719Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001679/r3tmp/tmphLyQlK/pdisk_1.dat 2024-11-21T17:46:02.538296Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16068, node 3 2024-11-21T17:46:02.545957Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:02.545970Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:02.545972Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:02.546048Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4348 TClient is connected to server localhost:4348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:02.627557Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:02.627591Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:02.627940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.628382Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:46:02.632176Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:02.645302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:46:02.655090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.678804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:02.687849Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:02.866651Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790293172297876:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.866708Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.872269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.878967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.894024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.908306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.921642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.935652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.952246Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790293172298377:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.952274Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.952285Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790293172298382:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.953075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:02.956965Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790293172298384:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:03.141899Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmVkZjIxMjktMmVmOTgzNzAtZTA3ZmJlYjUtMzQxMGQ4NTE=, ActorId: [3:7439790297467265963:2454], ActorState: ExecuteState, TraceId: 01jd7x9fzt21rj98byrkwftbd5, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $group type mismatch, expected: { Kind: Data Data { Scheme: 2 } }, actual: Type (Data), schemeType: Int32, schemeTypeId: 1 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> test.py::test[pg-select_win_frame-default.txt-Debug] [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> test.py::test[pg-select_win_frame-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_frame-default.txt-Results] >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> 
TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> test.py::test[blocks-combine_hashed_max--Debug] [GOOD] >> test.py::test[blocks-combine_hashed_max--Plan] [GOOD] >> test.py::test[blocks-combine_hashed_max--Results] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds >> test.py::test[pg_catalog-pg_stat_database-default.txt-Analyze] [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Debug] >> test.py::test[window-current/session_extended--Results] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--Debug] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult |75.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2024-11-21T17:46:02.601061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:02.601582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:02.601615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023ed/r3tmp/tmp5Y2Mo6/pdisk_1.dat 2024-11-21T17:46:02.715111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.733345Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:02.776088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:02.776119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:02.786646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:02.896073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.910525Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:46:02.910608Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:02.916460Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:02.916505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:02.916630Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:02.916636Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:02.916641Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:02.916680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:02.919230Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:02.919297Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:02.919320Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:46:02.919324Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:02.919327Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:02.919331Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.919566Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:02.919581Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:02.919591Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:46:02.919596Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.919601Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.919610Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.919614Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.919644Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.919693Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:02.919709Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:02.919956Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.930302Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.930371Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:03.108521Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:46:03.109430Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:03.109455Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.109596Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.109606Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:03.109616Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:03.109686Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:03.109723Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:03.109897Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.109916Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:03.110312Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:03.110463Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.110858Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:03.110869Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.111020Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:03.111029Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:03.111038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.111294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.111305Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:03.111311Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:03.111328Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:03.111339Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:03.111349Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.112081Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.112525Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:03.112552Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:03.112559Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:03.114216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.114237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.114246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.115005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:03.115918Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.302065Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.302444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:03.383534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9fz9ab3jek03febkhkgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWUwZmI3NTEtYjk1M2NmZWEtZmZmMDVjNmItNTc0MDk0NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:03.384589Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:03.384681Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:03.395428Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:03.395472Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.396543Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:03.396582Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:03.406933Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... 7:46:03.978778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:03.989494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:04.094243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.106547Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T17:46:04.106634Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:04.113541Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:04.113578Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:04.113689Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:04.113696Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:04.113701Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:04.113733Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:04.113741Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:04.113754Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:04.113765Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T17:46:04.113768Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:04.113771Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:04.113774Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.113917Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:04.113929Z node 2 :TX_DATASHARD 
DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:04.113939Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:04.113947Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.113952Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.113958Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:04.113962Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.113986Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:04.114025Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:04.114037Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:04.114242Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.124519Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:04.124559Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:04.298917Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:665:2557], serverId# [2:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:04.299109Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:04.299120Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.299319Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.299329Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:04.299338Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:04.299398Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:04.299432Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:04.299494Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.299506Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:04.299592Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:04.299667Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.299984Z node 2 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:04.299992Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.300170Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:04.300177Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:04.300185Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.300438Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.300449Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:04.300455Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:04.300472Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:04.300481Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:04.300492Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.300602Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.300882Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:04.300892Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:04.301039Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:04.301842Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:701:2585], serverId# [2:702:2586], sessionId# [0:0:0] 2024-11-21T17:46:04.301880Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.322393Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:04.322422Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.322502Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:701:2585], serverId# [2:702:2586], sessionId# [0:0:0] 2024-11-21T17:46:04.322899Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:707:2591], serverId# [2:708:2592], sessionId# [0:0:0] 2024-11-21T17:46:04.322930Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.322976Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:04.322981Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.323003Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:707:2591], serverId# [2:708:2592], sessionId# [0:0:0] 2024-11-21T17:46:04.323205Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 
72075186224037888, clientId# [2:712:2596], serverId# [2:713:2597], sessionId# [0:0:0] 2024-11-21T17:46:04.323221Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.323236Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:04.323240Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.323259Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:712:2596], serverId# [2:713:2597], sessionId# [0:0:0] 2024-11-21T17:46:04.323440Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:717:2601], serverId# [2:718:2602], sessionId# [0:0:0] 2024-11-21T17:46:04.323454Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.323469Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:04.323473Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.323493Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:717:2601], serverId# [2:718:2602], sessionId# [0:0:0] 2024-11-21T17:46:04.323661Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:722:2606], serverId# [2:723:2607], sessionId# [0:0:0] 2024-11-21T17:46:04.323675Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.323697Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:04.323701Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.323722Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:722:2606], serverId# [2:723:2607], sessionId# [0:0:0] 2024-11-21T17:46:04.323888Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:727:2611], serverId# [2:728:2612], sessionId# [0:0:0] 2024-11-21T17:46:04.323902Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.323916Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:04.323920Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.323939Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:727:2611], serverId# [2:728:2612], sessionId# [0:0:0] >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> test.py::test[aggregate-compare_tuple--Debug] [GOOD] >> test.py::test[aggregate-compare_tuple--Plan] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> 
TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } >> test.py::test[aggregate-compare_tuple--Plan] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: 2024-11-21T17:46:03.220665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:03.220990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:03.221004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023d5/r3tmp/tmp8HWp9c/pdisk_1.dat 2024-11-21T17:46:03.312700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.330540Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:03.372754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:03.372788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:03.383234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:03.486947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.500432Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:46:03.500489Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:03.507760Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:03.507794Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:03.507956Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:03.507966Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:03.507973Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:03.508011Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:03.511319Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:03.511377Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:03.511399Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:46:03.511404Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:03.511408Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:03.511412Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.511626Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:03.511644Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:03.511655Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:46:03.511662Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.511668Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.511675Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:03.511680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.511709Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:03.511756Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:03.511771Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:03.512029Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.522297Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:03.522337Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:03.696860Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:46:03.697659Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:03.697673Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.697763Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.697770Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:03.697779Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:03.697837Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:03.697866Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:03.697988Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.698001Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:03.698328Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:03.698431Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.698729Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:03.698735Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.698842Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:03.698848Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:03.698855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.699017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.699024Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:03.699029Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:03.699044Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:03.699051Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:03.699060Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.699622Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.699909Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:03.699930Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:03.699935Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:03.701481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.701502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.701511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.702232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:03.703012Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.895702Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.896096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:03.992175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9ghnd58x8jp503jfjhb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJhNjQ0YS1lYmE5OWJmMC1kZGYzYmU0Yi1jYjY5MzRlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:03.993356Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:03.993477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:04.004459Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:04.004549Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.007643Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:04.007997Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.018510Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... X_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:04.688799Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:04.688805Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.688809Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.688814Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:04.688818Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.688842Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:04.688886Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:04.688900Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:04.689131Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.699338Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:04.699385Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:04.873378Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:665:2557], serverId# [2:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:04.873573Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:04.873582Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.873774Z node 2 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.873785Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:04.873795Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:04.873866Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:04.873897Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:04.873960Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.873972Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:04.874045Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:04.874131Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.874408Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:04.874414Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.874580Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:04.874585Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:04.874590Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.874769Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.874778Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:04.874783Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:04.874798Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:04.874807Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:04.874816Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.874901Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.875106Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:04.875112Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:04.875219Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:04.876258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:699:2583], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.876283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:710:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.876342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.876935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:04.878274Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:05.065020Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:05.065481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:05.109862Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9hpc2arv8fapw1f3v9m4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzIwZDQ3MGQtMzczMTE5ODMtMWNmZTA1NWQtMzJkMTNhYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:05.110005Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:814:2652], serverId# [2:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:05.110065Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:05.120643Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:05.120687Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.121652Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:822:2659], serverId# [2:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:05.121897Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:05.132252Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:05.132276Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.132345Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:05.132352Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:05.132393Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:822:2659], serverId# [2:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:05.132409Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.132416Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.132423Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:05.132433Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.132618Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:05.132696Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:05.132731Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.132736Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:05.132744Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:05.132782Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:05.132790Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.132915Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T17:46:05.132962Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:05.132983Z 
node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T17:46:05.132990Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T17:46:05.133020Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:05.133025Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T17:46:05.133079Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.133085Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:05.133091Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:05.133116Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.133122Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.133129Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-Debug] [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds >> test.py::test[pg_catalog-pg_stat_database-default.txt-ForceBlocks] >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> test.py::test[blocks-combine_hashed_max--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--Debug] >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination 
[GOOD] >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: 2024-11-21T17:46:01.736173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:01.737073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:01.737120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002409/r3tmp/tmpAXn9sr/pdisk_1.dat 2024-11-21T17:46:01.861929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:01.891268Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:01.943328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:01.943368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:01.955482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:02.067957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.085743Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:654:2553] 2024-11-21T17:46:02.085834Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:02.094397Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:02.094468Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:02.094666Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:02.094675Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:02.094683Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:02.094728Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:02.098627Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:02.098700Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:02.098727Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2571] 2024-11-21T17:46:02.098732Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:02.098737Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:02.098742Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.099162Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:02.099187Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:02.099278Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2024-11-21T17:46:02.099315Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:02.100703Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:642:2547], serverId# [1:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:02.100762Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.100768Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.100774Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.100782Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.100847Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.100880Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:02.100895Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:02.101279Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:02.101329Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:02.101443Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:46:02.101451Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:46:02.101458Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:46:02.101488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:02.101495Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:46:02.101507Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:02.101520Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:703:2581] 2024-11-21T17:46:02.101524Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:46:02.101527Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:46:02.101531Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:02.101609Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:662:2557] 2024-11-21T17:46:02.101654Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:02.102803Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:46:02.102820Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:46:02.102906Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:02.102912Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.102918Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:46:02.102923Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:02.103021Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:02.103038Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 
2024-11-21T17:46:02.103131Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2024-11-21T17:46:02.103138Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2024-11-21T17:46:02.103144Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2024-11-21T17:46:02.103168Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:02.103175Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2024-11-21T17:46:02.103185Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:02.103197Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:712:2584] 2024-11-21T17:46:02.103200Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:46:02.103204Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2024-11-21T17:46:02.103207Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:46:02.103273Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2024-11-21T17:46:02.103283Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2024-11-21T17:46:02.103298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:02.103302Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.103307Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2024-11-21T17:46:02.103311Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:46:02.103723Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.114037Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.114079Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:02.155527Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:644:2549], serverId# [1:719:2589], sessionId# [0:0:0] 2024-11-21T17:46:02.155626Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:46:02.155777Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:46:02.155823Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:46:02.156030Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:02.156046Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:645:2550], serverId# [1:720:2590], sessionId# [0:0:0] 2024-11-21T17:46:02.156069Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2024-11-21T17:46:02.156103Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 
2024-11-21T17:46:02.156117Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2024-11-21T17:46:02.156182Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:46:02.166600Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:46:02.166648Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:02.166816Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2024-11-21T17:46:02.166827Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:02.310864Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2606], serverId# [1:740:2610], sessionId# [0:0:0] 2024-11-21T17:46:02.310992Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:738:2608], serverId# [1:741:2611], sessionId# [0:0:0] 2024-11-21T17:46:02.312015Z node 1 :TX_DAT ... 72075186224037892, clientId# [3:1106:2854], serverId# [3:1134:2871], sessionId# [0:0:0] 2024-11-21T17:46:05.461761Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack init split/merge destination OpId 281474976715664 2024-11-21T17:46:05.461768Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state SplitDstReceivingSnapshot 2024-11-21T17:46:05.462049Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2024-11-21T17:46:05.462096Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715664 2024-11-21T17:46:05.462102Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2024-11-21T17:46:05.462297Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715664 at state Ready 2024-11-21T17:46:05.472635Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715664 2024-11-21T17:46:05.472726Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2024-11-21T17:46:05.473122Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T17:46:05.473132Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2024-11-21T17:46:05.473195Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T17:46:05.473199Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2024-11-21T17:46:05.473665Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T17:46:05.473673Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2024-11-21T17:46:05.473775Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T17:46:05.473779Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of 
tablet# 72075186224037889, table# 8, finished edge# 0, front# 0 2024-11-21T17:46:05.473884Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T17:46:05.473891Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2024-11-21T17:46:05.473969Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715664 2024-11-21T17:46:05.474005Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715664 2024-11-21T17:46:05.474020Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715664 2024-11-21T17:46:05.474038Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715664 2024-11-21T17:46:05.474045Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715664 2024-11-21T17:46:05.474079Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715664 2024-11-21T17:46:05.474129Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715664 2024-11-21T17:46:05.474136Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715664 2024-11-21T17:46:05.474141Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715664 2024-11-21T17:46:05.474145Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715664 2024-11-21T17:46:05.474158Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715664 2024-11-21T17:46:05.474268Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715664 2024-11-21T17:46:05.474291Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2024-11-21T17:46:05.474323Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2024-11-21T17:46:05.474413Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1175:2901], serverId# [3:1176:2902], sessionId# [0:0:0] 2024-11-21T17:46:05.474420Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1174:2900], serverId# [3:1177:2903], sessionId# [0:0:0] 2024-11-21T17:46:05.474435Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2024-11-21T17:46:05.474529Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2024-11-21T17:46:05.474772Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715664 
2024-11-21T17:46:05.474789Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2024-11-21T17:46:05.474804Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:05.474818Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2024-11-21T17:46:05.474827Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1180:2906] 2024-11-21T17:46:05.474830Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2024-11-21T17:46:05.474834Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2024-11-21T17:46:05.474839Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T17:46:05.474918Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715664 2024-11-21T17:46:05.474984Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2024-11-21T17:46:05.474988Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T17:46:05.475019Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2024-11-21T17:46:05.475025Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.475031Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T17:46:05.475037Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T17:46:05.475054Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1175:2901], serverId# [3:1176:2902], sessionId# [0:0:0] 2024-11-21T17:46:05.475116Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715664 2024-11-21T17:46:05.475123Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2024-11-21T17:46:05.475132Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:05.475139Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2024-11-21T17:46:05.475144Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1182:2908] 2024-11-21T17:46:05.475147Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2024-11-21T17:46:05.475150Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2024-11-21T17:46:05.475152Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T17:46:05.475188Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715664 2024-11-21T17:46:05.475236Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2024-11-21T17:46:05.475240Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T17:46:05.475266Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T17:46:05.475270Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037892 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.475273Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2024-11-21T17:46:05.475276Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T17:46:05.475307Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1174:2900], serverId# [3:1177:2903], sessionId# [0:0:0] 2024-11-21T17:46:05.475318Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 2000 2024-11-21T17:46:05.475322Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2024-11-21T17:46:05.475353Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2024-11-21T17:46:05.475356Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2024-11-21T17:46:05.495924Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715664 2024-11-21T17:46:05.496715Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2024-11-21T17:46:05.497257Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T17:46:05.497271Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2024-11-21T17:46:05.497368Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:05.497373Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2024-11-21T17:46:05.497399Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1046:2799], serverId# [3:1047:2800], sessionId# [0:0:0] 2024-11-21T17:46:05.497430Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715664 2024-11-21T17:46:05.497439Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:46:05.497443Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 >> DataShardVolatile::DistributedWriteShardRestartBeforePlan [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb 
[GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead Test command err: Trying to start YDB, gRPC: 18979, MsgBus: 16419 2024-11-21T17:45:47.269742Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790228902072543:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:47.269798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168f/r3tmp/tmpJFNIEu/pdisk_1.dat 2024-11-21T17:45:47.345673Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18979, node 1 2024-11-21T17:45:47.363564Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:47.363577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:47.363580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:47.363617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16419 2024-11-21T17:45:47.416479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:47.416512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:47.424643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:45:47.449759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.453192Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:47.520795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:47.601901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.637765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.666184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.787782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790228902073940:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.787820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.792877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.810380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.824297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.844437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.867845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.889391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.911212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790228902074454:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.911253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.911544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790228902074461:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.912431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:47.914848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:47.914892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790228902074463:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:48.148352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:45:48.207500Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91de06f48sv0h6eb2yk4, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.216307Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91dh2q825pmpbbpyqane, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.221636Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91ds5yzmn718v8c53xr0, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.226756Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91e10m61t9axe3v30vat, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.231883Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91e3a8xt36emhxcd82c2, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.241138Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91e82ppq71x96wp3041v, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.251191Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91ejb9s6jvw6nc6xg9sx, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.264615Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790233197042197:2454] TxId: 281474976715673. Ctx: { TraceId: 01jd7x91ew081nnvn85wx5d9vv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 8ms } {
: Error: Cancelling after 12ms during execution } ] 2024-11-21T17:45:48.265742Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91ew081nnvn85wx5d9vv, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.280336Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91fd9mjbp1z0hcfnckev, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.297465Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91fs6peetk6fryz72e14, Create QueryResponse for error on request, msg: 2024-11-21T17:45:48.315928Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY0YWE5MDAtNWQwMDNlZjItNzYwZjk2NDgtZTE1OTliZWQ=, ActorId: [1:7439790233197042053:2454], ActorState: ExecuteState, TraceId: 01jd7x91gc5k2dehhz6f70ethx, Create QueryResponse for error on request, msg: [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. Trying to start YDB, gRPC: 21834, MsgBus: 9089 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168f/r3tmp/tmpofHTdh/pdisk_1.dat 2024-11-21T17:45:50.524371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/init ... esponse for error on request, msg: 2024-11-21T17:45:52.696191Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmI1MGYwYTQtZTBhZDUxY2ItMTVlNjdhMWEtN2Q4YTBhYmE=, ActorId: [1:7439790244459706234:2454], ActorState: ExecuteState, TraceId: 01jd7x95ra82xvx75s45qfeh21, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29768, MsgBus: 64759 2024-11-21T17:46:04.074731Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790299277703094:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:04.074940Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168f/r3tmp/tmpBELlrq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29768, node 2 2024-11-21T17:46:04.089172Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:04.091144Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:04.091148Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:04.091150Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:04.091187Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64759 TClient is connected to server localhost:64759 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:04.174782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:04.174820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:04.175892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:04.177201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:04.187764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:04.196551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:04.212903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:04.224194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:04.330343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790299277704631:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.330370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.333828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.338903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.349045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.355995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.363242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.369910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.378086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790299277705130:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.378110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.378178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790299277705135:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.378652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:04.383294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790299277705137:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:04.523166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.548922Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hc38mjxdtfpem3gc1e6, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.552036Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hc50ebf3n26ecjwrpj1, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.556160Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hc819veyp8bj69zhxbr, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.560712Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hcc7jadw51y6rpdswk4, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.566346Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hch83895p0zhpx1vvdp, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.573028Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hcp3vzabxfhme3hdeab, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.581138Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hcx6c85zvjfq4b3x1t4, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.589916Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hd51j4x545a7d22j0sy, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.605810Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hdmfkprd5c0gryws3kq, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.619127Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790299277705566:2454] TxId: 281474976715673. Ctx: { TraceId: 01jd7x9he04gsgn1ppknhee688, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 10ms } {
: Error: Cancelling after 10ms during execution } ] 2024-11-21T17:46:04.619257Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9he04gsgn1ppknhee688, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.632449Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hed7myc127bpc3w54bx, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.645737Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRiYmQ0MmEtYWFlN2UzYzEtZWQ0NmU2ODAtNDRjOTZmNTU=, ActorId: [2:7439790299277705409:2454], ActorState: ExecuteState, TraceId: 01jd7x9hesax7kfjsxyj854h5r, Create QueryResponse for error on request, msg: [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2024-11-21T17:46:02.464253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:02.464784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:02.464814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023fb/r3tmp/tmptuXeOZ/pdisk_1.dat 2024-11-21T17:46:02.574793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.593849Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:02.636422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:02.636457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:02.647040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:02.751917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.766085Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:46:02.766141Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:02.772627Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:02.772662Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:02.772776Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:02.772783Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:02.772787Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:02.772814Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:02.775680Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:02.775735Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:02.775755Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:46:02.775760Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:02.775764Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:02.775769Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.775957Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:02.775971Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:02.775983Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:46:02.775991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.775996Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.776005Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:02.776052Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.776085Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:02.776131Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:02.776146Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:02.776440Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.788207Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:02.788285Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:02.965797Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:46:02.966661Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:02.966684Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.966822Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.966832Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:02.966844Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:02.966908Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:02.966938Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:02.967084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:02.967100Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:02.967516Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:02.967661Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:02.967946Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:02.967953Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.968051Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:02.968056Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:02.968062Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.968219Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:02.968245Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:02.968251Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:02.968269Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:02.968278Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:02.968287Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:02.968795Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:02.969106Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:02.969136Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:02.969142Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:02.970707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.970724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.970731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.971341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:02.972188Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.158499Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.158893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:03.221061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9fttfp2vqh1ggt5act9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzViNDU5MTItODVlMGI5ZjUtMTIxZDI5OGItZjY4ODg4ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:03.221753Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:03.221826Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:03.232445Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:03.232495Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.233446Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:03.233691Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:03.244002Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:05.325613Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:05.325626Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.325630Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.325637Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:05.325641Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.325663Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:05.325718Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:05.325733Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:05.325962Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:05.336289Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:05.336337Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:05.510511Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:05.510649Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:05.510656Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.510915Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.510925Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:05.510933Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:05.510985Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:05.511015Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:05.511062Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.511073Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:05.511149Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:05.511211Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.511463Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:05.511469Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.511608Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:05.511613Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:05.511619Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.511739Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.511744Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:05.511748Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:05.511761Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:05.511768Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:05.511776Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.511931Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:05.512148Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:05.512221Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:05.512262Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:05.513652Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:05.513675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:05.513719Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:05.514336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:05.514993Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:05.702131Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:05.702474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:05.743505Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9ja98ed19wv901bjne9f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNjFlYmMtOThlMzFhNmQtNWIwZmNkOTUtNDE5ZmMwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:05.743645Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:05.743705Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:05.754439Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:05.754497Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.755689Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:05.755999Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:05.766442Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:05.766471Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.766539Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:05.766546Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:05.766597Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.766604Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.766611Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:05.766624Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.766663Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:05.766882Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:05.766970Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:05.766997Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.767000Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:05.767005Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:05.767039Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:05.767043Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.767140Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T17:46:05.767204Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:05.767220Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T17:46:05.767223Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T17:46:05.767248Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:05.767251Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T17:46:05.767291Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.767293Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:05.767296Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:05.767319Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.767323Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.767327Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] |75.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |75.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Debug] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Plan] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Results] |75.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |75.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[aggregate-group_by_rollup_column_ref_same_names--Results] [GOOD] >> test.py::test[pg_catalog-pg_stat_database-default.txt-ForceBlocks] [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> KqpLimits::QueryExecTimeout [GOOD] >> test.py::test[aggregate-compare_tuple--Results] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Debug] >> TKeyValueTracingTest::WriteHuge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2024-11-21T17:46:03.199165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:03.199656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:03.199688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023d9/r3tmp/tmpiKiRZO/pdisk_1.dat 2024-11-21T17:46:03.294778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.312091Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:03.354729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:03.354765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:03.365365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:03.469425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.483189Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:46:03.483248Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:03.489964Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:03.490008Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:03.490177Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:03.490188Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:03.490196Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:03.490241Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:03.494183Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:03.494269Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:03.494295Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:46:03.494301Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:03.494306Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:03.494311Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.494586Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:03.494610Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:03.494625Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:46:03.494633Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.494639Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.494648Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:03.494653Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.494690Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:03.494743Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:03.494762Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:03.495077Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.505367Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:03.505409Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:03.679743Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:46:03.680591Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:03.680613Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.680745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.680755Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:03.680765Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:03.680833Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:03.680865Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:03.681025Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.681041Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:03.681424Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:03.681558Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.681917Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:03.681927Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.682062Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:03.682069Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:03.682078Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.682313Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.682322Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:03.682328Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:03.682347Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:03.682358Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:03.682368Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.683035Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.683439Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:03.683508Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:03.683515Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:03.685382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.685408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.685439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.686293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:03.687262Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.873460Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.873765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:03.958207Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9gh53eehwvjxvv5n5jdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk0MzFmMDgtYWRiMTg2NDYtZWU1ZjFkNGMtODllNjJiM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:03.959365Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:03.959471Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:03.970387Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:03.970456Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.971659Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:03.971966Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:03.971977Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:03.972012Z n ... lt 281474976715662 datashard 72075186224037893 state Ready 2024-11-21T17:46:06.071625Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 Got TEvSchemaChangedResult from SS at 72075186224037893 2024-11-21T17:46:06.072181Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1203:2990], serverId# [2:1204:2991], sessionId# [0:0:0] 2024-11-21T17:46:06.072213Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1203:2990], serverId# [2:1204:2991], sessionId# [0:0:0] 2024-11-21T17:46:06.072381Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1208:2995], serverId# [2:1209:2996], sessionId# [0:0:0] 2024-11-21T17:46:06.072402Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1208:2995], serverId# [2:1209:2996], sessionId# [0:0:0] 2024-11-21T17:46:06.072518Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1213:3000], serverId# [2:1214:3001], sessionId# [0:0:0] 2024-11-21T17:46:06.072534Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1213:3000], serverId# [2:1214:3001], sessionId# [0:0:0] 2024-11-21T17:46:06.072901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:06.073389Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2024-11-21T17:46:06.073409Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:06.073419Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:46:06.073443Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 
2024-11-21T17:46:06.073454Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2024-11-21T17:46:06.073463Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.106109Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1236:3020] 2024-11-21T17:46:06.106155Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:06.107049Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:06.107070Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:06.107151Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2024-11-21T17:46:06.107156Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2024-11-21T17:46:06.107160Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2024-11-21T17:46:06.107186Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:06.107192Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2024-11-21T17:46:06.107202Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:06.107212Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1253:3029] 2024-11-21T17:46:06.107215Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2024-11-21T17:46:06.107218Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2024-11-21T17:46:06.107221Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2024-11-21T17:46:06.107279Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2024-11-21T17:46:06.107285Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2024-11-21T17:46:06.107297Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1235:3019], serverId# [2:1244:3024], sessionId# [0:0:0] 2024-11-21T17:46:06.107376Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2024-11-21T17:46:06.107380Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.107385Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2024-11-21T17:46:06.107389Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2024-11-21T17:46:06.107406Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2024-11-21T17:46:06.107439Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2024-11-21T17:46:06.107450Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2024-11-21T17:46:06.107511Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2024-11-21T17:46:06.127918Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2024-11-21T17:46:06.127969Z node 2 
:TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:06.302287Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1263:3039], serverId# [2:1265:3041], sessionId# [0:0:0] 2024-11-21T17:46:06.302403Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2024-11-21T17:46:06.302414Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2024-11-21T17:46:06.302595Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2024-11-21T17:46:06.302606Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:06.302614Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2024-11-21T17:46:06.302673Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys extracted: 0 2024-11-21T17:46:06.302704Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:06.302878Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2024-11-21T17:46:06.302895Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2024-11-21T17:46:06.302988Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:06.303099Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.303395Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2024-11-21T17:46:06.303408Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2024-11-21T17:46:06.303465Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037894 step# 4000 txid# 281474976715663} 2024-11-21T17:46:06.303473Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2024-11-21T17:46:06.303480Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2024-11-21T17:46:06.303764Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2024-11-21T17:46:06.303777Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2024-11-21T17:46:06.303784Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2024-11-21T17:46:06.303800Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:06.303809Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2024-11-21T17:46:06.303821Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2024-11-21T17:46:06.303962Z node 2 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2024-11-21T17:46:06.303984Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:06.303999Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2024-11-21T17:46:06.304017Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:46:06.304104Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2024-11-21T17:46:06.304126Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2024-11-21T17:46:06.304180Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.304781Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2024-11-21T17:46:06.304924Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2024-11-21T17:46:06.304937Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2024-11-21T17:46:06.305824Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1292:3062], serverId# [2:1293:3063], sessionId# [0:0:0] 2024-11-21T17:46:06.305874Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1292:3062], serverId# [2:1293:3063], sessionId# [0:0:0] 2024-11-21T17:46:06.306112Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1297:3067], serverId# [2:1298:3068], sessionId# [0:0:0] 2024-11-21T17:46:06.306143Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1297:3067], serverId# [2:1298:3068], sessionId# [0:0:0] 2024-11-21T17:46:06.306372Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1302:3072], serverId# [2:1303:3073], sessionId# [0:0:0] 2024-11-21T17:46:06.306408Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1302:3072], serverId# [2:1303:3073], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2024-11-21T17:45:42.373094Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.373103Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.373107Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.373237Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:45:42.374929Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.374991Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.375089Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.375222Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.375265Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.375302Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.375312Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:45:42.375497Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.375501Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.375505Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.375576Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.376416Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.376463Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.376516Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.376591Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.376613Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.376628Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.376634Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:45:42.376863Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.376867Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.376870Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.376927Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.377075Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.377778Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.377850Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.378169Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.378406Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.378454Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T17:45:42.378465Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:45:42.378769Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.378772Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.378775Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.378837Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.378993Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.379013Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.381973Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.382810Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.382910Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.382934Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.382960Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:45:42.383335Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.383341Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.383344Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.383428Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.383576Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.383610Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.383647Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.383729Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.383755Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.383777Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.383784Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:45:42.383937Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.383941Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.383944Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.383980Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:45:42.384121Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.384148Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.384173Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.384225Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.384267Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.384290Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.384296Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:45:42.432208Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.432217Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.432220Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.432338Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.432568Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.432624Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.433232Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.433497Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.433594Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.433655Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:45:42.433669Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:45:42.434050Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.434054Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.434057Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:45:42.434127Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:45:42.434308Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:45:42.434327Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.434359Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:45:42.435086Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:42.435128Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:45:42.435142Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T17:45:42.435149Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:45:42.526787Z :ReadSession INFO: Random seed for debugging is 1732211142526776 2024-11-21T17:45:42.789729Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790208704999198:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:42.789783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:42.803643Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790204698273716:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ecd/r3tmp/tmpPTrg8a/pdisk_1.dat 2024-11-21T17:45:42.833466Z n ... e: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid cc4518d5-40e71ab0-7493e0a0-51862a8a has messages 1 2024-11-21T17:45:55.885027Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 read done: guid# cc4518d5-40e71ab0-7493e0a0-51862a8a, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 200 2024-11-21T17:45:55.885044Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 response to read: guid# cc4518d5-40e71ab0-7493e0a0-51862a8a 2024-11-21T17:45:55.885149Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 Process answer. Aval parts: 0 2024-11-21T17:45:55.885381Z :DEBUG: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:45:55.885459Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T17:45:55.885481Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: 2024-11-21T17:45:55.885540Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T17:45:55.885577Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 got read request: guid# 8b489356-125d5a4-22ad83ea-729dc792 Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T17:45:55.882000Z WriteTime: 2024-11-21T17:45:55.882000Z Ip: "ipv6:[::1]:50208" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:50208" } } } 2024-11-21T17:45:55.885632Z :DEBUG: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] [dc1] Commit offsets [2, 3). Partition stream id: 1 2024-11-21T17:45:55.885803Z :DEBUG: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2024-11-21T17:45:55.885867Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2024-11-21T17:45:55.885929Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2024-11-21T17:45:55.886038Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:45:55.886049Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:45:55.886089Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_13336517014917941030_v1 2024-11-21T17:45:55.886121Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:45:55.887184Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:45:55.887208Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:45:55.887236Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2024-11-21T17:45:55.887316Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2024-11-21T17:45:55.887337Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2024-11-21T17:45:55.887348Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2024-11-21T17:45:55.887575Z :DEBUG: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2024-11-21T17:45:55.982441Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0] Write session will now close 2024-11-21T17:45:55.982467Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0] Write session: aborting 2024-11-21T17:45:55.982711Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:45:55.982721Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0] Write session: destroy 2024-11-21T17:45:55.982908Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0 grpc read done: success: 0 data: 
2024-11-21T17:45:55.982928Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0 grpc read failed 2024-11-21T17:45:55.982936Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0 grpc closed 2024-11-21T17:45:55.982945Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|59b27b9-211d9f74-91192c20-d5599ece_0 is DEAD 2024-11-21T17:45:55.983460Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:45:55.983487Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439790264539576706:2586] destroyed 2024-11-21T17:45:55.983503Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:45:55.983220Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:45:57.876249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:45:57.876266Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:58.564514Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:45:58.654020Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T17:46:03.565487Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:46:05.884952Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T17:46:05.985998Z :INFO: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:05.986022Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T17:46:05.986031Z :INFO: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16341 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:05.986064Z :NOTICE: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:46:05.986075Z :DEBUG: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] [dc1] Abort session to cluster 2024-11-21T17:46:05.986261Z :NOTICE: [/Root] [/Root] [7b55e277-26b978c2-66966536-a9a9dc09] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:46:05.986642Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 grpc read done: success# 0, data# { } 2024-11-21T17:46:05.986661Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 grpc read failed 2024-11-21T17:46:05.986668Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 grpc closed 2024-11-21T17:46:05.986687Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_13336517014917941030_v1 is DEAD 2024-11-21T17:46:05.986869Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:05.986893Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_13336517014917941030_v1 2024-11-21T17:46:05.986923Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439790238769772382:2484] destroyed 2024-11-21T17:46:05.986945Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_13336517014917941030_v1 2024-11-21T17:46:05.987070Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439790238769772379:2481] disconnected; active server actors: 1 2024-11-21T17:46:05.987084Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439790238769772379:2481] client user disconnected session shared/user_1_1_13336517014917941030_v1 2024-11-21T17:46:06.148453Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790311784217615:2712] TxId: 281474976710722. Ctx: { TraceId: 01jd7x9jx9983g0c76s04m5d7q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDVmZWUxODktY2VkNzEwYjgtMTYxMGIzOWQtOGZlZjM3YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2024-11-21T17:46:06.148769Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790311784217622:2720], TxId: 281474976710722, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDVmZWUxODktY2VkNzEwYjgtMTYxMGIzOWQtOGZlZjM3YzI=. CustomerSuppliedId : . TraceId : 01jd7x9jx9983g0c76s04m5d7q. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790311784217615:2712], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T17:46:06.148830Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790311784217621:2719], TxId: 281474976710722, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDVmZWUxODktY2VkNzEwYjgtMTYxMGIzOWQtOGZlZjM3YzI=. TraceId : 01jd7x9jx9983g0c76s04m5d7q. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790311784217615:2712], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> test.py::test[pg-select_win_frame-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_max-default.txt-Debug] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] Test command err: 2024-11-21T17:46:03.609786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:03.610130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:03.610145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023cd/r3tmp/tmp36P3hl/pdisk_1.dat 2024-11-21T17:46:03.705501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.723754Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:03.766365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:03.766401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:03.776912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:03.882888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.898098Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:46:03.898190Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:03.906142Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:03.906180Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:03.906326Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:03.906338Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:03.906345Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:03.906410Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:03.910221Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:03.910286Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:03.910311Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:46:03.910316Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:03.910321Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:03.910326Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.910575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:03.910598Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:03.910613Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:46:03.910621Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.910627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.910635Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:03.910640Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.910678Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:03.910727Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:03.910745Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:03.911060Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.921394Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:03.921443Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:04.096810Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:46:04.097682Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:04.097697Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.097795Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.097804Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:04.097815Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:04.097882Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:04.097913Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:04.098037Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.098051Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:04.098415Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:04.098536Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.098866Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:04.098874Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.098987Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:04.098993Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:04.099001Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.099203Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.099212Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:04.099218Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:04.099233Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:04.099243Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:04.099253Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.099862Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.100197Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:04.100223Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:04.100248Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:04.101993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.102017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.102027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.102839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:04.103728Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.289599Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.289929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:04.366247Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9gy55287pvzxnqj3q49a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIxMDkwYjctMTJjMmNmMGEtMWU1YmE2ZjMtYWIxNDNiYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:04.366926Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:04.366995Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:04.377708Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:04.377759Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.378768Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:04.379059Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:04.389496Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:06.175155Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:06.175165Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.175169Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.175175Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:06.175178Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.175196Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:06.175238Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:06.175249Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:06.175460Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.185744Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:06.185790Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:06.360628Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:06.360755Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:06.360762Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.360982Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.360989Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:06.360996Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:06.361040Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:06.361068Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:06.361109Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.361118Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:06.361188Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:06.361264Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.361494Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:06.361498Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.361602Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:06.361607Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:06.361612Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.361719Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.361724Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:06.361727Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:06.361738Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:06.361745Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:06.361752Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.361869Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.362053Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:06.362118Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:06.362123Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:06.363072Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:06.363090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:06.363125Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:06.363651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:06.364298Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.550617Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.550940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:06.592624Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9k4te6xtzdeqgybgnmxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2FkZjEzMzctMjkxYjllZmYtNDYwZGYyZjgtNzVlZjMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:06.592788Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:06.592858Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:06.603577Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:06.603635Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.604639Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:06.604950Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:06.615391Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:06.615420Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.615485Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:06.615492Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:06.615552Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.615561Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.615569Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:06.615583Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.615630Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:06.615853Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:06.615946Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:06.615980Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.615985Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:06.615993Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:06.616037Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:06.616044Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.616166Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T17:46:06.616265Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 42, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:06.616289Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T17:46:06.616295Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T17:46:06.616326Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:06.616331Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T17:46:06.616391Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.616396Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:06.616402Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:06.616426Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.616434Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.616440Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 4062, MsgBus: 26830 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001672/r3tmp/tmppKco3o/pdisk_1.dat 2024-11-21T17:45:54.441632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:54.449761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:54.449790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:54.453878Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:54.454243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4062, node 1 2024-11-21T17:45:54.476447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:54.476466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:54.476467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:54.476510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26830 TClient is connected to server localhost:26830 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:45:54.560545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.568733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.630251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.647160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.658583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.735162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790259518095501:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.735193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.760160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.766287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.773379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.779945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.835473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.848531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:54.859655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790259518096016:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.859693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790259518096021:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.859710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:54.860204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:54.862906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790259518096023:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:55.104789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.381792Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790263813064048:2487] TxId: 281474976715672. Ctx: { TraceId: 01jd7x98bf4yp7dcqeftaks7jb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjExM2U3YWUtMWE3OWZmZGQtZTNjZTA4NjctNjgzYjJjNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. } 2024-11-21T17:45:55.383199Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790263813064056:2498], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjExM2U3YWUtMWE3OWZmZGQtZTNjZTA4NjctNjgzYjJjNWQ=. TraceId : 01jd7x98bf4yp7dcqeftaks7jb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790263813064048:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:55.383413Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790263813064059:2501], TxId: 281474976715672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjExM2U3YWUtMWE3OWZmZGQtZTNjZTA4NjctNjgzYjJjNWQ=. TraceId : 01jd7x98bf4yp7dcqeftaks7jb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790263813064048:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:55.383889Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790263813064055:2497], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7x98bf4yp7dcqeftaks7jb. SessionId : ydb://session/3?node_id=1&id=NjExM2U3YWUtMWE3OWZmZGQtZTNjZTA4NjctNjgzYjJjNWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790263813064048:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:55.384208Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790263813064058:2500], TxId: 281474976715672, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjExM2U3YWUtMWE3OWZmZGQtZTNjZTA4NjctNjgzYjJjNWQ=. TraceId : 01jd7x98bf4yp7dcqeftaks7jb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790263813064048:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:45:55.384685Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjExM2U3YWUtMWE3OWZmZGQtZTNjZTA4NjctNjgzYjJjNWQ=, ActorId: [1:7439790263813064020:2487], ActorState: ExecuteState, TraceId: 01jd7x98bf4yp7dcqeftaks7jb, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 28913, MsgBus: 29181 2024-11-21T17:45:55.695982Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790261389126448:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:55.696352Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001672/r3tmp/tmp9VGlxZ/pdisk_1.dat 2024-11-21T17:45:55.706816Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28913, node 2 2024-11-21T17:45:55.714470Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:55.714485Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:55.714486Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:55.714515Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29181 TClient is connected to server localhost:29181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 Su ... FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:56.104802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790265684095785:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.104826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.104878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790265684095791:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:56.105612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:56.110316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790265684095793:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:56.312174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:00.696391Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790261389126448:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:00.696454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:02.037174Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGU4NmUxNTMtZDQxYjVlMDYtNDI0MWU2MDUtNzYxN2NlZmY=, ActorId: [2:7439790287158933678:2591], ActorState: ExecuteState, TraceId: 01jd7x9etfbmn4mz8k7a0ttsc7, Create QueryResponse for error on request, msg:
: Error: Task execution timeout 94ms exceeded, terminating after 94ms 2024-11-21T17:46:02.155784Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790291453901065:2591] TxId: 281474976715674. Ctx: { TraceId: 01jd7x9ey67fs03n091hzbt9fe, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGU4NmUxNTMtZDQxYjVlMDYtNDI0MWU2MDUtNzYxN2NlZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 101ms during execution } ] 2024-11-21T17:46:02.155884Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790291453901079:2622], TxId: 281474976715674, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NGU4NmUxNTMtZDQxYjVlMDYtNDI0MWU2MDUtNzYxN2NlZmY=. TraceId : 01jd7x9ey67fs03n091hzbt9fe. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439790291453901065:2591], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:02.194469Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790291453901077:2620], TxId: 281474976715674, task: 7. Ctx: { TraceId : 01jd7x9ey67fs03n091hzbt9fe. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NGU4NmUxNTMtZDQxYjVlMDYtNDI0MWU2MDUtNzYxN2NlZmY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439790291453901065:2591], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:02.194809Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGU4NmUxNTMtZDQxYjVlMDYtNDI0MWU2MDUtNzYxN2NlZmY=, ActorId: [2:7439790287158933678:2591], ActorState: ExecuteState, TraceId: 01jd7x9ey67fs03n091hzbt9fe, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 101ms during execution Trying to start YDB, gRPC: 1834, MsgBus: 26269 2024-11-21T17:46:02.511465Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790292409307633:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001672/r3tmp/tmphvc0Mg/pdisk_1.dat 2024-11-21T17:46:02.526669Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 1834, node 3 2024-11-21T17:46:02.534764Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:02.534906Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:02.534913Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:02.534914Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:02.534963Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26269 TClient is connected to server localhost:26269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:02.610911Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:02.610949Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:02.611998Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:02.613405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:02.615608Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:02.628466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:46:02.649058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.686478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:02.698912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:02.820596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790292409309017:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.820634Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.824899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.834970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.844747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.852014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.858576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.872287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:02.880620Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790292409309520:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.880643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.880673Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790292409309525:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:02.881198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:02.885371Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790292409309527:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=YTdlODU5OWQtYzlkZTFiMmMtYzUxYzdiZTItYjA3ZmE5NWU= ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: 2024-11-21T17:46:03.031366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:03.031986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:03.032018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023e6/r3tmp/tmpYvpKqb/pdisk_1.dat 2024-11-21T17:46:03.142175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.159504Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:03.201570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:03.201603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:03.212149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:03.316022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.331955Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:654:2553] 2024-11-21T17:46:03.332022Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:03.339434Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:03.339496Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:03.339630Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:03.339642Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:03.339649Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:03.339689Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:03.343280Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:03.343339Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:03.343360Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2571] 2024-11-21T17:46:03.343365Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:03.343369Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:03.343374Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:03.343708Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:03.343733Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:03.343822Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2024-11-21T17:46:03.343854Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:03.344963Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:642:2547], serverId# [1:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:03.345018Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:03.345025Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.345033Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:03.345041Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:03.345119Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:03.345191Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:03.345211Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:03.345549Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:03.345601Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:03.345714Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:46:03.345724Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:46:03.345730Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:46:03.345763Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:03.345771Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:46:03.345784Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:03.345797Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:703:2581] 2024-11-21T17:46:03.345801Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:46:03.345805Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:46:03.345809Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:03.345881Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:662:2557] 2024-11-21T17:46:03.345916Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:03.347010Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:46:03.347029Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:46:03.347139Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:03.347146Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.347152Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:46:03.347158Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:03.347267Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:03.347287Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 
2024-11-21T17:46:03.347385Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2024-11-21T17:46:03.347394Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2024-11-21T17:46:03.347399Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2024-11-21T17:46:03.347426Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:03.347432Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2024-11-21T17:46:03.347443Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:03.347455Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:712:2584] 2024-11-21T17:46:03.347459Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:46:03.347462Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2024-11-21T17:46:03.347466Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:46:03.347538Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2024-11-21T17:46:03.347548Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2024-11-21T17:46:03.347563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:03.347567Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:03.347571Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2024-11-21T17:46:03.347576Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:46:03.347653Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:03.357928Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:03.357968Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:03.399063Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:644:2549], serverId# [1:719:2589], sessionId# [0:0:0] 2024-11-21T17:46:03.399128Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:46:03.399206Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:46:03.399230Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:46:03.399358Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:03.399368Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:645:2550], serverId# [1:720:2590], sessionId# [0:0:0] 2024-11-21T17:46:03.399383Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2024-11-21T17:46:03.399394Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 
2024-11-21T17:46:03.399402Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2024-11-21T17:46:03.399435Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:46:03.409741Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:46:03.409779Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:03.409886Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2024-11-21T17:46:03.409892Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:03.553377Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2606], serverId# [1:740:2610], sessionId# [0:0:0] 2024-11-21T17:46:03.553476Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:738:2608], serverId# [1:741:2611], sessionId# [0:0:0] 2024-11-21T17:46:03.554235Z node 1 :TX_DAT ... cesMode: (empty maybe) } From: (Uint32 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 4 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T17:46:06.791422Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] Handle TEvDataShard::TEvEraseRowsRequest 2024-11-21T17:46:06.791434Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] Propose tx: txId# 281474976715663, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2024-11-21T17:46:06.791445Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] Propose tx: txId# 281474976715663, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2024-11-21T17:46:06.791449Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] Propose tx: txId# 281474976715663, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2024-11-21T17:46:06.791483Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2024-11-21T17:46:06.791508Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037890 2024-11-21T17:46:06.791569Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:06.791580Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037888 2024-11-21T17:46:06.791616Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:46:06.791624Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037889 2024-11-21T17:46:06.801955Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T17:46:06.801979Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:06.802008Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 
source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2024-11-21T17:46:06.802017Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2024-11-21T17:46:06.802030Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:46:06.802044Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 1 2024-11-21T17:46:06.802056Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037889, status# 1 2024-11-21T17:46:06.802066Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2024-11-21T17:46:06.802071Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2024-11-21T17:46:06.802084Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2024-11-21T17:46:06.802088Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2024-11-21T17:46:06.802092Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 1 2024-11-21T17:46:06.802096Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] Register plan: txId# 281474976715663, minStep# 1516, maxStep# 31516 2024-11-21T17:46:06.812751Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2024-11-21T17:46:06.812776Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2024-11-21T17:46:06.813299Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2024-11-21T17:46:06.813314Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2024-11-21T17:46:06.813390Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:1051:2803] Reply: txId# 281474976715663, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715663, shard# 72075186224037888 2024-11-21T17:46:06.813545Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T17:46:06.813552Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2024-11-21T17:46:06.813619Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1046:2799], serverId# [3:1047:2800], sessionId# [0:0:0] 2024-11-21T17:46:06.813631Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:06.813638Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:06.813645Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2024-11-21T17:46:06.813652Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:06.825144Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1063:2814] 2024-11-21T17:46:06.825190Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:06.825463Z node 3 :TX_DATASHARD 
DEBUG: TxInitSchema.Complete 2024-11-21T17:46:06.825644Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:06.825820Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:06.825827Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:06.825832Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:06.825863Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:06.825872Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2024-11-21T17:46:06.825884Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:06.825907Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:1077:2821] 2024-11-21T17:46:06.825913Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:06.825917Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:06.825920Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.825972Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2024-11-21T17:46:06.825981Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2024-11-21T17:46:06.826154Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:06.826162Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:06.826173Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1501 2024-11-21T17:46:06.826176Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.826196Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.826199Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:06.826204Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2024-11-21T17:46:06.826207Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.826215Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.826254Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2024-11-21T17:46:06.826258Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2024-11-21T17:46:06.826261Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2024-11-21T17:46:06.826288Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715662 2024-11-21T17:46:06.826296Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:46:06.826300Z node 3 
:TX_DATASHARD NOTICE: Outdated readset for 1501:281474976715662 at 72075186224037889 2024-11-21T17:46:06.826307Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T17:46:06.826310Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:46:06.826318Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2024-11-21T17:46:06.826320Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715662 2024-11-21T17:46:06.826322Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715662 2024-11-21T17:46:06.826334Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715662 2024-11-21T17:46:06.826340Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2024-11-21T17:46:06.826343Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1501:281474976715662 at 72075186224037890 2024-11-21T17:46:06.826351Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2024-11-21T17:46:06.826355Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1501 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2024-11-21T17:46:06.826365Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1500 next step 1501 2024-11-21T17:46:06.826371Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037888: waitStep# 1501 readStep# 1501 observedStep# 1501 2024-11-21T17:46:06.826383Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715662 2024-11-21T17:46:06.826391Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream |75.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2024-11-21T17:46:04.247105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:04.247653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:04.247685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023af/r3tmp/tmpx4xudE/pdisk_1.dat 2024-11-21T17:46:04.346113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.362905Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:04.404948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:04.404977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:04.415427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:04.518555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.531889Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:46:04.531948Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:04.537180Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:04.537212Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:04.537333Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:04.537339Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:04.537343Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:04.537373Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:04.539647Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:04.539697Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:04.539714Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:46:04.539717Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:04.539720Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:04.539723Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.539925Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:04.539939Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:04.539961Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:46:04.539974Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.539979Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.539986Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:04.539990Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.540020Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:04.540066Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:04.540081Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:04.540315Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.550593Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:04.550635Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:04.724668Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:46:04.725532Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:04.725552Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.725657Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.725665Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:04.725674Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:04.725733Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:04.725761Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:04.725896Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.725910Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:04.726233Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:04.726362Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.726678Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:04.726688Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.726805Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:04.726811Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:04.726818Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.727018Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.727025Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:04.727030Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:04.727044Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:04.727052Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:04.727060Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.727662Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.728019Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:04.728045Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:04.728051Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:04.729728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.729750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.729758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:04.730516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:04.731350Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.917260Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.917642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:04.986450Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9hhsdsqznm68r3414pp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRjODAyNjYtMzZhMjNjMDMtMmVhNDEwNWUtYTRhZjUwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:04.987277Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:46:04.987359Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:04.997997Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:04.998045Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.998872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T17:46:04.999078Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:05.009461Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:0 ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:06.757973Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T17:46:06.757990Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.757994Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.758000Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:06.758005Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.758028Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:06.758076Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:06.758091Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:06.758326Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.768600Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:06.768643Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:06.942333Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:06.942438Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:46:06.942445Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.942618Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.942625Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:06.942633Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:46:06.942677Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:46:06.942701Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:06.942737Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:06.942746Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:46:06.942816Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:06.942882Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:06.943088Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:46:06.943092Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.943187Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:46:06.943192Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:46:06.943196Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.943287Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:06.943292Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:06.943295Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:46:06.943304Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:06.943310Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:46:06.943317Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:06.943441Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:06.943627Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:46:06.943691Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:46:06.943695Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:46:06.944854Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:06.944877Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:06.944990Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:06.945638Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:06.946231Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:07.131781Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:07.132084Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:07.173562Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7x9kq06qkmh969pyda98gz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2U2ZmJlOGItNzJkOTdhNDYtODdiMmU4OTYtMjdhZWRiODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:07.173690Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T17:46:07.173760Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:07.184480Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:07.184540Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:07.185611Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:07.185926Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T17:46:07.196413Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T17:46:07.196444Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:07.196523Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:07.196530Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T17:46:07.196587Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:07.196597Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:07.196609Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:07.196626Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:07.196667Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T17:46:07.196910Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:07.197012Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:07.197050Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:07.197055Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:07.197062Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:07.197110Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:07.197117Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:07.197238Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T17:46:07.197332Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:07.197362Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T17:46:07.197367Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T17:46:07.197394Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:07.197399Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T17:46:07.197474Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:07.197481Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:07.197487Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:07.197520Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:07.197529Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:07.197536Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[window-full/noncompact_with_nulls--Debug] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Debug] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--Plan] >> test.py::test[aggregate-ensure_count-default.txt-Plan] [GOOD] >> test.py::test[aggregate-ensure_count-default.txt-Results] >> test.py::test[window-full/noncompact_with_nulls--Plan] [GOOD] >> test.py::test[window-full/noncompact_with_nulls--Results] >> TExternalTableTestReboots::ParallelCreateDrop |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[pg-select_win_max-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_max-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_max-default.txt-Results] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> test.py::test[simple_columns-simple_columns_union_all_qualified_star-default.txt-Results] [GOOD] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Debug] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Plan] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] [SKIPPED] |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> KqpTypes::DyNumberCompare [GOOD] >> KqpOlapBlobsSharing::MultipleSchemaVersions >> KqpOlapAggregations::JsonDoc_GetValue_ToInt >> TKeyValueTracingTest::WriteHuge [FAIL] ------- [TM] 
{default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [21/Nov/2024 17:43:41] "GET /mylib.sql HTTP/1.1" 200 - |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test.py::test[aggregate-ensure_count-default.txt-Results] [GOOD] >> KqpOlap::OlapUpsertImmediate [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable >> KqpOlap::SelectLimit1ManyShards >> KqpOlapAggregations::CountAllPushdown+UseLlvm |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> KqpOlapSparsed::AccessorActualization >> test.py::test[aggr_factory-variance-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::DyNumberCompare [GOOD] Test command err: Trying to start YDB, gRPC: 27348, MsgBus: 31967 2024-11-21T17:45:54.727364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790259618390133:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:54.727381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001670/r3tmp/tmpgsNfXh/pdisk_1.dat 2024-11-21T17:45:54.794443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27348, node 1 2024-11-21T17:45:54.805035Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:54.805048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:54.805050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:54.805088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31967 2024-11-21T17:45:54.829892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:54.829916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:54.830763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31967 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:54.863492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.892140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.918064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.941574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:54.951113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:55.077256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790263913358966:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.077352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.083841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.097758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.109993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.115712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.123120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.130494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:55.139772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790263913359469:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.139795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.139927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790263913359474:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:55.140672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:55.143219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790263913359476:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:45:55.377433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:59.728419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790259618390133:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:59.728453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:00.386208Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211160383, txId: 281474976710672] shutting down 2024-11-21T17:46:00.482888Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790285388197450:2603] TxId: 281474976710675. Ctx: { TraceId: 01jd7x9da36m3v1hkzzjjvxffz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhYzQ4MTktYjhiZTdjZGUtMzhhMTY0NmItODdmYjRmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T17:46:00.484379Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2RhYzQ4MTktYjhiZTdjZGUtMzhhMTY0NmItODdmYjRmYmY=, ActorId: [1:7439790285388197402:2603], ActorState: ExecuteState, TraceId: 01jd7x9da36m3v1hkzzjjvxffz, Create QueryResponse for error on request, msg: 2024-11-21T17:46:00.484498Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211160470, txId: 281474976710674] shutting down 2024-11-21T17:46:00.484561Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T17:46:00.485058Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790285388197454:2607], TxId: 281474976710675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=N2RhYzQ4MTktYjhiZTdjZGUtMzhhMTY0NmItODdmYjRmYmY=. CustomerSuppliedId : . TraceId : 01jd7x9da36m3v1hkzzjjvxffz. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790285388197450:2603], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:00.485584Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790285388197456:2608], TxId: 281474976710675, task: 2. Ctx: { TraceId : 01jd7x9da36m3v1hkzzjjvxffz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=N2RhYzQ4MTktYjhiZTdjZGUtMzhhMTY0NmItODdmYjRmYmY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790285388197450:2603], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:00.485959Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790285388197457:2609], TxId: 281474976710675, task: 3. Ctx: { TraceId : 01jd7x9da36m3v1hkzzjjvxffz. SessionId : ydb://session/3?node_id=1&id=N2RhYzQ4MTktYjhiZTdjZGUtMzhhMTY0NmItODdmYjRmYmY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790285388197450:2603], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:00.486233Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790285388197458:2610], TxId: 281474976710675, task: 4. Ctx: { TraceId : 01jd7x9da36m3v1hkzzjjvxffz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=N2RhYzQ4MTktYjhiZTdjZGUtMzhhMTY0NmItODdmYjRmYmY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790285388197450:2603], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:00.487719Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037919, step: 1732211160470 2024-11-21T17:46:00.489375Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439790285388197462:2614]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/LargeTable' scheme changed., code: 2028 , tablet id: 72075186224037919, actor_id: [1:7439790263913359831:2457] 2024-11-21T17:46:00.489426Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037924, step: 1732211160470 2024-11-21T17:46:00.489747Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037925, step: 1732211160470 2024-11-21T17:46:00.489770Z node 1 :TX_DATASHARD ERROR: TxId: 281474976710675. Snapshot is not valid, tabletId: 72075186224037926, step: 1732211160470 2024-11-21T17:46:00.489795Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439790285388197460:2612]. Got EvScanError scan state: , status: ABORT ... , suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:07.852707Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:07.868041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:07.875758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:07.888643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:07.898615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:07.988402Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790316145511916:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:07.988430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:07.994286Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:08.000661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:08.010102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:08.017273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:08.072166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:08.080415Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:08.088050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790320440479727:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.088073Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.088075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790320440479732:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.088639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:08.093290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790320440479734:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:08.283447Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790320440480032:2462], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2024-11-21T17:46:08.283564Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODRkYWQyNy04NjNkZjc1OC0yYmJkZjk3OC03NTAwMzQzNA==, ActorId: [3:7439790320440480019:2454], ActorState: ExecuteState, TraceId: 01jd7x9n0s9p4mqna0geej1qdy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2024-11-21T17:46:08.286678Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790320440480036:2464], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2024-11-21T17:46:08.286778Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODRkYWQyNy04NjNkZjc1OC0yYmJkZjk3OC03NTAwMzQzNA==, ActorId: [3:7439790320440480019:2454], ActorState: ExecuteState, TraceId: 01jd7x9n0w80q413nr02d2fa2j, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2024-11-21T17:46:08.290112Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790320440480040:2466], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2024-11-21T17:46:08.290224Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODRkYWQyNy04NjNkZjc1OC0yYmJkZjk3OC03NTAwMzQzNA==, ActorId: [3:7439790320440480019:2454], ActorState: ExecuteState, TraceId: 01jd7x9n0z1wh4g4p7tn4g9sy2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional >> KqpOlapSysView::StatsSysViewEnumStringBytes >> KqpDecimalColumnShard::TestGroupByDecimal |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapUpsertImmediate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2024-11-21T17:45:43.220621Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732211143220615 2024-11-21T17:45:43.380654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790210579602772:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:43.381342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:43.396382Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790212508215528:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001eb8/r3tmp/tmpw2B0Ox/pdisk_1.dat 2024-11-21T17:45:43.426803Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:45:43.439502Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:45:43.440998Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:43.472702Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:43.483941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:43.483971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:43.488821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64713, node 1 2024-11-21T17:45:43.497684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001eb8/r3tmp/yandex3sUlkX.tmp 2024-11-21T17:45:43.497701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001eb8/r3tmp/yandex3sUlkX.tmp 2024-11-21T17:45:43.501335Z INFO: TTestServer started on Port 26500 GrpcPort 64713 TClient is connected to server localhost:26500 PQClient connected to localhost:64713 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:45:43.516313Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001eb8/r3tmp/yandex3sUlkX.tmp 2024-11-21T17:45:43.516427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:43.525526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:43.528058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:43.528079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:43.529259Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:45:43.529580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:45:43.552867Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:43.567638Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:43.697528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790212508215658:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:43.697559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:43.697712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790212508215670:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:43.699338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:45:43.713088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790212508215672:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:45:43.757517Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790210579603589:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:43.757703Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGVmOGU5NTMtMzdlZWVlOTgtYjY5YTE1YjEtZmExNDMzZjQ=, ActorId: [1:7439790210579603548:2299], ActorState: ExecuteState, TraceId: 01jd7x8x1h3f41jxwsvfj48z2d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:43.758396Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:43.759299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:43.782477Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790212508215732:2287], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:43.782888Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTA2OWNiOWYtZjU1NDE2NmEtMzg4MTU5N2ItY2U3MWIzMDc=, ActorId: [2:7439790212508215656:2277], ActorState: ExecuteState, TraceId: 01jd7x8x0g5bbzm8jt85ewf74p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:43.783027Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:43.853718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:43.878592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:64713", true, true, 1000); 2024-11-21T17:45:43.923958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd7x8x72bkyv6t7r4tkvf5jz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRlYThiZGItNzMyNGViLTc3ZmM4MjZjLTYzNzQ5OWIz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790210579603963:2916] 2024-11-21T17:45:48.380391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790210579602772:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.380417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:48.394666Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790212508215528:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.394698Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:45:49.060769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:64713 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:45:49.090136Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:64713 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 ... 21T17:46:08.019258Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:28878 2024-11-21T17:46:08.019528Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:46:08.019675Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:46:08.019688Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T17:46:08.019786Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:46:08.019808Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:47550 2024-11-21T17:46:08.019817Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:47550 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:46:08.019819Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:46:08.020145Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:46:08.020179Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:46:08.020188Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:46:08.020189Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:46:08.020194Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:46:08.020660Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:46:08.036911Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) 
RequestPQRB 2024-11-21T17:46:08.036990Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439790317375612715:2479] connected; active server actors: 1 2024-11-21T17:46:08.037003Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:46:08.037007Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:46:08.037058Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439790317375612715:2479] disconnected; active server actors: 1 2024-11-21T17:46:08.037067Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439790317375612715:2479] disconnected no session 2024-11-21T17:46:08.051918Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:46:08.051936Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:46:08.051941Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790317375612683:2479] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:46:08.051950Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:46:08.052213Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:08.052252Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439790317375612733:2479], now have 1 active actors on pipe 2024-11-21T17:46:08.052275Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2024-11-21T17:46:08.052363Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:08.052380Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:08.052421Z node 6 :PERSQUEUE INFO: new Cookie src|5425eba1-d6ea9a49-d79027db-a56ec432_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:46:08.052455Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T17:46:08.052481Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:08.052713Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:08.052722Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:08.052737Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:08.052837Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|5425eba1-d6ea9a49-d79027db-a56ec432_0 2024-11-21T17:46:08.053178Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211168053 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:08.053229Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|5425eba1-d6ea9a49-d79027db-a56ec432_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:46:08.053345Z :INFO: [] MessageGroupId [src] SessionId [src|5425eba1-d6ea9a49-d79027db-a56ec432_0] Write session: close. Timeout = 0 ms 2024-11-21T17:46:08.053356Z :INFO: [] MessageGroupId [src] SessionId [src|5425eba1-d6ea9a49-d79027db-a56ec432_0] Write session will now close 2024-11-21T17:46:08.053362Z :DEBUG: [] MessageGroupId [src] SessionId [src|5425eba1-d6ea9a49-d79027db-a56ec432_0] Write session: aborting 2024-11-21T17:46:08.053477Z :INFO: [] MessageGroupId [src] SessionId [src|5425eba1-d6ea9a49-d79027db-a56ec432_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:46:08.053482Z :DEBUG: [] MessageGroupId [src] SessionId [src|5425eba1-d6ea9a49-d79027db-a56ec432_0] Write session: destroy 2024-11-21T17:46:08.053600Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|5425eba1-d6ea9a49-d79027db-a56ec432_0 grpc read done: success: 0 data: 2024-11-21T17:46:08.053609Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5425eba1-d6ea9a49-d79027db-a56ec432_0 grpc read failed 2024-11-21T17:46:08.053614Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5425eba1-d6ea9a49-d79027db-a56ec432_0 grpc closed 2024-11-21T17:46:08.053618Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5425eba1-d6ea9a49-d79027db-a56ec432_0 is DEAD 2024-11-21T17:46:08.053826Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:46:08.053946Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:08.053960Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439790317375612733:2479] destroyed 2024-11-21T17:46:08.053971Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:46:08.070929Z :INFO: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] Starting read session 2024-11-21T17:46:08.070949Z :DEBUG: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] Starting session to cluster null (localhost:28878) 2024-11-21T17:46:08.071256Z :DEBUG: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:08.071261Z :DEBUG: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:08.071264Z :DEBUG: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T17:46:08.071316Z :ERROR: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T17:46:08.071322Z :DEBUG: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:08.071324Z :DEBUG: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:08.071337Z :INFO: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T17:46:08.071379Z :NOTICE: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:46:08.071385Z :DEBUG: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T17:46:08.071393Z :INFO: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:08.071398Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:46:08.071403Z :INFO: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:08.071409Z :NOTICE: [/Root] [/Root] [713512dd-417ed16a-585563e0-6d470b92] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] >> KqpOlap::OlapLayout >> test.py::test[pg-select_win_max-default.txt-Results] [GOOD] >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling >> KqpOlapIndexes::IndexesActualization ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] Test command err: Trying to start YDB, gRPC: 25672, MsgBus: 6828 2024-11-21T17:46:08.498962Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790318657897810:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.498978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ea1/r3tmp/tmpuL27v6/pdisk_1.dat 2024-11-21T17:46:08.542092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25672, node 1 2024-11-21T17:46:08.552782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:08.552797Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:08.552799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:08.552839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6828 TClient is connected to server localhost:6828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:08.596194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:08.599225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:08.599252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:08.600385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:08.605837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:08.614896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.614973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.615025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.615052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.615081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.615102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.615125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.615147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.615171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:08.615196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.615219Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:08.615246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790318657898440:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:08.618850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.618890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.618928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.618954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.618977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.618998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.619021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.619049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.619079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:08.619103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.619125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:08.619149Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790318657898437:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:08.619619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:08.619636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:08.619649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:08.619658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:08.619674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:08.619682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:08.619691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:08.619700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:08.619710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:08.619719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:08.619725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888; ... 
ne=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:08.628439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:08.628471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:08.628481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:08.628495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:08.628503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.628513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:08.628522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:08.628536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:08.628544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:08.628553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:08.628561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; 2024-11-21T17:46:08.753078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790318657898718:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.753111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.753186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790318657898745:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.753928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:08.755501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790318657898747:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:08.899997Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211168807, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.obj.obj_col2_int"), JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.obj.obj_col2_int" RETURNING Int) = 16 AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Int32":16},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == 16","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 
'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1290) '('"_id" '"e1853814-2a05ac11-e84d1d68-889f3d38") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $20 (Int32 '"6")) (let $21 (Just $20)) (let $22 (Int32 '1)) (let $23 '($21 $22)) (let $24 (If (== $20 (Int32 '2147483647)) $23 '((+ $21 $22) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($23 $24)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"id" $6)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (DqPhyStage '() (lambda '() (block '( (let $25 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $26 '('"id" '"jsondoc" '"jsonval")) (let $27 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $28 (OptionalType (DataType 'JsonDocument))) (let $29 (DataType 'Json)) (let $30 (TupleType (DataType 'Uint8) (DataType 'String))) (let $31 '((ResourceType '"JsonPath"))) (let $32 (ResourceType '"JsonNode")) (let $33 (DictType $11 $32)) (let $34 '($33)) (let $35 (CallableType '() '((VariantType (TupleType $30 (OptionalType (DataType 'Double))))) '($28) $31 $34)) (let $36 '('('"strict"))) (let $37 (Udf '"Json2.JsonDocumentSqlValueNumber" (Void) (VoidType) '"" $35 (VoidType) '"" $36)) (let $38 (CallableType '() $31 '($11))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.obj.obj_col2_int"))) (let $41 (Dict $33)) (let $42 (lambda '($53) (block '( (let $54 (Nothing $7)) (return $54) )))) (let $43 (lambda '($55) (If (Exists $55) (SafeCast $55 $7) (Nothing $7)))) (let $44 (KqpWideReadOlapTableRanges $25 %kqp%tx_result_binding_0_0 $26 '() $27 (lambda '($45) (block '( (let $46 (StructType $10 '('"jsondoc" $28) '('"jsonval" (OptionalType $29)))) (let $47 (KqpOlapApply $46 '('"jsondoc") (lambda '($50) (block '( (let $51 (Apply $37 $50 $40 $41)) (let $52 (Nothing $7)) (return (Visit $51 '0 $42 '1 $43)) ))))) (let $48 '('eq $47 (Int32 '"16"))) (let $49 '('?? 
$48 (Bool 'false))) (return (KqpOlapFilter $45 $49)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($56 $57 $58) (block '( (let $59 (OptionalType $32)) (let $60 (CallableType '() '((VariantType (TupleType $30 $12))) '($59) $31 $34)) (let $61 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $60 (VoidType) '"" $36)) (let $62 (IfPresent $58 (lambda '($67) (block '( (let $68 '($29 '"" '1)) (let $69 (CallableType '() '($32) $68)) (let $70 (Udf '"Json2.Parse" (Void) (VoidType) '"" $69 (VoidType) '"" '())) (return (Just (Apply $70 $67))) ))) (Nothing $59))) (let $63 (Apply $61 $62 $40 $41)) (let $64 (Visit $63 '0 (lambda '($71) (Nothing $12)) '1 (lambda '($72) $72))) (let $65 (Apply $37 $57 $40 $41)) (let $66 (Visit $65 '0 $42 '1 $43)) (return (AsStruct '('"column1" $64) '('"column2" $66) '('"id" $56))) )))))) ))) '('('"_logical_id" '1361) '('"_id" '"ded595bb-67dbdc3d-aed2ff54-4581cec1")))) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($73) $73) '('('"_logical_id" '2334) '('"_id" '"fbae8dde-a381c474-d4309091-d6638f1f")))) (let $16 '('"id" '"column1" '"column2")) (let $17 (DqCnResult (TDqOutput $15 '0) $16)) (let $18 (KqpTxResultBinding $9 '0 '0)) (let $19 (KqpPhysicalTx '($13 $15) '($17) '('($5 $18)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $19) '((KqpTxResultBinding (ListType (StructType '('"column1" $12) '('"column2" $7) $10)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesScanOnJoinWithDataShardTable [GOOD] Test command err: Trying to start YDB, gRPC: 2408, MsgBus: 4313 2024-11-21T17:46:08.636462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790316886685588:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.636728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e94/r3tmp/tmpdfccbu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2408, node 1 2024-11-21T17:46:08.686571Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:08.690911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:08.690921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:08.690922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:08.690956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4313 TClient is connected to server localhost:4313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:08.735897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:08.737775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:08.737800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:08.738905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:08.747204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:08.755658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.755714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.755750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.755773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.755793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.755812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.755838Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.755861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.755882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:08.755906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.755927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:08.755953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316886686231:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:08.757973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.757992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.758017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.758031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.758044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.758059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.758077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.758093Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.758112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:08.758131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.758152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:08.758170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316886686232:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:08.759978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.759997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.760024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.760037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.760050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.760071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.760087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.760105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316886686236:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.760130Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790316886686236:22 ... ne=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:08.826849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:08.826867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:08.826876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:08.826887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:08.826896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:08.826906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:08.826915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:08.826921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:08.826930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:08.826976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:08.826986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:08.827005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:08.827009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.827027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:08.827036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:08.827054Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:08.827062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:08.827072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:08.827080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:08.827151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:08.827160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:08.827167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:08.827171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:08.827183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:08.827191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:08.827199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:08.827207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:08.827215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:08.827223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:08.827230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:08.827239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:08.827263Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:08.827272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:08.827286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:08.827294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.827306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:08.827315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:08.827330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:08.827338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:08.827348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:08.827355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:08.864946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T17:46:08.943643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790316886686741:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.943705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.957118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790316886686778:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.957169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.957172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790316886686783:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:08.957942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:08.959584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790316886686785:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:46:09.117444Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211169010, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::CountAllPushdown+UseLlvm [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> KqpOlapAggregations::Aggregation_Sum_NullMix ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:45:56.632150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:56.632179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:56.632185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:56.632190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:56.632197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:56.632202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:56.632211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:56.632315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:56.641836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:56.641860Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:45:56.643996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:56.644089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:56.644125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:56.649800Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:56.649911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:56.650057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:56.650332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:56.651260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:56.651565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:56.651577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:56.651591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:56.651599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:56.651605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:56.651658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:45:56.653773Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:45:56.672624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:56.672738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.672820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:56.672874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:56.672884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.673925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:56.673960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:56.674039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:45:56.674053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:56.674058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:56.674064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:56.674643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.674658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:56.674662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:56.675045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.675052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.675056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:56.675063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:56.675567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:56.676050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:56.676108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:56.676360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:56.676386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:56.676394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:56.676475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:56.676482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:56.676515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:56.676529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:56.676992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:56.677055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:56.677153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:56.677173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:56.677178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:56.677185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:56.677191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:56.677196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:56.677200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:56.677227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:56.677233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:56.677237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
.069298Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T17:46:09.069483Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:09.069505Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 214748366952 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:09.069513Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T17:46:09.069555Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:46:09.069568Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T17:46:09.069596Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:09.069604Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:46:09.069609Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:09.070263Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.070343Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:46:09.070736Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:09.070745Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:09.070774Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:46:09.070792Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:46:09.070810Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:09.070818Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T17:46:09.070822Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:46:09.070826Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T17:46:09.070886Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.070894Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:46:09.070906Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:46:09.070910Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:46:09.070915Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:46:09.070920Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:46:09.070924Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:46:09.070927Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:46:09.070937Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:46:09.070941Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:46:09.070945Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T17:46:09.070949Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:46:09.070952Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:46:09.070955Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:46:09.071021Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.071032Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.071037Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:09.071041Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:46:09.071045Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:46:09.071126Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:46:09.071132Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:46:09.071141Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:46:09.071206Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.071215Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.071219Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:09.071223Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:46:09.071226Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:09.071351Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.071364Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.071368Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:09.071372Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:46:09.071376Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:09.071386Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:46:09.072251Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.072280Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:46:09.072462Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:09.072547Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:46:09.072595Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:46:09.072602Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:46:09.072655Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:46:09.072667Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:46:09.072670Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:383:2375] TestWaitNotification: OK eventTxId 1004 
2024-11-21T17:46:09.072713Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:46:09.072733Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 26us result status StatusPathDoesNotExist 2024-11-21T17:46:09.072754Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0xBE58B59) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+1945 (0xBBE03A9) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+138 (0xBBE546A) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xBBEC6E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0xBE5AB0E) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+426 (0xBBEBE8A) NUnitTest::TTestFactory::Execute()+803 (0xBE5B283) NUnitTest::RunMain(int, char**)+3005 (0xBE6AB9D) ??+0 (0x7F45C75B5D90) __libc_start_main+128 (0x7F45C75B5E40) _start+41 (0xB04F029) >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is 
[1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:45:56.644633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:56.644653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:56.644658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:56.644662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:56.644668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:56.644672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:56.644681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:56.644757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:56.653707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:56.653731Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:45:56.656221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:56.656345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:56.656377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:56.658822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:56.658873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:56.658952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:56.659141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:56.660086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:56.660454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:56.660471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:56.660485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:56.660494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:56.660500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:56.660551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] 
sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:45:56.662121Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:45:56.676144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:56.676269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.676336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:56.676382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:56.676390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:56.677307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:56.677323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:56.677329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:56.677779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:56.677795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:56.678133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.678142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.678148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:56.678154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:56.678738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:56.679213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:56.679260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:56.679456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:56.679479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:56.679486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:56.679545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:56.679556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:56.679583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:56.679595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:56.679978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:56.679988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:56.680027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:56.680032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:56.680109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:56.680116Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:56.680127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:56.680131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:56.680136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:56.680141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:56.680146Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:56.680150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:56.680161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:56.680167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:56.680171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... .177668Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T17:46:09.177836Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:09.177852Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 214748366952 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:09.177858Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1006:0 HandleReply TEvOperationPlan: step# 5000007 2024-11-21T17:46:09.177887Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:46:09.177899Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 128 -> 240 2024-11-21T17:46:09.177918Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:09.177925Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:46:09.177930Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:09.178094Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.178296Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T17:46:09.178528Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:09.178534Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:09.178553Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:46:09.178563Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:46:09.178577Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T17:46:09.178580Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2024-11-21T17:46:09.178584Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2024-11-21T17:46:09.178587Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2024-11-21T17:46:09.178626Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.178631Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T17:46:09.178641Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T17:46:09.178644Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:46:09.178649Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T17:46:09.178653Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:46:09.178656Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T17:46:09.178659Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T17:46:09.178668Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:46:09.178672Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:46:09.178675Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2024-11-21T17:46:09.178678Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T17:46:09.178680Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:46:09.178683Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:46:09.178731Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.178739Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.178742Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:46:09.178746Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:46:09.178749Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:46:09.178806Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:46:09.178811Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:46:09.178818Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:46:09.178859Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.178866Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.178871Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:46:09.178874Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T17:46:09.178877Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:09.178969Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.178979Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.178983Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:46:09.178986Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:46:09.178989Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:09.178996Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T17:46:09.179763Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.179806Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:46:09.179847Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:46:09.180079Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T17:46:09.180126Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:46:09.180132Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:46:09.180186Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:46:09.180200Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:46:09.180204Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [50:442:2434] TestWaitNotification: OK eventTxId 1006 2024-11-21T17:46:09.180281Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:46:09.180301Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 28us result status StatusPathDoesNotExist 2024-11-21T17:46:09.180328Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> TSchemeShardSplitBySizeTest::Merge111Shards [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> test.py::test[blocks-date_less_or_equal--Debug] [GOOD] >> test.py::test[blocks-date_less_or_equal--Plan] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] >> test.py::test[window-full/noncompact_with_nulls--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> test.py::test[window-generic/aggregations_after_current--Debug] Test command err: 2024-11-21T17:46:04.144068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:04.144628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:04.144651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023c5/r3tmp/tmpFjWY6l/pdisk_1.dat 2024-11-21T17:46:04.255755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.273940Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:04.316216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:04.316268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:04.326751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:04.430462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.445049Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:654:2553] 2024-11-21T17:46:04.445112Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:04.451840Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:04.451906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:04.452053Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:04.452061Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:04.452068Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:04.452111Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:04.455679Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:04.455758Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:04.455782Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2571] 2024-11-21T17:46:04.455788Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:04.455792Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:04.455797Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:04.456194Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:04.456222Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:04.456334Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2024-11-21T17:46:04.456382Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:04.457257Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:642:2547], serverId# [1:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:04.457298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:04.457303Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.457310Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:04.457316Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:04.457374Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:04.457418Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:04.457433Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:04.457657Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:04.457689Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:04.457766Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:46:04.457771Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:46:04.457775Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:46:04.457798Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:04.457803Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:46:04.457812Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:04.457821Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:703:2581] 2024-11-21T17:46:04.457823Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:46:04.457825Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:46:04.457828Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:04.457869Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:662:2557] 2024-11-21T17:46:04.457896Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:04.458595Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:46:04.458607Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:46:04.458666Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:04.458670Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.458675Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:46:04.458678Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:04.458748Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:04.458759Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 
2024-11-21T17:46:04.458816Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2024-11-21T17:46:04.458821Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2024-11-21T17:46:04.458824Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2024-11-21T17:46:04.458841Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:04.458845Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2024-11-21T17:46:04.458852Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:04.458858Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:712:2584] 2024-11-21T17:46:04.458860Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:46:04.458863Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2024-11-21T17:46:04.458865Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:46:04.458898Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2024-11-21T17:46:04.458902Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2024-11-21T17:46:04.458912Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:04.458914Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:04.458916Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2024-11-21T17:46:04.458919Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:46:04.458974Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:04.469274Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:04.469321Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:04.510458Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:644:2549], serverId# [1:719:2589], sessionId# [0:0:0] 2024-11-21T17:46:04.510538Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:46:04.510643Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:46:04.510676Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:46:04.510862Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:04.510876Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:645:2550], serverId# [1:720:2590], sessionId# [0:0:0] 2024-11-21T17:46:04.510895Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2024-11-21T17:46:04.510912Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 
2024-11-21T17:46:04.510923Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2024-11-21T17:46:04.510967Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:46:04.521268Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:46:04.521312Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:04.521408Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2024-11-21T17:46:04.521413Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:04.665720Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2606], serverId# [1:740:2610], sessionId# [0:0:0] 2024-11-21T17:46:04.665795Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:738:2608], serverId# [1:741:2611], sessionId# [0:0:0] 2024-11-21T17:46:04.666424Z node 1 :TX_DAT ... [2000:281474976715663] at 72075186224037888 for LoadAndWaitInRS 2024-11-21T17:46:09.587887Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:09.587921Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715663 2024-11-21T17:46:09.587931Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0} 2024-11-21T17:46:09.587942Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2024-11-21T17:46:09.587971Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:09.587976Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:09.587984Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715663] at 72075186224037890 for LoadAndWaitInRS 2024-11-21T17:46:09.588045Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:09.598523Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:09.598578Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [3:1051:2803], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:46:09.598598Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5} 2024-11-21T17:46:09.598607Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:09.598652Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2024-11-21T17:46:09.598679Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
72075186224037890 2024-11-21T17:46:09.598691Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1051:2803], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:46:09.598697Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2024-11-21T17:46:09.598701Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:46:09.598720Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 2 2024-11-21T17:46:09.598731Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 2 2024-11-21T17:46:09.598739Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1051:2803] Reply: txId# 281474976715663, status# OK, error# 2024-11-21T17:46:09.598761Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715663 2024-11-21T17:46:09.598806Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T17:46:09.598814Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2024-11-21T17:46:09.598902Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:09.598912Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:09.598921Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:46:09.598937Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:09.598956Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1046:2799], serverId# [3:1047:2800], sessionId# [0:0:0] 2024-11-21T17:46:09.599253Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:46:09.599359Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:46:09.599397Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:09.599402Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.599412Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for WaitForStreamClearance 2024-11-21T17:46:09.599459Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.599467Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:09.599600Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 1 2024-11-21T17:46:09.599665Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715666, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:09.599686Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715666, 
PendingAcks: 0 2024-11-21T17:46:09.599691Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 0 2024-11-21T17:46:09.599718Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T17:46:09.599722Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037889 2024-11-21T17:46:09.599788Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:09.599793Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.599798Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for ReadTableScan 2024-11-21T17:46:09.599829Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:09.599836Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:09.599844Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:09.600106Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:09.600158Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:09.600184Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:09.600188Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.600192Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:46:09.600214Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.600220Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:09.600340Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 1 2024-11-21T17:46:09.600384Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715667, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:09.600401Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715667, PendingAcks: 0 2024-11-21T17:46:09.600405Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 0 2024-11-21T17:46:09.600419Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:46:09.600422Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037888 2024-11-21T17:46:09.600458Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:09.600462Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.600467Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for ReadTableScan 2024-11-21T17:46:09.600485Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:09.600491Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:09.600497Z node 3 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:09.600704Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2024-11-21T17:46:09.600753Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2024-11-21T17:46:09.600781Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:09.600786Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.600791Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for WaitForStreamClearance 2024-11-21T17:46:09.600815Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.600822Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:46:09.600917Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2024-11-21T17:46:09.600946Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:46:09.600962Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2024-11-21T17:46:09.600966Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2024-11-21T17:46:09.600980Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2024-11-21T17:46:09.600984Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2024-11-21T17:46:09.601022Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:09.601027Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:09.601032Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for ReadTableScan 2024-11-21T17:46:09.601048Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:09.601055Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:46:09.601060Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdown+UseLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 24676, MsgBus: 12472 2024-11-21T17:46:08.633291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790316352406512:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.633362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e90/r3tmp/tmpsEpQdl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24676, node 1 2024-11-21T17:46:08.679848Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:08.682813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2024-11-21T17:46:08.682823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:08.682824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:08.682858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12472 TClient is connected to server localhost:12472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:08.726876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:08.733663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:08.733693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:08.734707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:08.739798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:08.751519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.751574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.751608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.751628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.751644Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.751662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.751678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.751691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.751710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:08.751734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.751761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:08.751778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790316352406955:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:08.754069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.754092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.754123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.754141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.754162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.754184Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.754209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.754233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.754255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:08.754278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.754299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:08.754318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790316352406958:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:08.756538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.756556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.756585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.756608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.756629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.756652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.756673Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.756694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316352406954:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.756714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74397903163524069 ... Chunks; 2024-11-21T17:46:08.760264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:08.760267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:08.760283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:08.760290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:08.760298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:08.760306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:08.760311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:08.760315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:08.760320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:08.760323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:08.760344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:08.760353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:08.760366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:08.760373Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.760384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:08.760392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:08.760407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:08.760414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:08.760423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:08.760430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:08.798518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T17:46:09.015910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790320647374690:2463], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.015910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790320647374698:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.015929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.016524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:09.017757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790320647374704:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {<main>
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T17:46:09.405901Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211169073, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '476) '('"_id" '"3edb59ad-6364597f-4558215-15be5531") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '995) '('"_id" '"9359ee1d-198b4877-6b6246d3-cd33f427")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1098) '('"_id" '"4654c520-5c15e393-eeffd0f4-844cdbad") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex |75.5%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> KqpOlap::OlapLayout [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> KqpOlapAggregations::Aggregation_Sum_NullMix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Merge111Shards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:46.678488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:46.678513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.678517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:46.678520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:46.678531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:46.678534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:46.678541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.678619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:46.687189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:46.687215Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:46.696674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:46.697635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:45:46.697679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:46.702520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:46.702697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:46.702807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.702880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:46.703724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.704008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:46.704018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.704059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:46.704067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.704073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:46.704088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.712782Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:46.730965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:46.731063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.731136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:46.731203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:46.731211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.736514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.736562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:46.736648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.736663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:46.736668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:46.736673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:46.739347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.739393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:46.739402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:46.740045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.740058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.740065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.740073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.740845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:46.741354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:46.741417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:46.741621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.741648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:46.741657Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.741722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:46.741731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.741769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.741783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:46.742171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:45:46.742181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.742247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.742252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:46.742344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.742352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:46.742364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:46.742369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.742375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:46.742381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.742386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:46.742390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:46.742402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:46.742408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:46.742412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:46.742763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.742778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.742783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:46.742788Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:46.742793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.742805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
09710 2024-11-21T17:46:09.830096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710737:0 131 -> 132 2024-11-21T17:46:09.830126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 33 2024-11-21T17:46:09.830644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710737:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.831053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710737:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.831084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710737:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.831117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:09.831123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710737, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:46:09.831199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:09.831205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 281474976710737, path id: 2 2024-11-21T17:46:09.831294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710737:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.831301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710737:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:46:09.831308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409709 on partitioning changed splitOp# 281474976710737 at tablet 72057594046678944 2024-11-21T17:46:09.831314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409710 on partitioning changed splitOp# 281474976710737 at tablet 72057594046678944 2024-11-21T17:46:09.831317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409656 on partitioning changed splitOp# 281474976710737 at tablet 72057594046678944 2024-11-21T17:46:09.831438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 85 PathOwnerId: 72057594046678944, cookie: 281474976710737 2024-11-21T17:46:09.831452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 85 PathOwnerId: 72057594046678944, cookie: 281474976710737 2024-11-21T17:46:09.831456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710737 2024-11-21T17:46:09.831461Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710737, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 85 2024-11-21T17:46:09.831466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 34 2024-11-21T17:46:09.831485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710737, ready parts: 0/1, is published: true 2024-11-21T17:46:09.832127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 281474976710737:0 from tablet: 72057594046678944 to tablet: 72075186233409709 cookie: 72057594046678944:164 msg type: 269553158 2024-11-21T17:46:09.832144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710737:0 from tablet: 72057594046678944 to tablet: 72075186233409710 cookie: 72057594046678944:165 msg type: 269553158 2024-11-21T17:46:09.832148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710737:0 from tablet: 72057594046678944 to tablet: 72075186233409656 cookie: 72057594046678944:111 msg type: 269553158 2024-11-21T17:46:09.833025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710737 2024-11-21T17:46:09.833687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710737:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710737 TabletId: 72075186233409709 2024-11-21T17:46:09.833701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710737:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409709, at schemeshard: 72057594046678944 2024-11-21T17:46:09.833787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710737:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710737 TabletId: 72075186233409710 2024-11-21T17:46:09.833794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710737:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409710, at schemeshard: 72057594046678944 2024-11-21T17:46:09.834344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710737:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710737 TabletId: 72075186233409656 2024-11-21T17:46:09.834356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710737:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409656, at schemeshard: 72057594046678944 2024-11-21T17:46:09.834375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710737:0 progress is 1/1 2024-11-21T17:46:09.834380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710737 ready parts: 1/1 2024-11-21T17:46:09.834386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710737, ready parts: 1/1, is published: true 2024-11-21T17:46:09.834392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710737 ready parts: 1/1 2024-11-21T17:46:09.834397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710737:0 2024-11-21T17:46:09.834405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710737:0 2024-11-21T17:46:09.834451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 33 2024-11-21T17:46:09.835289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710737:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.835317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710737:0, at schemeshard: 72057594046678944 2024-11-21T17:46:09.835377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710737:0, at schemeshard: 72057594046678944 
2024-11-21T17:46:09.835383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976710737:0 2024-11-21T17:46:09.835534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 13712 RawX2: 4294980690 } TabletId: 72075186233409709 State: 4 2024-11-21T17:46:09.835547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409709, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:46:09.835607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 13718 RawX2: 4294980695 } TabletId: 72075186233409710 State: 4 2024-11-21T17:46:09.835612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409710, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:46:09.835653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 1936 RawX2: 4294970646 } TabletId: 72075186233409656 State: 4 2024-11-21T17:46:09.835658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409656, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:46:09.836374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:164 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:46:09.836425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:165 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:46:09.836475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:111 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:46:09.836579Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 164 TxId_Deprecated: 164 TabletID: 72075186233409709 2024-11-21T17:46:09.837190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 164 ShardOwnerId: 72057594046678944 ShardLocalIdx: 164, at schemeshard: 72057594046678944 2024-11-21T17:46:09.837291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 32 Forgetting tablet 72075186233409709 2024-11-21T17:46:09.837553Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 165 TxId_Deprecated: 165 TabletID: 72075186233409710 2024-11-21T17:46:09.839409Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 111 TxId_Deprecated: 111 TabletID: 72075186233409656 2024-11-21T17:46:09.839584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 165 ShardOwnerId: 72057594046678944 ShardLocalIdx: 165, at schemeshard: 72057594046678944 2024-11-21T17:46:09.839656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 31 Forgetting tablet 72075186233409710 Forgetting tablet 72075186233409656 2024-11-21T17:46:09.840275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 111 ShardOwnerId: 72057594046678944 ShardLocalIdx: 111, at schemeshard: 72057594046678944 2024-11-21T17:46:09.840340Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 30 2024-11-21T17:46:09.840992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:164 2024-11-21T17:46:09.841005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:164 tabletId 72075186233409709 2024-11-21T17:46:09.841490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:165 2024-11-21T17:46:09.841504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:165 tabletId 72075186233409710 2024-11-21T17:46:09.841525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:111 2024-11-21T17:46:09.841531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:111 tabletId 72075186233409656 Deleted tabletId 72075186233409656 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling-AllowSpilling [GOOD] Test command err: Trying to start YDB, gRPC: 25979, MsgBus: 19954 2024-11-21T17:46:09.603066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790321269706364:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:09.603121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e7f/r3tmp/tmpk4qV7Y/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25979, node 1 2024-11-21T17:46:09.658545Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:09.658894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:09.658901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:09.658902Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:09.658928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19954 TClient is connected to server localhost:19954 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:09.704258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.704280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.705372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:09.729204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:09.736478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.748193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.748278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.748337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.748364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.748390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.748415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.748436Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.748460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.748483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.748505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.748528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.748553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321269707019:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.749082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.749097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.749125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.749138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.749159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.749169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.749176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.749182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.749195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.749203Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.749209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.749217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.749290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.749300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.749315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.749324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.749338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.749347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.749362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.749375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.749387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.749390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:09.752575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790321269707020:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.752605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790321269707020:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.752641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790321269707020:2289];tablet_id=7207518622 ... 
ss=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.760703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.760711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.760715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.760723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.760726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.760734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.760738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.760776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.760781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.760797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.760800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.760811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.760816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.760831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.760835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.760846Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.760849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:09.760918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.760923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.760930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.760934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.760945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.760948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.760954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.760957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.760963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.760973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.760978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.760981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.761003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.761006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.761018Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.761021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.761030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.761033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.761046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.761050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.761058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.761060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:09.794706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T17:46:10.004447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790325564674765:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.004459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790325564674755:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.004479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.005226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:10.006737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790325564674769:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapLayout [GOOD] Test command err: Trying to start YDB, gRPC: 2473, MsgBus: 1370 2024-11-21T17:46:09.412378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790322364497153:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:09.412553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e85/r3tmp/tmpHXpbXB/pdisk_1.dat 2024-11-21T17:46:09.469228Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2473, node 1 2024-11-21T17:46:09.478459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:09.478472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:09.478474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:09.478510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1370 TClient is connected to server localhost:1370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:09.513578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.513603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.514731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:09.524446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:09.535195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.557301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.557380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.557431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.557459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.557482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.557510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.557538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.557566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.557599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.557623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.557669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.557697Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790322364497933:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.561535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.561565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.561638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.561663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.561687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.561711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.561733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.561755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.561779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.561803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.561825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.561846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7439790322364497954:2296];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.562363Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.562377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.562387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.562395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.562409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.562417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.562424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.562433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.562445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.562453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.562465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;pr ... shards, code: 2003 } } } 2024-11-21T17:46:09.888627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364499921:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.888651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } {
: Error: Execution, code: 1060 subissue: {
:15:35: Error: Executing CREATE TABLE subissue: {
: Error: Cannot create table with 17 column shards, only 16 are available, code: 2003 } } } 2024-11-21T17:46:09.893101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364499946:2601], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.893123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.895028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.900704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364499994:2609], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.900738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.902913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.908709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500037:2617], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.908732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.910446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.923303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500091:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.923326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.924673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.937613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500145:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.937638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.939433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.951237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500189:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.951263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.953099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.960420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500228:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.960448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.961936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.971885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500267:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.971909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.974842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.986769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500325:2670], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.986795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.989548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.997856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateTabletsObjectReply for unknown txId 281474976710683 at schemeshard 72057594046644480 2024-11-21T17:46:10.001083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322364500367:2678], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.001113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } {
: Error: Execution, code: 1060 subissue: {
:15:35: Error: Executing CREATE TABLE subissue: {
: Error: cannot find appropriate group for 9 shards, code: 2003 } } } 2024-11-21T17:46:10.005212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790326659467681:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.005235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.006757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.015675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790326659467748:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.015701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.017563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.028292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790326659467787:2703], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.028318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.030129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.042487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790326659467831:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.042510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.046039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 |75.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Sum_NullMix [GOOD] Test command err: Trying to start YDB, gRPC: 64321, MsgBus: 17081 2024-11-21T17:46:09.785160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790321954938905:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:09.785175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e81/r3tmp/tmpZqpYP9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64321, node 1 2024-11-21T17:46:09.839419Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:09.845054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:09.845073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:09.845074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:09.845110Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17081 TClient is connected to server localhost:17081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:09.886078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.886109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.887286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:09.890512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:09.902252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.909669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.909721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.909762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.909782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.909793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.909805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.909816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.909830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.909847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.909861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.909875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.909886Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790321954939537:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.910217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.910227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.910234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.910237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.910245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.910252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.910257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.910261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.910270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.910276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.910279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.910285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.910323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.910331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.910340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.910345Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.910352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.910358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.910366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.910372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.910378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.910384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:09.912643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790321954939538:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.912665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790321954939538:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.912702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790321954939538:2289];tablet_id=7207518622 ... 
37889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.919877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.919886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.919897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.919901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:09.920069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.920084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.920093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.920096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.920110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.920113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.920121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.920124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.920131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.920134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.920140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.920143Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.920176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.920185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.920200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.920208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.920218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.920225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.920252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.920260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.920269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.920277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls`; 2024-11-21T17:46:10.086757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790326249907129:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.086785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790326249907137:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.086792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.087489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:10.089480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790326249907143:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:10.311382Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211170144, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SUM(level) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int64)))))) (let $1 (OptionalType (DataType 'Int64))) (let $2 '('('"_logical_id" '663) '('"_id" '"438047df-59ea891d-ec09da40-74685c8e") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '('"level") '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 'sum '"level")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Sum0 $28)))) (Nothing (OptionalType (StructType '('Sum0 $1)))) (lambda '($29 $30) $20) (lambda '($31 
$32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Member $33 'Sum0)))))))) ))) '('('"_logical_id" '1334) '('"_id" '"fd1fd18c-6486fcf0-5d435bba-cc34011e")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1430) '('"_id" '"4b4afb44-2fb2ffe8-330c1fc5-78ddf05a") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::PredicatePushdown_LikePushedDownForStringType >> KqpOlapAggregations::JsonDoc_GetValue >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm >> KqpOlapAggregations::Aggregation_Count_NullGroupBy >> KqpDecimalColumnShard::TestFilterNulls >> KqpOlapStats::AddRowsTableStandalone >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet >> KqpOlap::PredicatePushdown_LikePushedDownForStringType [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] >> KqpOlapSysView::StatsSysViewTable >> KqpOlapSysView::StatsSysViewBytesColumnActualization >> KqpDecimalColumnShard::TestGroupByDecimal [GOOD] >> KqpOlapBlobsSharing::HugeSchemeHistory ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_LikePushedDownForStringType [GOOD] Test command err: Trying to start YDB, gRPC: 6160, MsgBus: 19876 2024-11-21T17:46:10.846331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790328739792875:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:10.846411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e76/r3tmp/tmp4Ur7LV/pdisk_1.dat 2024-11-21T17:46:10.902930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6160, node 1 2024-11-21T17:46:10.916102Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:10.916115Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:10.916117Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:10.916154Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19876 2024-11-21T17:46:10.947483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:10.947510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:10.948588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19876 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:10.959113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:10.970730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:10.978874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:10.978937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:10.978973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:10.978993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:10.979015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:10.979034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:10.979049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:10.979069Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:10.979085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:10.979100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:10.979119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:10.979136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790328739793508:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:10.981379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:10.981401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:10.981436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:10.981457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:10.981474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:10.981494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:10.981514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:10.981538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:10.981559Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:10.981579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:10.981603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:10.981622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790328739793507:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:10.984077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:10.984111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:10.984155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:10.984191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:10.984217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:10.984261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:10.984287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:10.984313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:10.984341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790328739793509 ... 
RN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:10.987620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:10.987627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:10.987630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:10.987678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:10.987689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:10.987695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:10.987698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:10.987707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:10.987711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:10.987717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:10.987724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:10.987736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:10.987746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:10.987749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:10.987753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:10.987770Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:10.987778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:10.987793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:10.987802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:10.987809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:10.987813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:10.987829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:10.987843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:10.987851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:10.987859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:10.987924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:10.987938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:10.987944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:10.987947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:10.987957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:10.987964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:10.987969Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:10.987972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:10.987976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:10.987987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:10.987995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:10.988003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:10.988020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:10.988031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:10.988042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:10.988051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:10.988059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:10.988063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:10.988072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:10.988084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:10.988092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:10.988101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2024-11-21T17:46:11.131763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790333034761098:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.131849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790333034761106:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.131860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.132575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:11.134281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790333034761112:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } >> KqpOlap::TableSinkWithOlapStore >> KqpOlapIndexes::SchemeActualizationOnceOnStart >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Debug] >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] Test command err: Trying to start YDB, gRPC: 24508, MsgBus: 21406 2024-11-21T17:46:10.990281Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790326272530228:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:10.990469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e72/r3tmp/tmp9HwNi0/pdisk_1.dat 2024-11-21T17:46:11.038432Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24508, node 1 2024-11-21T17:46:11.047863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.047876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.047877Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.047910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21406 TClient is connected to server localhost:21406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:11.091811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:11.091838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:11.092914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:11.097144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:11.124648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:11.132904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.132981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.133033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.133057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.133076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.133093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.133109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.133126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.133143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.133159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.133182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.133199Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790330567498163:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.136669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.136692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.136730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.136753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.136780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.136802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.136823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.136843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.136866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.136890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.136918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.136934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330567498164:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.137307Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.137322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.137331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.137335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.137348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.137357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.137365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.137372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.137377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.137380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.137386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
-21T17:46:11.145855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.145859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.145861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.145879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.145883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.145893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.145900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.145907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.145910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.145920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.145926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.145933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.145939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; 2024-11-21T17:46:11.329343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790330567498468:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.329363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790330567498460:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.329379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.329969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:11.331334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790330567498474:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:11.511010Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211171383, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1") = "val1" AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType 
(DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1286) '('"_id" '"c5752da2-547afd0-d98115ad-a82e97f9") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '"6")) (let $20 (Just $19)) (let $21 (Int32 '1)) (let $22 '($20 $21)) (let $23 (If (== $19 (Int32 '2147483647)) $22 '((+ $20 $21) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($22 $23)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $24 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $25 '('"id" '"jsondoc" '"jsonval")) (let $26 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $27 (OptionalType (DataType 'JsonDocument))) (let $28 (DataType 'Json)) (let $29 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $30 '((ResourceType '"JsonPath"))) (let $31 (ResourceType '"JsonNode")) (let $32 (DictType $10 $31)) (let $33 '($32)) (let $34 (CallableType '() $29 '($27) $30 $33)) (let $35 '('('"strict"))) (let $36 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $34 (VoidType) '"" $35)) (let $37 (CallableType '() $30 '($10))) (let $38 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $37 (VoidType) '"" '())) (let $39 (Apply $38 (Utf8 '"$.col1"))) (let $40 (Dict $32)) (let $41 (lambda '($51) (Nothing $11))) (let $42 (lambda '($52) $52)) (let $43 (KqpWideReadOlapTableRanges $24 %kqp%tx_result_binding_0_0 $25 '() $26 (lambda '($44) (block '( (let $45 (StructType $9 '('"jsondoc" $27) '('"jsonval" (OptionalType $28)))) (let $46 (KqpOlapApply $45 '('"jsondoc") (lambda '($49) (block '( (let $50 (Apply $36 $49 $39 $40)) (return (Visit $50 '0 $41 '1 $42)) ))))) (let $47 '('eq $46 (String '"val1"))) (let $48 '('?? 
$47 (Bool 'false))) (return (KqpOlapFilter $44 $48)) ))))) (return (FromFlow (NarrowMap $43 (lambda '($53 $54 $55) (block '( (let $56 (OptionalType $31)) (let $57 (CallableType '() $29 '($56) $30 $33)) (let $58 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $57 (VoidType) '"" $35)) (let $59 (IfPresent $55 (lambda '($64) (block '( (let $65 '($28 '"" '1)) (let $66 (CallableType '() '($31) $65)) (let $67 (Udf '"Json2.Parse" (Void) (VoidType) '"" $66 (VoidType) '"" '())) (return (Just (Apply $67 $64))) ))) (Nothing $56))) (let $60 (Apply $58 $59 $39 $40)) (let $61 (Visit $60 '0 $41 '1 $42)) (let $62 (Apply $36 $54 $39 $40)) (let $63 (Visit $62 '0 $41 '1 $42)) (return (AsStruct '('"column1" $61) '('"column2" $63) '('"id" $53))) )))))) ))) '('('"_logical_id" '1357) '('"_id" '"ef7af1e6-8519cf12-ca733f46-666d396c")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($68) $68) '('('"_logical_id" '2174) '('"_id" '"78e8197d-4f5f504c-1a5654ec-dd35b098")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestGroupByDecimal [GOOD] Test command err: Trying to start YDB, gRPC: 30289, MsgBus: 7132 2024-11-21T17:46:08.953884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790318260729673:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.954094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e8a/r3tmp/tmpwu1cS5/pdisk_1.dat 2024-11-21T17:46:09.006845Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30289, node 1 2024-11-21T17:46:09.012984Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:09.013001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:09.013002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:09.013037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7132 TClient is connected to server localhost:7132 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:09.054622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.054653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.055750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:09.084649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:09.237066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322555697572:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.237089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.257516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.262896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.262931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.262969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.262986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.263002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.263017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.263026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.263044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.263060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.263075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.263091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.263106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322555697649:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:09.263446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.263462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.263472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.263475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.263487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.263494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.263500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.263506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.263515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.263517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.263521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.263527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.263572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.263580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.263590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.263593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:09.263599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.263604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.263614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.263619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.263624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.263630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBA ... .622870Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790321428050460:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.622887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790321428050460:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.623239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.623252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.623263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.623270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.623284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.623292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.623301Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.623313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.623321Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.623329Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.623334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.623342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.623387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.623401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.623421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.623429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.623440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.623449Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.623469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.623477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.623491Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.623499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T17:46:09.715595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322555697813:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.715620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.715652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790322555697818:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.716358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:09.717786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790322555697820:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:10.548607Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211169773, txId: 18446744073709551615] shutting down 2024-11-21T17:46:10.558016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790325723017880:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.558061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.558252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790325723017885:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.559832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:10.565160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790325723017887:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:11.444057Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211170613, txId: 18446744073709551615] shutting down >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 26473, MsgBus: 22475 2024-11-21T17:46:10.731581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790325945605436:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:10.731597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e75/r3tmp/tmpcFWiQB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26473, node 1 2024-11-21T17:46:10.799859Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:10.802263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:10.802275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:10.802277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:10.802307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22475 2024-11-21T17:46:10.833486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:10.833521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:10.834664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:10.873174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:10.876611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:10.885847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:10.885928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:10.885984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:10.886014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:10.886037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:10.886061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:10.886086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:10.886104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:10.886132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:10.886155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:10.886181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:10.886202Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790325945606077:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:10.886720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:10.886741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:10.886753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:10.886758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:10.886773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:10.886778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:10.886787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:10.886801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:10.886811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:10.886820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:10.886827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:10.886836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:10.886894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:10.886906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:10.886929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:10.886938Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:10.886955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:10.886964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:10.886985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:10.886994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:10.887009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:10.887018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:10.890221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790325945606084:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:10.890247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790325945606084:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:10.890287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790325945606084:2290];tablet_id=7207518622 ... 
91;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:10.899369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:10.899382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:10.899391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:10.899395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:10.899420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:10.899432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:10.899440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:10.899445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:10.899459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:10.899468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:10.899479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:10.899490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:10.899525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:10.899535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:10.899560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:10.899570Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:10.899582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:10.899590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:10.899606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:10.899622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:10.899639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:10.899647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2024-11-21T17:46:10.999504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790325945606377:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.999524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790325945606372:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.999540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.000288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:11.002171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790325945606386:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:11.786454Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211171054, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '836) '('"_id" '"7091e60e-22f8af6c-58940510-8cd1c282") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '0)) (let $25 '((Nothing $2) $24)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '895) '('"_id" 
'"ef232f5b-357e676d-b5b40c38-620d9954") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1318) '('"_id" '"fc2439a8-4a7fde07-74f754b1-6e1e6523") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1330) '('"_id" '"c7ce1e1b-696cc5e2-bf9170b0-8799a6c3")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility-EnableLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 28678, MsgBus: 16734 2024-11-21T17:46:11.125483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790329422377244:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.125551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e6f/r3tmp/tmp1ZStAQ/pdisk_1.dat 2024-11-21T17:46:11.179667Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28678, node 1 2024-11-21T17:46:11.189987Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.190001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.190003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.190036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16734 2024-11-21T17:46:11.227235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:11.227271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:11.228568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:11.259481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:11.262087Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:11.272250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:11.283378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.283460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.283523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.283545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.283561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.283582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.283604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.283632Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.283657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.283682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.283709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.283732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790329422377751:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.284298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.284317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.284330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.284339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.284356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.284366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.284376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.284385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.284396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.284404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.284411Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.284420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.284483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.284495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.284512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.284524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.284536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.284540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.284562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.284566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.284577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.284580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:11.286570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329422377753:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.286580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329422377753:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLA ... 
hunks; 2024-11-21T17:46:11.292193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.292195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.292202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.292204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.292209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.292213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.292217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.292220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.292223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.292225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.292301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.292307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.292323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.292327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.292338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.292342Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.292357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.292361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.292371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.292374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:11.330919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T17:46:11.544097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790329422378200:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.544101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790329422378192:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.544115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.544851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:11.546206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790329422378206:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:11.728025Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211171600, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '475) '('"_id" '"ef754bae-4168d86b-1006fedc-8944f59b") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '994) '('"_id" '"2b2b18a5-e397984d-35049c16-288c8689")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1097) '('"_id" '"ce9e9554-11647225-db3b051a-6cd6a685") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: 2024-11-21T17:46:04.813966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:04.814335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:04.814349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0023a0/r3tmp/tmpQZeTkP/pdisk_1.dat 2024-11-21T17:46:04.904532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:04.920925Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:04.963590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:04.963624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:04.974113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:05.078045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:05.092471Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:654:2553] 2024-11-21T17:46:05.092529Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:05.097764Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:05.097809Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:05.097904Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:05.097908Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:05.097913Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:05.097942Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:05.100009Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:05.100058Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:05.100072Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2571] 2024-11-21T17:46:05.100076Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:05.100079Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:05.100082Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:05.100396Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:05.100420Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:05.100510Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:656:2555] 2024-11-21T17:46:05.100547Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:05.101854Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:642:2547], serverId# [1:667:2559], sessionId# [0:0:0] 2024-11-21T17:46:05.101915Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:05.101922Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.101931Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:05.101940Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:05.102015Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:05.102072Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:05.102091Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:05.102451Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:05.102505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:05.102629Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:46:05.102635Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:46:05.102642Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:46:05.102680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:05.102687Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:46:05.102703Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:05.102716Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:703:2581] 2024-11-21T17:46:05.102720Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:46:05.102724Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:46:05.102728Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:05.102805Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:662:2557] 2024-11-21T17:46:05.102838Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:05.103608Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:46:05.103624Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:46:05.103689Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:05.103693Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.103699Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:46:05.103703Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:05.103778Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:05.103788Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 
2024-11-21T17:46:05.103848Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2024-11-21T17:46:05.103852Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2024-11-21T17:46:05.103855Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2024-11-21T17:46:05.103872Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:05.103875Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2024-11-21T17:46:05.103883Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:05.103891Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:712:2584] 2024-11-21T17:46:05.103893Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:46:05.103895Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2024-11-21T17:46:05.103897Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:46:05.103932Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2024-11-21T17:46:05.103936Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2024-11-21T17:46:05.103945Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:05.103947Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:05.103950Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2024-11-21T17:46:05.103952Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:46:05.104002Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:05.114223Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:05.114259Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:05.155228Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:644:2549], serverId# [1:719:2589], sessionId# [0:0:0] 2024-11-21T17:46:05.155297Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:46:05.155358Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:46:05.155394Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:46:05.155601Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:05.155617Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:645:2550], serverId# [1:720:2590], sessionId# [0:0:0] 2024-11-21T17:46:05.155635Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2024-11-21T17:46:05.155653Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 
2024-11-21T17:46:05.155662Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2024-11-21T17:46:05.155708Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:46:05.165979Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:46:05.166014Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:05.166120Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2024-11-21T17:46:05.166126Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:05.309516Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:736:2606], serverId# [1:740:2610], sessionId# [0:0:0] 2024-11-21T17:46:05.309596Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:738:2608], serverId# [1:741:2611], sessionId# [0:0:0] 2024-11-21T17:46:05.310448Z node 1 :TX_DAT ... rsion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2024-11-21T17:46:11.915804Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2024-11-21T17:46:11.915852Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:11.915875Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2024-11-21T17:46:11.915880Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:11.915885Z node 3 :TX_DATASHARD DEBUG: Found ready operation [2500:281474976715667] in PlanQueue unit at 72075186224037893 2024-11-21T17:46:11.915907Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037893 loaded tx from db 2500:281474976715667 keys extracted: 0 2024-11-21T17:46:11.915927Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:11.926368Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037893 step# 2500 txid# 281474976715667} 2024-11-21T17:46:11.926402Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2500} 2024-11-21T17:46:11.926414Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2024-11-21T17:46:11.936829Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037891 step# 2500 txid# 281474976715667} 2024-11-21T17:46:11.936855Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 2500} 2024-11-21T17:46:11.936868Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T17:46:11.936877Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715667 2024-11-21T17:46:11.936886Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T17:46:11.936905Z node 3 :TX_DATASHARD 
DEBUG: Complete [2500 : 281474976715667] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1377:3027], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:11.936922Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 } 2024-11-21T17:46:11.936929Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T17:46:11.937034Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1377:3027] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037891, status# 2 2024-11-21T17:46:11.937137Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715667 2024-11-21T17:46:11.937147Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2024-11-21T17:46:11.937163Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037893 2024-11-21T17:46:11.937199Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2024-11-21T17:46:11.937236Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1186:2912], at tablet# 72075186224037891 2024-11-21T17:46:11.937240Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2024-11-21T17:46:11.937247Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2024-11-21T17:46:11.937253Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:11.937259Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715667] at 72075186224037893 for LoadAndWaitInRS 2024-11-21T17:46:11.937358Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:11.937450Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2024-11-21T17:46:11.947874Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2024-11-21T17:46:11.947921Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715667] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1377:3027], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:46:11.947941Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2024-11-21T17:46:11.947953Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2024-11-21T17:46:11.947992Z node 3 :TX_DATASHARD DEBUG: [DistEraser] 
[3:1377:3027] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037893, status# 2 2024-11-21T17:46:11.948001Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1377:3027] Reply: txId# 281474976715667, status# OK, error# 2024-11-21T17:46:11.948039Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715667 2024-11-21T17:46:11.948108Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2024-11-21T17:46:11.948115Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037891 2024-11-21T17:46:11.948204Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2024-11-21T17:46:11.948213Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:11.948222Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T17:46:11.948253Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T17:46:11.948279Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1371:3022], serverId# [3:1372:3023], sessionId# [0:0:0] 2024-11-21T17:46:11.948292Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2024-11-21T17:46:11.948296Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2024-11-21T17:46:11.948323Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2024-11-21T17:46:11.948328Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2024-11-21T17:46:11.948609Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037893 2024-11-21T17:46:11.948701Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037893 2024-11-21T17:46:11.948739Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2024-11-21T17:46:11.948744Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:11.948751Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for WaitForStreamClearance 2024-11-21T17:46:11.948791Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:11.948803Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2024-11-21T17:46:11.948930Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2024-11-21T17:46:11.948957Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2024-11-21T17:46:11.948977Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2024-11-21T17:46:11.948982Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037893 2024-11-21T17:46:11.949002Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2024-11-21T17:46:11.949006Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:11.949011Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for 
ReadTableScan 2024-11-21T17:46:11.949036Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:11.949047Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2024-11-21T17:46:11.949053Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2024-11-21T17:46:11.949260Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2024-11-21T17:46:11.949324Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2024-11-21T17:46:11.949355Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T17:46:11.949361Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:11.949365Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for WaitForStreamClearance 2024-11-21T17:46:11.949392Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:11.949398Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T17:46:11.949487Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2024-11-21T17:46:11.949503Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2024-11-21T17:46:11.949520Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2024-11-21T17:46:11.949524Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715669, at: 72075186224037892 2024-11-21T17:46:11.949541Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T17:46:11.949545Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:46:11.949549Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for ReadTableScan 2024-11-21T17:46:11.949564Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:11.949569Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T17:46:11.949575Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 |75.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Filter_NotAllUsedFieldsInResultSet [GOOD] Test command err: Trying to start YDB, gRPC: 24293, MsgBus: 6960 2024-11-21T17:46:11.552852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790333436850094:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.552871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e69/r3tmp/tmp7hhfhD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24293, node 1 
2024-11-21T17:46:11.604450Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:11.608140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.608153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.608155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.608186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6960 TClient is connected to server localhost:6960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:11.652613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:11.653807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:11.653835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T17:46:11.654853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:11.663500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:11.672051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.672105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.672145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.672175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.672199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.672224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.672263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.672290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.672310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.672339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.672358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.672380Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790333436850732:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.672854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.672874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.672887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.672892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.672907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.672918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.672929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.672941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.672957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.672968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.672975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.672985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.673043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.673055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.673073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.673083Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.673095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.673105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.673123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.673133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.673144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.673153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:11.676119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333436850740:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.676147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333436850740:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.676180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333436850740:2291];tablet_id=7207518622403 ... 
MNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790333436850730:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.683670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790333436850730:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.683692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790333436850730:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.683706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790333436850730:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.683719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790333436850730:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.684280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.684296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.684306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.684309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.684323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.684327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.684335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.684339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.684347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.684351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.684356Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.684359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.684398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.684407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.684422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.684431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.684442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.684450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.684465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.684473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.684484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.684492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2024-11-21T17:46:11.823553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790333436851025:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.823554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790333436851036:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.823572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.824131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:11.825643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790333436851039:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:11.958356Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211171880, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 '('"id" '"resource_id")) (let $2 (DqPhyStage '() (lambda '() (block '( (let $6 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $7 '('"id" '"level" '"resource_id")) (let $8 (KqpWideReadOlapTableRanges $6 (Void) $7 '() '() (lambda '($9) (block '( (let $10 '('eq '"level" (Int32 '"5"))) (let $11 '('?? 
$10 (Bool 'false))) (return (TKqpOlapExtractMembers (KqpOlapFilter $9 $11) $1)) ))))) (return (FromFlow (NarrowMap $8 (lambda '($12 $13) (AsStruct '('"id" $12) '('"resource_id" $13)))))) ))) '('('"_logical_id" '551) '('"_id" '"31cd0290-3df86b5-30316507-8a653fbd")))) (let $3 (DqCnUnionAll (TDqOutput $2 '"0"))) (let $4 (DqPhyStage '($3) (lambda '($14) $14) '('('"_logical_id" '730) '('"_id" '"e0d4811e-10084f0e-34a9a411-13dc306a")))) (let $5 (DqCnResult (TDqOutput $4 '"0") $1)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($2 $4) '($5) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort >> KqpOlap::TableSinkWithOlapStore [GOOD] >> KqpOlapAggregations::Aggregation_Count_NullGroupBy [GOOD] |75.6%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |75.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> KqpDecimalColumnShard::TestFilterNulls [GOOD] >> KqpOlapAggregations::Aggregation_Some_Null >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::TableSinkWithOlapStore [GOOD] Test command err: Trying to start YDB, gRPC: 25147, MsgBus: 24006 2024-11-21T17:46:12.003726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790333759188866:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:12.003983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e63/r3tmp/tmpv6q0CW/pdisk_1.dat 2024-11-21T17:46:12.048375Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25147, node 1 2024-11-21T17:46:12.060716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:12.060731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:12.060733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:12.060774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24006 TClient is connected to server localhost:24006 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:12.104619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:12.104654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:12.105804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:12.107678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:12.118303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:12.129197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.129269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.129330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.129358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.129378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.129397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.129415Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.129439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.129460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:12.129480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.129499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:12.129522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790333759189513:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:12.130080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:12.130096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:12.130109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:12.130113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:12.130129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:12.130138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:12.130147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:12.130159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:12.130168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:12.130178Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:12.130184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:12.130192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:12.130252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:12.130264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:12.130280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:12.130298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.130315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:12.130323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:12.130344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:12.130353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:12.130368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:12.130375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:12.133352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333759189511:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.133373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333759189511:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.133413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790333759189511:2288];tablet_id=7207518622 ... 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:12.141170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:12.141180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:12.141187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:12.141244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:12.141247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:12.141254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:12.141257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:12.141268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:12.141270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:12.141293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:12.141303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:12.141308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:12.141311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:12.141315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:12.141318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:12.141338Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:12.141341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:12.141353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:12.141356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.141364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:12.141367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:12.141378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:12.141381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:12.141389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:12.141391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:12.177046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T17:46:12.184252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2616;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2616;columns=5; 2024-11-21T17:46:12.266625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790333759189862:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.266647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790333759189870:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.266654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.267466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:12.269316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790333759189876:2385], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:46:12.394746Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790333759189969:2380] TxId: 281474976710663. Ctx: { TraceId: 01jd7x9rvfdxpmjfevnv0bhdz2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUzZTZmMWYtMmRlOGJiOGItN2RiNGY0MjQtNDA1OGZiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T17:46:12.416151Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790333759190255:2380] TxId: 281474976710664. Ctx: { TraceId: 01jd7x9rvfdxpmjfevnv0bhdz2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUzZTZmMWYtMmRlOGJiOGItN2RiNGY0MjQtNDA1OGZiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T17:46:12.432928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=9;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000002 ] uid: [ "uid_1000002" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000002;uid_1000002;"}}]}; 2024-11-21T17:46:12.432932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=8;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000, 1970-01-01 00:00:01.000001 ] uid: [ "uid_1000000", "uid_1000001" ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000000;uid_1000000;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"1970-01-01 00:00:01.000001;uid_1000001;"}}]}; 2024-11-21T17:46:12.434517Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790333759190738:2380] TxId: 281474976710665. Ctx: { TraceId: 01jd7x9rvfdxpmjfevnv0bhdz2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUzZTZmMWYtMmRlOGJiOGItN2RiNGY0MjQtNDA1OGZiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T17:46:12.445518Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790333759191013:2380] TxId: 281474976710666. Ctx: { TraceId: 01jd7x9rvfdxpmjfevnv0bhdz2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUzZTZmMWYtMmRlOGJiOGItN2RiNGY0MjQtNDA1OGZiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T17:46:12.461007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=7;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000002 ] uid: [ "uid_1000002" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976710663}],"starts":[{"inc":{"count_not_include":2},"id":281474976710663}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976710663}]},"p":{"include":2147483647}}]}; 2024-11-21T17:46:12.461018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=10;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=timestamp: [ 1970-01-01 00:00:01.000000, 1970-01-01 00:00:01.000001 ] uid: [ "uid_1000000", "uid_1000001" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976710663}],"starts":[{"inc":{"count_not_include":2},"id":281474976710663}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976710663}]},"p":{"include":2147483647}}]}; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 9306, MsgBus: 4240 2024-11-21T17:46:11.142133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790330371074466:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.142620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e71/r3tmp/tmpjKym4m/pdisk_1.dat 2024-11-21T17:46:11.195708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9306, node 1 2024-11-21T17:46:11.212429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.212445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.212447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.212482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4240 TClient is connected to server localhost:4240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:11.273001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:11.273041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:11.274229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:11.277685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:11.280464Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:11.286672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:11.296088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.296172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.296214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.296250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.296272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.296294Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.296316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.296340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.296375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.296398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.296423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.296445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790330371075112:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.299847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.299881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.299919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.299944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.299967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.299988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.300009Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.300031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.300088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.300112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.300134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.300156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790330371075111:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.300748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.300765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.300778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.300788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.300804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.300813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.300824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.300839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.300854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.300859Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switc ... ess=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:11.308956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.308964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.308972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.308980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.308993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.309002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.309009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.309017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.309024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.309032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.309038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.309044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.309068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.309077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.309096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.309104Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.309119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.309129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.309148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.309160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.309169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.309172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2024-11-21T17:46:11.475461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790330371075416:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.475479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790330371075408:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.475496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.476246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:11.478001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790330371075422:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:12.330458Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211171530, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, COUNT(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '836) '('"_id" '"d6bd72a4-d6fd836b-b7bfaf4d-e3cd5d0") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $24 (Int32 '1)) (let $25 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $24) $25))) (RangeCreate (AsList '($25 '((Just (Int32 '"7")) $24)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Uint64)) (let $11 '('"id" $1)) (let $12 '('('"_logical_id" '895) '('"_id" 
'"a115d646-b328483a-acc3cfe-fc55f5c4") '('"_wide_channels" (StructType '('_yql_agg_0 $10) $11)))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id")) (let $28 '('('"UsedKeyColumns" $27) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $29 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $28 (lambda '($30) (TKqpOlapAgg $30 '('('_yql_agg_0 'count '"level")) $27)))) (return (FromFlow $29)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('1))) (let $15 (StructType '('"column1" $10) $11)) (let $16 '('('"_logical_id" '1318) '('"_id" '"41073243-384be1a2-8ae25f0a-57ac32f6") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($31) (block '( (let $32 (lambda '($43 $44) $44 $43)) (let $33 (WideCombiner (ToFlow $31) '"" (lambda '($34 $35) $35) (lambda '($36 $37 $38) $37) (lambda '($39 $40 $41 $42) (AggrAdd $40 $42)) $32)) (return (FromFlow (WideSort $33 '('('1 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('1 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($45) (FromFlow (NarrowMap (ToFlow $45) (lambda '($46 $47) (AsStruct '('"column1" $46) '('"id" $47)))))) '('('"_logical_id" '1330) '('"_id" '"c342e422-507d01af-5ab48160-3385663a")))) (let $20 '($13 $17 $19)) (let $21 (DqCnResult (TDqOutput $19 '0) '('"id" '"column1"))) (let $22 (KqpTxResultBinding $9 '0 '0)) (let $23 (KqpPhysicalTx $20 '($21) '('($7 $22)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $23) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapStats::AddRowsTableStandalone [GOOD] >> KqpOlapIndexes::SchemeActualizationOnceOnStart [GOOD] >> KqpOlapAggregations::BlockGenericSimpleAggregation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestFilterNulls [GOOD] Test command err: Trying to start YDB, gRPC: 61215, MsgBus: 24023 2024-11-21T17:46:11.263301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790330140989659:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.263323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e6c/r3tmp/tmpVYVx4N/pdisk_1.dat 2024-11-21T17:46:11.311275Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61215, node 1 2024-11-21T17:46:11.323784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.323798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.323799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.323831Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24023 TClient is connected to server localhost:24023 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:11.364186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:11.364248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:11.365152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:11.394158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, dec Decimal(22,9), PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:11.518068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790330140990266:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.518095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.544407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:11.551058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.551093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.551120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.551137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.551152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.551166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.551177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.551191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.551206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.551223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.551239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.551254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790330140990342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:11.551552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.551562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.551570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.551573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.551585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.551591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.551597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.551600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.551610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.551612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.551616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.551622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.551664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.551673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.551684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.551690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:11.551697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.551703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.551713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.551719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.551725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.551727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FAL ... ;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.935960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790330629353554:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.935975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790330629353554:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.936326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.936338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.936347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.936353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.936362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.936368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.936373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.936377Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.936382Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.936384Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.936389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.936391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.936432Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.936443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.936453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.936455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.936462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.936464Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.936475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.936483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.936489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.936495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=304;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=304;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=304;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=304;columns=3; 2024-11-21T17:46:12.029907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790334435957804:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.029930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.029996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790334435957809:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.030695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:12.032213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790334435957811:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:12.143273Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211172083, txId: 18446744073709551615] shutting down 2024-11-21T17:46:12.180272Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211172167, txId: 18446744073709551615] shutting down 2024-11-21T17:46:12.184404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790334924320961:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.184453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.184554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790334924320966:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.189588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:12.191142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790334924320968:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:12.294993Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211172244, txId: 18446744073709551615] shutting down 2024-11-21T17:46:12.329715Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211172321, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b (0, 1) | 3 39 620b (5, 7) + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b (0, 1) | 1 39 2466b (5, 7) + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 
4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 6 12 122b (1, 8) | 7 14 122b (2, NULL) | 8 16 122b (2, 4) | 9 18 122b (2, 7) | 10 20 122b (2, 10) | 11 22 122b (3, 3) | 12 24 122b (3, 6) | 13 26 122b (3, 8) | 14 28 122b (4, NULL) | 15 30 122b (4, 4) | 16 32 122b (4, 7) | 17 34 122b (4, 10) | 18 36 122b (5, 3) | 19 38 122b (5, 6) | 19 39 122b (5, 7) + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > (0, 4) | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > (0, 7) | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > (0, 10) | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > (1, 3) | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > (1, 6) | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > (1, 8) | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > (2, NULL) | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > (2, 4) | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > (2, 10) | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > (3, 3) | 
PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > (3, 6) | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > (3, 8) | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > (4, NULL) | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > (4, 4) | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > (4, 7) | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > (4, 10) | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > (5, 3) | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > (5, 6) | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String 
: xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 ... xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | 
ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 7 12 122b (1, 8) | 8 14 122b (2, NULL) | 9 16 122b (2, 4) | 11 18 122b (2, 7) | 12 20 122b (2, 10) | 13 22 122b (3, 3) | 15 24 122b (3, 6) | 16 26 122b (3, 8) | 17 28 122b (4, NULL) | 19 30 122b (4, 4) | 20 32 122b (4, 7) | 21 34 122b (4, 10) | 24 36 122b (5, 3) | 25 38 122b (5, 6) | 25 39 122b (5, 7) + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 28 RowCount: 40 
DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 
20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} >> test.py::test[blocks-date_less_or_equal--Results] [GOOD] >> test.py::test[blocks-decimal_avg--Debug] >> KqpOlap::ScanFailedSnapshotTooOld >> KqpOlapStats::AddRowsTableInTableStore >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsTableStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 11049, MsgBus: 3398 2024-11-21T17:46:11.352974Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790329281248568:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.353156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e6b/r3tmp/tmpd938lS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11049, node 1 2024-11-21T17:46:11.409741Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:11.410194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.410208Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2024-11-21T17:46:11.410210Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.410249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3398 TClient is connected to server localhost:3398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:11.453853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:11.453883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:11.454997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:11.478763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:11.638575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790329281249174:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.638599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:11.665698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:11.673171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.673213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.673253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.673288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.673304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.673319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.673334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.673350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.673368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.673384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.673403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.673425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790329281249251:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:11.673842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.673858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.673872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.673876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.673891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.673900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.673909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.673919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.673928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.673937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.673943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.673951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.674006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.674024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.674045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.674054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:11.674064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.674072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.674088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.674098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.674108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.674115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finish ... 2TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=24288;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=24288;columns=3; |75.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[stream_lookup_join-lookup_join-default.txt-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 24814, MsgBus: 8738 2024-11-21T17:46:09.259149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790324904156990:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:09.259296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e87/r3tmp/tmp1GdYvV/pdisk_1.dat 2024-11-21T17:46:09.305243Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24814, node 1 2024-11-21T17:46:09.319523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:09.319535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:09.319537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:09.319572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8738 TClient is connected to server localhost:8738 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:09.360109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.360141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.361164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:09.381874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:09.387743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.395272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.395331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.395364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.395382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.395398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.395413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.395434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.395459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.395481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.395505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.395523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.395543Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790324904157633:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.397995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.398020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.398047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.398067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.398079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.398091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.398102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.398121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.398141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.398160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.398180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.398199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790324904157634:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.398545Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.398557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.398568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.398571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.398584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.398595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.398605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.398619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.398631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.398639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.398645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888; ... 
xecute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.404963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.404969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.404972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.404976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.404979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.404999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.405002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.405013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.405015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.405021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.405024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.405035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.405037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.405045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.405047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; 2024-11-21T17:46:09.539373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790324904157930:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.539394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.539443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790324904157942:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:09.539955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:09.541206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790324904157944:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:12.722790Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211169591, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":1}]},"Column":{"Id":8}},{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":9}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":10}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":10},{"Id":9},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '2730) '('"_id" '"83e3ebcb-714d4182-c02981f5-b999d404") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) (Int32 '1)) '((Nothing $2) (Int32 '0)))))))))))) $3)) (let 
$5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (TupleType $10 (DataType 'Uint64))) (let $12 (OptionalType $11)) (let $13 '('"level" $2)) (let $14 (StructType '('_yql_agg_0 $11) '('_yql_agg_1 $12) $13)) (let $15 '('('"_logical_id" '2789) '('"_id" '"46630eb7-2f82cb68-48ed6874-a4e20939") '('"_wide_channels" $14))) (let $16 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"id")) (let $35 '('"_yql_agg_0_cnt" '"count" '"id")) (let $36 '('"_yql_agg_1_sum" '"sum" '"level")) (let $37 '('"_yql_agg_1_cnt" '"count" '"level")) (let $38 '($34 $35 $36 $37)) (return (TKqpOlapAgg $33 $38 '('"level"))) ))))) (let $32 (lambda '($39 $40 $41 $42 $43) (block '( (let $44 (IfPresent $42 (lambda '($45) (Just '((Convert $45 'Double) $41))) (Nothing $12))) (return '((Convert $40 'Double) $39) $44 $43) )))) (return (FromFlow (WideMap $31 $32))) ))) $15)) (let $17 (DqCnHashShuffle (TDqOutput $16 '0) '('2))) (let $18 (OptionalType $10)) (let $19 (StructType '('"column1" $10) '('"column2" $18) $13)) (let $20 '('('"_logical_id" '5519) '('"_id" '"60393828-3a525d09-8af9558d-ea85e843") '('"_wide_channels" $19))) (let $21 (DqPhyStage '($17) (lambda '($46) (block '( (let $47 (lambda '($54 $55 $56 $57) (Nth $55 '0) (Nth $55 '1) $56)) (let $48 (lambda '($58 $59 $60 $61 $62 $63 $64) (block '( (let $65 (IfPresent $60 (lambda '($66) (IfPresent $64 (lambda '($67) (Just '((AggrAdd (Nth $66 '0) (Nth $67 '0)) (AggrAdd (Nth $66 '1) (Nth $67 '1))))) $60)) $64)) (return (AggrAdd (Nth $59 '0) $62) (AggrAdd (Nth $59 '1) $63) $65) )))) (let $49 (lambda '($68 $69 $70 $71) (block '( (let $72 (IfPresent $71 (lambda '($73) (Just (Div (Nth $73 '0) (Nth $73 '1)))) (Nothing $18))) (return (Div $69 $70) $72 $68) )))) (let $50 (WideCombiner (ToFlow $46) '"" (lambda '($51 $52 $53) $53) $47 $48 $49)) (return (FromFlow (WideSort $50 '('('2 (Bool 'true)))))) ))) $20)) (let $22 (DqCnMerge (TDqOutput $21 '0) '('('2 '"Asc")))) (let $23 (DqPhyStage '($22) (lambda '($74) (FromFlow (NarrowMap (ToFlow $74) (lambda '($75 $76 $77) (AsStruct '('"column1" $75) '('"column2" $76) '('"level" $77)))))) '('('"_logical_id" '5531) '('"_id" '"116b0e1d-c81ebf55-affcad7a-1b529ba7")))) (let $24 '($16 $21 $23)) (let $25 '('"level" '"column1" '"column2")) (let $26 (DqCnResult (TDqOutput $23 '0) $25)) (let $27 (KqpTxResultBinding $9 '0 '0)) (let $28 (KqpPhysicalTx $24 '($26) '('($7 $27)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $28) '((KqpTxResultBinding (ListType $19) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::SchemeActualizationOnceOnStart [GOOD] Test command err: Trying to start YDB, gRPC: 7891, MsgBus: 30104 2024-11-21T17:46:12.049038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790336005960009:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:12.049215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e60/r3tmp/tmpUh1Va0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7891, node 1 2024-11-21T17:46:12.108338Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:12.112296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:12.112312Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:12.112313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:12.112360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30104 TClient is connected to server localhost:30104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:12.149849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:12.149877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:12.151013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:12.185123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:12.198360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:12.207180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.207229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.207269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.207285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.207304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.207318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.207331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.207348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.207367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:12.207377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.207386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:12.207401Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790336005960659:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:12.210588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.210613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.210654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.210671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.210691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.210711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.210731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.210755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.210772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:12.210789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.210809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:12.210824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005960660:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:12.212911Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.212930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.212952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.212961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.212969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.212978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.212986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.213001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.213014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005960663 ... e itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T17:46:12.458814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=7f968112-a83011ef-b15bcb72-5634045;fline=with_appended.cpp:80;portions=3,;task_id=7f968112-a83011ef-b15bcb72-5634045; 2024-11-21T17:46:12.460609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=7f967ad2-a83011ef-a3f71718-49f6a1ca;fline=with_appended.cpp:80;portions=3,;task_id=7f967ad2-a83011ef-a3f71718-49f6a1ca; 2024-11-21T17:46:12.487327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961020:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.487408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.488756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=7f9a6908-a83011ef-ba74a1bb-fbf06fe2;fline=with_appended.cpp:80;portions=3,;task_id=7f9a6908-a83011ef-ba74a1bb-fbf06fe2; 2024-11-21T17:46:12.489601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.496261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961079:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.496286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.497940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.506588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961126:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.506613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.508247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.520954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961173:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.520984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.522533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.534799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961220:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.534821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.536483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.549052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961267:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.549076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.550659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.562959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961314:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.562983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.564610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.577856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961361:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.577878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.579113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.590282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961408:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.590298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.591763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.597408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961455:2453], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.597427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.598913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T17:46:12.613148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961513:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.613182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.613311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790336005961530:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:12.614332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480 2024-11-21T17:46:12.618576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790336005961517:2461];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:12.618589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790336005961515:2459];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:12.623624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790336005961516:2460];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:12.625217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790336005961538:2464];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:12.625722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2024-11-21T17:46:12.625938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790336005961542:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2024-11-21T17:46:12.832412Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211172678, txId: 18446744073709551615] shutting down >> KqpOlapSparsed::AccessorActualization [GOOD] >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL >> KqpOlapAggregations::JsonDoc_GetValue_ToString >> test.py::test[window-generic/aggregations_after_current--Debug] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Plan] >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy >> test.py::test[window-generic/aggregations_after_current--Plan] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Results] >> KqpOlapAggregations::BlockGenericSimpleAggregation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::AccessorActualization [GOOD] Test command err: Trying to start YDB, gRPC: 6832, MsgBus: 21234 2024-11-21T17:46:08.829222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790316509396288:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.829278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e8c/r3tmp/tmpVGTo4e/pdisk_1.dat 2024-11-21T17:46:08.873229Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6832, node 1 2024-11-21T17:46:08.886512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:08.886531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:08.886533Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:08.886572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21234 TClient is connected to server localhost:21234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:08.930088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:08.930121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:08.931176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:08.932116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:08.942774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:08.953778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.953849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.953911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:08.953945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:08.953979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:08.954006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:08.954020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:08.954041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:08.954061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:08.954081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.954100Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:08.954116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790316509396933:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:08.954636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:08.954652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:08.954665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:08.954669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:08.954685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:08.954690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:08.954701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:08.954709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:08.954724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:08.954732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:08.954739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:08.954748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:08.954804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:08.954815Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:08.954831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:08.954835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:08.954845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:08.954854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:08.954868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:08.954877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:08.954887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:08.954895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:08.957889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316509396931:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:08.957917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316509396931:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:08.957954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790316509396931:2288];tablet_id=720751862240 ... 
date_portion;blobs_size=140800;portion_bytes=140800;portion_raw_bytes=4058910; 2024-11-21T17:46:12.971856Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439790316509397001:2303];tablet_id=72075186224037888;parent=[1:7439790316509396952:2291];fline=manager.h:99;event=ask_data;request=request_id=36;3={portions_count=1};; 2024-11-21T17:46:12.971878Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;fline=manager.cpp:14;event=unlock;process_id=CS::TTL::7fe95db0-a83011ef-9e1b382b-81e7891a; 2024-11-21T17:46:12.971896Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:12.971927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:12.971942Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T17:46:12.971959Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732210873000;tx_id=18446744073709551615;;current_snapshot_ts=1732211173000; 2024-11-21T17:46:12.971968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:12.971976Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:12.971985Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:12.972025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:12.972033Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439790316509396987:2297];tablet_id=72075186224037890;parent=[1:7439790316509396931:2288];fline=manager.h:99;event=ask_data;request=request_id=38;3={portions_count=1};; 2024-11-21T17:46:12.972047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:12.972087Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037890 Save Batch GenStep: 1:2 Blob count: 1 2024-11-21T17:46:12.972105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:12.972124Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=3;external_task_id=7fe95db0-a83011ef-9e1b382b-81e7891a;mem=4186122;cpu=0; 2024-11-21T17:46:13.204858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7439790337984233925:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.204881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.204896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790337984233930:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.205672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:46:13.207591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790337984233932:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:46:13.340905Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710665 scanId: 1 version: {1732211173259:max} readable: {1732211173378:max} at tablet 72075186224037888 2024-11-21T17:46:13.340946Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710665 scanId: 1 at tablet 72075186224037888 2024-11-21T17:46:13.341025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790316509396952:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211173259:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T17:46:13.345367Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790316509396952:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211173259:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2024-11-21T17:46:13.345591Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710665 scanId: 1 version: {1732211173259:max} readable: {1732211173378:max} at tablet 72075186224037889 2024-11-21T17:46:13.345625Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710665 scanId: 1 at tablet 72075186224037889 2024-11-21T17:46:13.345715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790316509396932:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211173259:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T17:46:13.345782Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790316509396932:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211173259:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2024-11-21T17:46:13.345902Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710665 scanId: 1 version: {1732211173259:max} readable: {1732211173378:max} at tablet 72075186224037890 2024-11-21T17:46:13.345927Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710665 scanId: 1 at tablet 72075186224037890 2024-11-21T17:46:13.345974Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439790316509396931:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211173259:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 6 } Function { Id: 2 Arguments { Id: 3 } } } } } Command { Projection { Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T17:46:13.346029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790316509396931:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710665;scan_id=1;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211173259:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{group_by_assignes=[{op=count;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:6;};];projections=[];};{projections=[G:6;];};]; 2024-11-21T17:46:13.346749Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439790316509397001:2303];tablet_id=72075186224037888;parent=[1:7439790316509396952:2291];fline=manager.h:99;event=ask_data;request=request_id=39;3={portions_count=1};; 2024-11-21T17:46:13.346779Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439790316509396994:2300];tablet_id=72075186224037889;parent=[1:7439790316509396932:2289];fline=manager.h:99;event=ask_data;request=request_id=40;3={portions_count=1};; 2024-11-21T17:46:13.346787Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439790316509396987:2297];tablet_id=72075186224037890;parent=[1:7439790316509396931:2288];fline=manager.h:99;event=ask_data;request=request_id=41;3={portions_count=1};; 2024-11-21T17:46:13.346970Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:46:13.346982Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:46:13.346985Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:46:13.347107Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:46:13.347158Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:46:13.347199Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:46:13.351791Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2024-11-21T17:46:13.356245Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037890 2024-11-21T17:46:13.356520Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2024-11-21T17:46:13.402367Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211173259, txId: 18446744073709551615] shutting down [[10000u]] >> test.py::test[aggregate-group_by_gs_alt_duo--Debug] [GOOD] >> 
test.py::test[aggregate-group_by_gs_alt_duo--Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] Test command err: Trying to start YDB, gRPC: 12798, MsgBus: 28137 2024-11-21T17:46:12.982407Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790334897440762:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:12.982436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e5d/r3tmp/tmpQFjxZw/pdisk_1.dat 2024-11-21T17:46:13.028132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12798, node 1 2024-11-21T17:46:13.034533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:13.034548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:13.034550Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:13.034582Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28137 TClient is connected to server localhost:28137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:13.078674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:13.082864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:13.082893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:13.084011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:13.090043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:13.098991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.099037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.099070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.099087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.099099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.099120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.099139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.099154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.099165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.099190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.099205Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.099216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339192408487:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.101289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.101303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.101322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.101335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.101348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.101361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.101379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.101397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.101419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.101432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.101444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.101456Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790339192408485:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.101782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:13.101793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:13.101800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:13.101803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.101811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.101814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.101820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.101823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.101828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.101835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.101839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
ARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:13.106480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:13.106488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:13.106491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.106503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.106510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.106516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.106521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.106528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.106536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.106540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.106543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.106558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.106564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.106572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.106577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.106583Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.106585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.106593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.106598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.106603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.106605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 2024-11-21T17:46:13.235753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790339192408773:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.235778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.235894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790339192408792:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.236566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:13.238116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790339192408794:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:13.420750Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211173294, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SOME(level) FROM `/Root/tableWithNulls` WHERE id > 5 JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (OptionalType (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '697) '('"_id" '"e2292507-f6c33dc-933e5afd-57e479a") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $27 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $27) '((Nothing $2) $27))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType 
$2 $1)) (let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 '('('"_logical_id" '755) '('"_id" '"27d00e7b-d047a4a2-874ec2d1-ac569634") '('"_wide_channels" (StructType '('_yql_agg_0 $2))))) (let $12 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $30 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"level") '() $29 (lambda '($31) (TKqpOlapAgg $31 '('('_yql_agg_0 'some '"level")) '())))) (return (FromFlow $30)) ))) $11)) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($32) (block '( (let $33 (Bool 'false)) (let $34 (WideCondense1 (ToFlow $32) (lambda '($36) $36) (lambda '($37 $38) $33) (lambda '($39 $40) (Coalesce $40 $39)))) (let $35 (Condense (NarrowMap (Take $34 (Uint64 '1)) (lambda '($41) (AsStruct '('Some0 $41)))) (Nothing (OptionalType (StructType '('Some0 $2)))) (lambda '($42 $43) $33) (lambda '($44 $45) (Just $44)))) (return (FromFlow (Map $35 (lambda '($46) (AsList (AsStruct '('"column0" (Member $46 'Some0)))))))) ))) '('('"_logical_id" '1289) '('"_id" '"d8e61e74-5c6eee47-67bd93bc-f3c0553")))) (let $15 (DqCnValue (TDqOutput $14 '0))) (let $16 (KqpTxResultBinding $10 '0 '0)) (let $17 '('('"type" '"scan"))) (let $18 (KqpPhysicalTx '($12 $14) '($15) '('($8 $16)) $17)) (let $19 '"%kqp%tx_result_binding_1_0") (let $20 (ListType (StructType '('"column0" $2)))) (let $21 '('('"_logical_id" '1385) '('"_id" '"8419ae18-3a9fc4f-edb1fb9-56e99f6c") $3)) (let $22 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $21)) (let $23 (DqCnResult (TDqOutput $22 '0) '('"column0"))) (let $24 (KqpTxResultBinding $20 '1 '0)) (let $25 (KqpPhysicalTx '($22) '($23) '('($19 $24)) $17)) (let $26 '($7 $18 $25)) (return (KqpPhysicalQuery $26 '((KqpTxResultBinding $20 '"2" '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::SimpleQueryOlapStats >> KqpOlapAggregations::JsonDoc_Exists >> KqpOlapStats::AddRowsSomeTablesInTableStore >> KqpOlap::BlockChannelForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericSimpleAggregation [GOOD] Test command err: Trying to start YDB, gRPC: 29397, MsgBus: 26680 2024-11-21T17:46:13.155985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790341803406663:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:13.156003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e5b/r3tmp/tmpiqiGIU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29397, node 1 2024-11-21T17:46:13.209569Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:13.212746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:13.212759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:13.212760Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:13.212786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26680 TClient is connected to server localhost:26680 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:13.255357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:13.256152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:13.256175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:46:13.257312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:13.265654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:13.273469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.273542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.273573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.273590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.273608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.273623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.273638Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.273658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.273674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.273690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.273704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.273720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341803407293:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.276461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.276485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.276515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.276530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.276544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.276564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.276583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.276602Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.276624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.276645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.276666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.276685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790341803407295:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.279640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.279658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.279687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.279707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.279722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.279741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.279760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.279779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341803407294:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.279805Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:74397903418034072 ... 1-21T17:46:13.283912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:13.283916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:13.283918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.283925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.283927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.283931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.283933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.283936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.283938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.283941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.283943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.283956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.283958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.283965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.283968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.283974Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.283980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.283987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.283994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.284000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.284002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, COUNT(*), SUM(id) FROM `/Root/tableWithNulls` WHERE level = 5 GROUP BY level ORDER BY level; 2024-11-21T17:46:13.412649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790341803407586:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.412678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790341803407597:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.412687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.413483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:13.415200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790341803407600:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, COUNT(*), SUM(id) FROM `/Root/tableWithNulls` WHERE level = 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":11}},{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":12}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":11},{"Id":12},{"Id":3}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":11}},{"Function":{"Id":5,"Arguments":[{"Id":1}]},"Column":{"Id":12}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":11},{"Id":12},{"Id":3}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Uint64)) (let $2 (DataType 'Int64)) (let $3 '('"level" 
(OptionalType (DataType 'Int32)))) (let $4 (StructType '('_yql_agg_0 $1) '('_yql_agg_1 $2) $3)) (let $5 '('('"_logical_id" '1162) '('"_id" '"1bd6f42d-b184b822-9460519c-1b7a0ead") '('"_wide_channels" $4))) (let $6 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $17 (KqpBlockReadOlapTableRanges $16 (Void) '('"id" '"level") '() '() (lambda '($18) (block '( (let $19 '('eq '"level" (Int32 '"5"))) (let $20 '('?? $19 (Bool 'false))) (let $21 '('_yql_agg_0 '"count" '"*")) (let $22 '('_yql_agg_1 'sum '"id")) (return (TKqpOlapAgg (KqpOlapFilter $18 $20) '($21 $22) '('"level"))) ))))) (return (FromFlow $17)) ))) $5)) (let $7 (DqCnHashShuffle (TDqOutput $6 '0) '('2))) (let $8 (StructType '('"column1" $1) '('"column2" $2) $3)) (let $9 '('('"_logical_id" '1824) '('"_id" '"6232594a-36d834e-ca27f6e4-f31030f8") '('"_wide_channels" $8))) (let $10 (DqPhyStage '($7) (lambda '($23) (block '( (let $24 (lambda '($31 $32 $33 $34) $32 $33)) (let $25 (lambda '($35 $36 $37 $38 $39 $40) (AggrAdd $36 $39) (AggrAdd $37 $40))) (let $26 (lambda '($41 $42 $43) $42 $43 $41)) (let $27 (WideCombiner (WideFromBlocks (ToFlow $23)) '"" (lambda '($28 $29 $30) $30) $24 $25 $26)) (return (FromFlow (WideSort $27 '('('2 (Bool 'true)))))) ))) $9)) (let $11 (DqCnMerge (TDqOutput $10 '0) '('('2 '"Asc")))) (let $12 (DqPhyStage '($11) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46 $47) (AsStruct '('"column1" $45) '('"column2" $46) '('"level" $47)))))) '('('"_logical_id" '1836) '('"_id" '"fc67a570-5aff5cff-1a8ed971-67950cc2")))) (let $13 '($6 $10 $12)) (let $14 '('"level" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $12 '0) $14)) (return (KqpPhysicalQuery '((KqpPhysicalTx $13 '($15) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $8) '0 '0)) '('('"type" '"query")))) ) >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> KqpOlap::SelectLimit1ManyShards [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue_ToString [GOOD] >> KqpOlap::PredicatePushdownParameterTypesValidation >> KqpOlapAggregations::Aggregation_MaxL >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean_with_restarts >> KqpOlapSysView::StatsSysViewRanges >> KqpLimits::CancelAfterRoTxWithFollowerLegacy [GOOD] >> KqpOlapAggregations::JsonDoc_Exists [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_GetValue_ToString [GOOD] Test command err: Trying to start YDB, gRPC: 15831, MsgBus: 16330 2024-11-21T17:46:13.660616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790339936878772:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:13.660788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e54/r3tmp/tmpKFjQwL/pdisk_1.dat 2024-11-21T17:46:13.703655Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15831, node 1 2024-11-21T17:46:13.717666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:13.717677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:13.717679Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:13.717704Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16330 TClient is connected to server localhost:16330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:13.761443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:13.761474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:13.762502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:13.783743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:13.792817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:13.801827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.801894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.801961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.801984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.802009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.802030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.802049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.802074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.802096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.802117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.802138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.802159Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790339936879410:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.802620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:13.802636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:13.802648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:13.802652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.802667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.802671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.802680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.802695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.802708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.802717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.802724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.802733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.802782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.802793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.802807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.802815Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.802825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.802833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.802847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.802856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.802865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.802873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:13.806151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339936879411:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.806175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339936879411:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.806216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339936879411:2290];tablet_id=7207518622 ... 
2;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.811975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.811987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.811992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.811999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.812007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.812013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.812016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.812023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.812028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.812033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.812035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1" RETURNING String) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1" RETURNING String) = "val1" AND id = 6; 2024-11-21T17:46:13.925326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790339936879703:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.925345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.925355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790339936879713:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.925939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:13.927332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790339936879717:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:14.092728Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211174000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.col1"), JSON_VALUE(jsondoc, "$.col1" RETURNING String) FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsondoc, "$.col1" RETURNING String) = "val1" AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val1"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 
'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1290) '('"_id" '"dabc7ff5-59fb901-6fd4c320-5ae4ac26") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $21 (Int32 '"6")) (let $22 (Just $21)) (let $23 (Int32 '1)) (let $24 '($22 $23)) (let $25 (If (== $21 (Int32 '2147483647)) $24 '((+ $22 $23) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($24 $25)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'String)) (let $11 (DataType 'Utf8)) (let $12 (OptionalType $11)) (let $13 (OptionalType $10)) (let $14 (DqPhyStage '() (lambda '() (block '( (let $26 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $27 '('"id" '"jsondoc" '"jsonval")) (let $28 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $29 (OptionalType (DataType 'JsonDocument))) (let $30 (DataType 'Json)) (let $31 '((VariantType (TupleType (TupleType (DataType 'Uint8) $10) $12)))) (let $32 '((ResourceType '"JsonPath"))) (let $33 (ResourceType '"JsonNode")) (let $34 (DictType $11 $33)) (let $35 '($34)) (let $36 (CallableType '() $31 '($29) $32 $35)) (let $37 '('('"strict"))) (let $38 (Udf '"Json2.JsonDocumentSqlValueUtf8" (Void) (VoidType) '"" $36 (VoidType) '"" $37)) (let $39 (CallableType '() $32 '($11))) (let $40 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $39 (VoidType) '"" '())) (let $41 (Apply $40 (Utf8 '"$.col1"))) (let $42 (Dict $34)) (let $43 (lambda '($54) (block '( (let $55 (Nothing $13)) (return $55) )))) (let $44 (lambda '($56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (SafeCast $58 $10))) (Nothing $13))) (return (If (Exists $56) $57 (Nothing $13))) )))) (let $45 (KqpWideReadOlapTableRanges $26 %kqp%tx_result_binding_0_0 $27 '() $28 (lambda '($46) (block '( (let $47 (StructType $9 '('"jsondoc" $29) '('"jsonval" (OptionalType $30)))) (let $48 (KqpOlapApply $47 '('"jsondoc") (lambda '($51) (block '( (let $52 (Apply $38 $51 $41 $42)) (let $53 (Nothing $13)) (return (Visit $52 '0 $43 '1 $44)) ))))) (let $49 '('eq $48 (String '"val1"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $46 $50)) ))))) (return (FromFlow (NarrowMap $45 (lambda '($59 $60 $61) (block '( (let $62 (OptionalType $33)) (let $63 (CallableType '() $31 '($62) $32 $35)) (let $64 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $63 (VoidType) '"" $37)) (let $65 (IfPresent $61 (lambda '($70) (block '( (let $71 '($30 '"" '1)) (let $72 (CallableType '() '($33) $71)) (let $73 (Udf '"Json2.Parse" (Void) (VoidType) '"" $72 (VoidType) '"" '())) (return (Just (Apply $73 $70))) ))) (Nothing $62))) (let $66 (Apply $64 $65 $41 $42)) (let $67 (Visit $66 '0 (lambda '($74) (Nothing $12)) '1 (lambda '($75) $75))) (let $68 (Apply $38 $60 $41 $42)) (let $69 (Visit $68 '0 $43 '1 $44)) (return (AsStruct '('"column1" $67) '('"column2" $69) '('"id" $59))) )))))) ))) '('('"_logical_id" '1361) '('"_id" '"9bbb7e6a-6ec81714-680a6832-20f7f138")))) (let $15 (DqCnUnionAll (TDqOutput $14 '0))) (let $16 (DqPhyStage '($15) (lambda '($76) $76) '('('"_logical_id" '2418) '('"_id" '"c289273a-42cd0ffe-9eee1fb0-9215c40a")))) (let $17 '('"id" '"column1" '"column2")) (let $18 (DqCnResult (TDqOutput $16 '0) $17)) (let $19 (KqpTxResultBinding $8 '0 '0)) (let $20 (KqpPhysicalTx '($14 $16) '($18) '('($5 $19)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $20) '((KqpTxResultBinding (ListType (StructType '('"column1" $12) '('"column2" $13) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlap::SimpleQueryOlapStats [GOOD] >> KqpOlapSparsed::SwitchingMultiColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SelectLimit1ManyShards [GOOD] Test command err: 2024-11-21T17:46:09.180077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:09.182923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:09.183036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:09.183072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:09.183406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:09.183417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e98/r3tmp/tmpPsAyyP/pdisk_1.dat 2024-11-21T17:46:09.263360Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:09.347413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:09.435053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.435083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.436012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.436030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.447147Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:09.447294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:09.447383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:09.780755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.820584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:1286:2803];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:09.823805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:1286:2803];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:09.823867Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2024-11-21T17:46:09.824514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.824555Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.824594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.824612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.824636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.824654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.824672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.824689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.824707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.824728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.824745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.824762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.827973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:1292:2807];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:09.829843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:1292:2807];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:09.829880Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037895 2024-11-21T17:46:09.830311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.830328Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.830355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.830367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.830377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.830395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.830411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.830428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.830440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.830451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.830462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.830472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.834012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:1298:2809];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:09.835543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:1298:2809];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:09.835573Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037896 2024-11-21T17:46:09.836054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.836075Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.836116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.836137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.836155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.836171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... ost\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":69,\"WaitInputTimeUs\":95050}]},{\"CpuTimeUs\":123,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":70,\"WaitInputTimeUs\":95073}]},{\"CpuTimeUs\":102,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":71,\"WaitInputTimeUs\":95086}]},{\"CpuTimeUs\":87,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":72,\"WaitInputTimeUs\":95107}]},{\"CpuTimeUs\":793,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":73,\"WaitInputTimeUs\":95133}]},{\"CpuTimeUs\":85,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":74,\"WaitInputTimeUs\":94447}]},{\"CpuTimeUs\":112,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":75,\"WaitInputTimeUs\":94465}]},{\"CpuTimeUs\":122,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173406,\"TaskId\":76,\"WaitInputTimeUs\":94536}]},{\"CpuTimeUs\":118,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":77,\"WaitInputTimeUs\":94598}]},{\"CpuTimeUs\":136,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":78,\"WaitInputTimeUs\":96700}]},{\"CpuTimeUs\":123,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":79,\"WaitInputTimeUs\":96762}]},{\"CpuTimeUs\":99,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":80,\"WaitInputTimeUs\":96775}]},{\"CpuTimeUs\":122,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":81,\"WaitInputTimeUs\":96794}]},{\"CpuTimeUs\":121,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":82,\"WaitInputTimeUs\":96838}]},{\"CpuTimeUs\":109,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"S
tartTimeMs\":1732211173407,\"TaskId\":83,\"WaitInputTimeUs\":96861}]},{\"CpuTimeUs\":148,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":84,\"WaitInputTimeUs\":96884}]},{\"CpuTimeUs\":103,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":85,\"WaitInputTimeUs\":96901}]},{\"CpuTimeUs\":111,\"Tasks\":[{\"ComputeTimeUs\":5,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":86,\"WaitInputTimeUs\":96922}]},{\"CpuTimeUs\":101,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173407,\"TaskId\":87,\"WaitInputTimeUs\":96941}]},{\"CpuTimeUs\":204,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173398,\"TaskId\":88,\"WaitInputTimeUs\":96962}]},{\"CpuTimeUs\":113,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173398,\"TaskId\":89,\"WaitInputTimeUs\":97003}]},{\"CpuTimeUs\":99,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173398,\"TaskId\":90,\"WaitInputTimeUs\":97072}]},{\"CpuTimeUs\":89,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":91,\"WaitInputTimeUs\":97104}]},{\"CpuTimeUs\":106,\"Tasks\":[{\"ComputeTimeUs\":14,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":92,\"WaitInputTimeUs\":97132}]},{\"CpuTimeUs\":94,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":93,\"WaitInputTimeUs\":97166}]},{\"CpuTimeUs\":116,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":94,\"WaitInputTimeUs\":97199}]},{\"CpuTimeUs\":101,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":95,\"WaitInputTimeUs\":97233}]},{\"CpuTimeUs\":168,\"Tasks\":[{\"ComputeTimeUs\":6,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":96,\"WaitInputTimeUs\":170473}]},{\"CpuTimeUs\":105,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":97,\"WaitInputTimeUs\":170640}]},{\"CpuTimeUs\":118,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":98,\"WaitInputTimeUs\":170700}]},{\"CpuTimeUs\":95,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":99,\"WaitInputTimeUs\":170761}]},{\"CpuTimeUs\":127,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":100,\"WaitInputTimeUs\":170799}]},{\"CpuTimeUs\":126,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":101,\"WaitInputTimeUs\":170831}]},{\"CpuTimeUs\":553,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":102,\"WaitInputTimeUs\":170879}]},{\"CpuTimeUs\":138,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":103,\"WaitInputTimeUs\":170497}]},{\"CpuTimeUs\":117,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":1
04,\"WaitInputTimeUs\":170554}]},{\"CpuTimeUs\":105,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":105,\"WaitInputTimeUs\":170623}]},{\"CpuTimeUs\":104,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":106,\"WaitInputTimeUs\":170702}]},{\"CpuTimeUs\":130,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173399,\"TaskId\":107,\"WaitInputTimeUs\":170779}]},{\"CpuTimeUs\":134,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":108,\"WaitInputTimeUs\":170868}]},{\"CpuTimeUs\":133,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":109,\"WaitInputTimeUs\":170942}]},{\"CpuTimeUs\":163,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":110,\"WaitInputTimeUs\":171015}]},{\"CpuTimeUs\":148,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":111,\"WaitInputTimeUs\":171089}]},{\"CpuTimeUs\":145,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":112,\"WaitInputTimeUs\":171163}]},{\"CpuTimeUs\":142,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":113,\"WaitInputTimeUs\":171252}]},{\"CpuTimeUs\":167,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":114,\"WaitInputTimeUs\":172102}]},{\"CpuTimeUs\":149,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":115,\"WaitInputTimeUs\":172202}]},{\"CpuTimeUs\":156,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":116,\"WaitInputTimeUs\":172276}]},{\"CpuTimeUs\":129,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":117,\"WaitInputTimeUs\":172344}]},{\"CpuTimeUs\":117,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":118,\"WaitInputTimeUs\":172414}]},{\"CpuTimeUs\":104,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":119,\"WaitInputTimeUs\":172489}]},{\"CpuTimeUs\":129,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173400,\"TaskId\":120,\"WaitInputTimeUs\":172559}]},{\"CpuTimeUs\":98,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":121,\"WaitInputTimeUs\":172665}]},{\"CpuTimeUs\":129,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":122,\"WaitInputTimeUs\":172715}]},{\"CpuTimeUs\":106,\"Tasks\":[{\"ComputeTimeUs\":4,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":123,\"WaitInputTimeUs\":172789}]},{\"CpuTimeUs\":112,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":124,\"WaitInputTimeUs\":172862}]},{\"CpuTimeUs\":113,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":125,\"Wa
itInputTimeUs\":172938}]},{\"CpuTimeUs\":112,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":126,\"WaitInputTimeUs\":173005}]},{\"CpuTimeUs\":106,\"Tasks\":[{\"ComputeTimeUs\":3,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":127,\"WaitInputTimeUs\":173067}]},{\"CpuTimeUs\":96,\"Tasks\":[{\"ComputeTimeUs\":2,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211173401,\"TaskId\":128,\"WaitInputTimeUs\":173128}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"selectStore\\/selectTable\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"c4b891ad-4ff4bf01-f7eec7cf-3e1ccbe7\",\"Stats\":{\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\n\022\014\010U\020\212\"\030\255\260\001 \201\001" } } 2024-11-21T17:46:14.047983Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:3308:3890] TxId: 281474976715662. Ctx: { TraceId: 01jd7x9se23a7wzzjwxf9kc8cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdjMjhkMjUtNmUxMTQ4NWMtYWY0OGRjZmYtZTA3MGFkNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:46:14.048000Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:3308:3890] TxId: 281474976715662. Ctx: { TraceId: 01jd7x9se23a7wzzjwxf9kc8cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdjMjhkMjUtNmUxMTQ4NWMtYWY0OGRjZmYtZTA3MGFkNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.022573s ReadRows: 2 ReadBytes: 2151 ru: 15 rate limiter was not found force flag: 1 2024-11-21T17:46:14.049195Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.h:30;event=destroy;allocation_id=21;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.h:30;event=destroy;allocation_id=15;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.h:30;event=destroy;allocation_id=16;stage=FO::ACCESSORS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.h:30;event=destroy;allocation_id=13;stage=FO::ACCESSORS; >> KqpOlap::PushdownFilter >> KqpOlapStats::AddRowsTableInTableStore [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Debug] |75.6%| [TA] $(B)/ydb/tests/functional/tpc/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerLegacy [GOOD] Test command err: Trying to start YDB, gRPC: 24661, MsgBus: 7983 2024-11-21T17:45:46.765835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790221935762846:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:46.765855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001691/r3tmp/tmpktgLhn/pdisk_1.dat 2024-11-21T17:45:46.859098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:46.859126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:46.861396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24661, node 1 2024-11-21T17:45:46.893731Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:46.908510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:46.908522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:46.908524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:46.908567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7983 TClient is connected to server localhost:7983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:46.992098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.000881Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:47.017093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:47.077761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.098414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.111045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:47.279493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226230731541:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.279536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.319291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.328160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.343099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.357431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.382397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.394877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:45:47.429357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226230732051:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.429392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.429566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790226230732056:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:47.430473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:45:47.433751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:45:47.433831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790226230732058:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:45:47.630862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28798, MsgBus: 10759 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001691/r3tmp/tmpnADHA1/pdisk_1.dat 2024-11-21T17:45:48.960331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:48.965067Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28798, node 2 2024-11-21T17:45:48.976941Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:48.976954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:48.976957Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:48.976996Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10759 2024-11-21T17:45:49.046093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:49.046121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:10759 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:45:49.047044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:49.048568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.061545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.070602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:45:49.086519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.099344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:49.254904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790238283030888:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:49.254930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource p ... 0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:03.556014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:03.567330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:03.683181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790295130695884:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.683209Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.687953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.695073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.705667Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.712587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.726782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.733754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.749059Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790295130696388:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.749070Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790295130696393:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.749077Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:03.749750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:03.753605Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790295130696395:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:03.953908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.999848Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gty50mm9nkssnf9a5bg, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.002945Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gv08jpjsjy8drameaw3, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.006535Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gv35xwd2r3vbarpktk3, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.011147Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gv781e9372nhvp10x1p, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.017172Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439790299425664102:2454] TxId: 281474976715673. Ctx: { TraceId: 01jd7x9gvb2rfssf2cqx7ng4nc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 5ms } {
: Error: Cancelling after 5ms during execution } ] 2024-11-21T17:46:04.017351Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gvb2rfssf2cqx7ng4nc, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.025114Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gvj7ss2hcpy5jvkg0vv, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.033261Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gvtahx46dwfv78d2bv0, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.042378Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gw27vht53gnnprq54qw, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.060756Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gwj32h4n22fy064sdrw, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.080764Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gx48fdbbcvk92jej81w, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.101794Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gxna6gf9a7k0pg3jnt3, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.118151Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gy6cj9qytsr8bq72naz, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.160167Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439790299425664264:2454] TxId: 281474976715680. Ctx: { TraceId: 01jd7x9gzd60kn5vztaqhr7nw8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 18ms } {
: Error: Cancelling after 18ms during execution } ] 2024-11-21T17:46:04.160278Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439790299425664268:2527], TxId: 281474976715680, task: 1. Ctx: { TraceId : 01jd7x9gzd60kn5vztaqhr7nw8. SessionId : ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439790299425664264:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:04.160439Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439790299425664269:2528], TxId: 281474976715680, task: 2. Ctx: { TraceId : 01jd7x9gzd60kn5vztaqhr7nw8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439790299425664264:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:04.160624Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9gzd60kn5vztaqhr7nw8, Create QueryResponse for error on request, msg: 2024-11-21T17:46:04.184454Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439790299425664284:2454] TxId: 281474976715682. Ctx: { TraceId: 01jd7x9h01a3q98jfrzx9qm3tf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 19ms } {
: Error: Cancelling after 22ms during execution } ] 2024-11-21T17:46:04.184758Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439790299425664288:2533], TxId: 281474976715682, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7x9h01a3q98jfrzx9qm3tf. SessionId : ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439790299425664284:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:04.184769Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439790299425664289:2534], TxId: 281474976715682, task: 2. Ctx: { TraceId : 01jd7x9h01a3q98jfrzx9qm3tf. SessionId : ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439790299425664284:2454], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T17:46:04.185008Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA0ZTE2ZDUtOWZhOWUzNjItYThiNzg0YTgtY2ZmMjM2MjU=, ActorId: [3:7439790295130696690:2454], ActorState: ExecuteState, TraceId: 01jd7x9h01a3q98jfrzx9qm3tf, Create QueryResponse for error on request, msg: 2024-11-21T17:46:08.397968Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439790295130694339:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.399717Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleQueryOlapStats [GOOD] Test command err: Trying to start YDB, gRPC: 27082, MsgBus: 26970 2024-11-21T17:46:14.096090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790345217283806:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.096256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e4c/r3tmp/tmpqak7Yd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27082, node 1 2024-11-21T17:46:14.150711Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:14.154318Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.154332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.154333Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.154366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26970 TClient is connected to server localhost:26970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:14.196790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.196813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:14.197898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:14.226266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.238074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:14.247727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.247785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.247816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.247841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.247865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.247886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.247903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.247930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.247955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.247978Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.247999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.248018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790345217284457:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.251151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.251177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.251209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.251227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.251249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.251270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.251292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.251320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.251346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.251368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.251390Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.251407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345217284455:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.253572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.253590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.253606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.253615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.253624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.253632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.253639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.253653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345217284456:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.253673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:74397903452172844 ... 
s":1748,"TaskId":58}],"CpuTimeUs":90,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174598,"ComputeTimeUs":8,"NodeId":1,"OutputChannels":[{"ChannelId":35,"DstStageId":1}],"WaitInputTimeUs":1855,"TaskId":35}],"CpuTimeUs":133,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":22,"NodeId":1,"OutputChannels":[{"ChannelId":49,"DstStageId":1}],"WaitInputTimeUs":1488,"TaskId":49}],"CpuTimeUs":118,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":7,"NodeId":1,"OutputChannels":[{"ChannelId":52,"DstStageId":1}],"WaitInputTimeUs":1594,"TaskId":52}],"CpuTimeUs":74,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":14,"NodeId":1,"OutputChannels":[{"ChannelId":59,"DstStageId":1}],"WaitInputTimeUs":1723,"TaskId":59}],"CpuTimeUs":105,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174598,"ComputeTimeUs":10,"NodeId":1,"OutputChannels":[{"ChannelId":42,"DstStageId":1}],"WaitInputTimeUs":2017,"TaskId":42}],"CpuTimeUs":96,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":14,"NodeId":1,"OutputChannels":[{"ChannelId":50,"DstStageId":1}],"WaitInputTimeUs":1440,"TaskId":50}],"CpuTimeUs":101,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":16,"NodeId":1,"OutputChannels":[{"ChannelId":53,"DstStageId":1}],"WaitInputTimeUs":2423,"TaskId":53}],"CpuTimeUs":80,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":7,"NodeId":1,"OutputChannels":[{"ChannelId":60,"DstStageId":1}],"WaitInputTimeUs":1703,"TaskId":60}],"CpuTimeUs":99,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174598,"ComputeTimeUs":8,"NodeId":1,"OutputChannels":[{"ChannelId":43,"DstStageId":1}],"WaitInputTimeUs":2005,"TaskId":43}],"CpuTimeUs":174,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":10,"NodeId":1,"OutputChannels":[{"ChannelId":51,"DstStageId":1}],"WaitInputTimeUs":1387,"TaskId":51}],"CpuTimeUs":73,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":18,"NodeId":1,"OutputChannels":[{"ChannelId":62,"DstStageId":1}],"WaitInputTimeUs":1738,"TaskId":62}],"CpuTimeUs":80,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":9,"NodeId":1,"OutputChannels":[{"ChannelId":61,"DstStageId":1}],"WaitInputTimeUs":1685,"TaskId":61}],"CpuTimeUs":83,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":8,"NodeId":1,"OutputChannels":[{"ChannelId":63,
"DstStageId":1}],"WaitInputTimeUs":1713,"TaskId":63}],"CpuTimeUs":122,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":11,"NodeId":1,"OutputChannels":[{"ChannelId":64,"DstStageId":1}],"WaitInputTimeUs":1693,"TaskId":64}],"CpuTimeUs":69,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174598,"ComputeTimeUs":8,"NodeId":1,"OutputChannels":[{"ChannelId":55,"DstStageId":1}],"WaitInputTimeUs":1799,"TaskId":55}],"CpuTimeUs":85,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":13,"NodeId":1,"OutputChannels":[{"ChannelId":46,"DstStageId":1}],"WaitInputTimeUs":1924,"TaskId":46}],"CpuTimeUs":94,"DurationUs":4000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174603,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174598,"ComputeTimeUs":8,"NodeId":1,"OutputChannels":[{"ChannelId":56,"DstStageId":1}],"WaitInputTimeUs":1785,"TaskId":56}],"CpuTimeUs":85,"DurationUs":5000},{"PeakMemoryUsageBytes":131072,"Tasks":[{"FinishTimeMs":1732211174604,"Host":"ghrun-2rqap2spfm","StartTimeMs":1732211174599,"ComputeTimeUs":10,"NodeId":1,"OutputChannels":[{"ChannelId":47,"DstStageId":1}],"WaitInputTimeUs":1545,"TaskId":47}],"CpuTimeUs":84,"DurationUs":5000}],"UseLlvm":false,"DurationUs":{"Count":64,"Max":6000,"Sum":308000,"Min":4000},"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6}},"Name":"3","Push":{"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResumeMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"PauseMessageMs":{"Count":1,"Max":5,"Sum":5,"Min":5},"WaitTimeUs":{"Count":64,"Max":2324,"Sum":110856,"Min":1076},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":6,"Min":5}}}],"MaxMemoryUsage":{"Count":64,"Max":1048576,"Sum":67108864,"Min":1048576},"Tasks":64,"OutputRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":8000,"BaseTimeMs":1732211174596,"NodesScanShards":[{"node_id":1,"shards_count":3}],"WaitInputTimeUs":{"Count":64,"Max":3508,"Sum":127244,"Min":1173},"CpuTimeUs":{"Count":64,"Max":198,"Sum":4312,"Min":39},"OutputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16}}}],"Node Type":"Merge","SortColumns":["resource_id (Asc)","timestamp (Asc)"],"PlanNodeType":"Connection"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"PeakMemoryUsageBytes":65536,"Tasks":[{"InputBytes":16,"FinishTimeMs":1732211174604,"Host":"ghrun-2rqap2spfm","ResultRows":2,"ResultBytes":16,"OutputRows":2,"StartTimeMs":1732211174602,"InputRows":2,"InputChannels":[{"ChannelId":1,"WaitTimeUs":2842,"Rows":2,"Bytes":16,"SrcStageId":0},{"ChannelId":2,"SrcStageId":0},{"ChannelId":3,"SrcStageId":0},{"ChannelId":4,"SrcStageId":0},{"ChannelId":5,"SrcStageId":0},{"ChannelId":6,"SrcStageId":0},{"ChannelId":7,"SrcStageId":0},{"ChannelId":8,"SrcStageId":0},{"ChannelId":9,"SrcStageId":0},{"ChannelId":10,"SrcStageId":0},{"ChannelId":11,"SrcStageId":0},{"ChannelId":12,"SrcStageId":0},{"ChannelId":13,"SrcStageId":0},{"ChannelId":14,"SrcStageId":0},{"ChannelId":15,"SrcStageId":0},{"ChannelId":16,"SrcStageId":0},{"ChannelId":17,"SrcStageId":0},{"ChannelId":18,"SrcStageId":0},{"ChannelId":19,"SrcStageId":0},{"ChannelId":20,"SrcStageId":0},{"ChannelId":21,"SrcStageId":0},{"ChannelId":22,"SrcStageId":0},{"ChannelId":23,"SrcStageId":0},{"ChannelId":24,"SrcStageId":0},{"ChannelId":25,"SrcStageId":0},{"ChannelId":26,"SrcStageId":0},{"ChannelId":27,"SrcStageId":0},{"ChannelId":28,"SrcStageId":0},{"ChannelId":29,"SrcStageId":0},{"ChannelId":30,"SrcStageId":0},{"ChannelId":31,"SrcStageId":0},{"ChannelId":32,"SrcStageId":0},{"ChannelId":33,"SrcStageId":0},{"ChannelId":34,"SrcStageId":0},{"ChannelId":35,"SrcStageId":0},{"ChannelId":36,"SrcStageId":0},{"ChannelId":37,"SrcStageId":0},{"ChannelId":38,"SrcStageId":0},{"ChannelId":39,"SrcStageId":0},{"ChannelId":40,"SrcStageId":0},{"ChannelId":41,"SrcStageId":0},{"ChannelId":42,"SrcStageId":0},{"ChannelId":43,"SrcStageId":0},{"ChannelId":44,"SrcStageId":0},{"ChannelId":45,"SrcStageId":0},{"ChannelId":46,"SrcStageId":0},{"ChannelId":47,"SrcStageId":0},{"ChannelId":48,"SrcStageId":0},{"ChannelId":49,"SrcStageId":0},{"ChannelId":50,"SrcStageId":0},{"ChannelId":51,"SrcStageId":0},{"ChannelId":52,"SrcStageId":0},{"ChannelId":53,"SrcStageId":0},{"ChannelId":54,"SrcStageId":0},{"ChannelId":55,"SrcStageId":0},{"ChannelId":56,"SrcStageId":0},{"ChannelId":57,"SrcStageId":0},{"ChannelId":58,"SrcStageId":0},{"ChannelId":59,"SrcStageId":0},{"ChannelId":60,"SrcStageId":0},{"ChannelId":61,"SrcStageId":0},{"ChannelId":62,"SrcStageId":0},{"ChannelId":63,"SrcStageId":0},{"ChannelId":64,"SrcStageId":0}],"ComputeTimeUs":55,"NodeId":1,"OutputChannels":[{"ChannelId":65,"DstStageId":0,"Rows":2,"Bytes":16}],"WaitInputTimeUs":2822,"TaskId":65,"OutputBytes":16}],"CpuTimeUs":267,"DurationUs":2000}],"UseLlvm":false,"OutputRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"PhysicalStageId":1,"InputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"DurationUs":{"Count":1,"Max":2000,"Sum":2000,"Min":2000},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"Min":1048576},"BaseTimeMs":1732211174596,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResumeMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"PauseMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"WaitTimeUs":{"Count":1,"Max":2867,"Sum":2867,"Min":2867},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":6,"Min":4}}}]
,"CpuTimeUs":{"Count":1,"Max":110,"Sum":110,"Min":110},"StageDurationUs":2000,"WaitInputTimeUs":{"Count":1,"Max":2822,"Sum":2822,"Min":2822},"ResultRows":{"Count":1,"Max":2,"Sum":2,"Min":2},"ResultBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"OutputBytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6}},"Name":"1","Push":{"LastMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Rows":{"Count":1,"Max":2,"Sum":2,"Min":2},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"FirstMessageMs":{"Count":1,"Max":6,"Sum":6,"Min":6},"Bytes":{"Count":1,"Max":16,"Sum":16,"Min":16},"PauseMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"WaitTimeUs":{"Count":1,"Max":2842,"Sum":2842,"Min":2842},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":6,"Min":4}}}],"Tasks":1,"InputRows":{"Count":1,"Max":2,"Sum":2,"Min":2}}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":31018,"ProcessCpuTimeUs":92,"Compilation":{"FromCache":false,"CpuTimeUs":16427,"DurationUs":17078}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"A-Rows":2,"A-Cpu":0.198,"SortBy":"","Name":"Sort"}],"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":["resource_id","timestamp"],"E-Cost":"No estimate","SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":2},{"Id":1}]}}]}}],"Node Type":"TableFullScan"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::JsonDoc_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 32726, MsgBus: 19598 2024-11-21T17:46:14.111465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790342810830130:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.111484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e51/r3tmp/tmpTX2SeF/pdisk_1.dat 2024-11-21T17:46:14.156764Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32726, node 1 2024-11-21T17:46:14.169615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.169628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.169629Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.169656Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19598 TClient is connected to server localhost:19598 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:14.212308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.212336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:14.213339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:14.218339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.227245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:14.233874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.233926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.233956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.233978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.233989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.233999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.234008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.234021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.234037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.234053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.234067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.234081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342810830764:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.234417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:14.234429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:14.234436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:14.234444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:14.234454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:14.234460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:14.234465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.234472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.234481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.234487Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.234491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:14.234498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:14.234534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:14.234546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:14.234560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.234563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.234573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.234577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.234589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:14.234599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.234608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.234617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:14.236613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342810830767:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.236631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342810830767:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.236653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342810830767:2290];tablet_id=7207518622 ... 
d;id=CleanGranuleId; 2024-11-21T17:46:14.241966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.241969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.241975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.241978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.241983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:14.241987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:14.242009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:14.242013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:14.242027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.242031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.242041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.242044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.242058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:14.242062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.242072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.242074Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; 2024-11-21T17:46:14.370552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790342810831059:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.370571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790342810831070:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.370575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.371228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:14.372724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790342810831073:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:14.519285Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211174428, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsondoc, "$.col1") AND id = 6; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Filter":{"Predicate":{"Id":9}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":6}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Filter":{"Predicate":{"Id":9}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1256) '('"_id" '"45f24c99-97ab611d-fa625278-de06b87d") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $18 (Int32 '"6")) (let $19 (Just $18)) (let $20 (Int32 '1)) (let $21 '($19 $20)) (let $22 (If (== $18 (Int32 '2147483647)) $21 '((+ $19 $20) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply 
(Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (OptionalType (DataType 'Bool))) (let $11 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 (DataType 'Json)) (let $28 '($10)) (let $29 '((ResourceType '"JsonPath"))) (let $30 (DataType 'Utf8)) (let $31 (ResourceType '"JsonNode")) (let $32 (DictType $30 $31)) (let $33 '($32)) (let $34 (CallableType '() $28 '($26) $29 $33 $28)) (let $35 '('('"strict"))) (let $36 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $34 (VoidType) '"" $35)) (let $37 (CallableType '() $29 '($30))) (let $38 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $37 (VoidType) '"" '())) (let $39 (Apply $38 (Utf8 '"$.col1"))) (let $40 (Dict $32)) (let $41 (Bool 'false)) (let $42 (Just $41)) (let $43 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($44) (block '( (let $45 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $27)))) (let $46 (KqpOlapApply $45 '('"jsondoc") (lambda '($48) (Apply $36 $48 $39 $40 $42)))) (let $47 '('?? $46 $41)) (return (KqpOlapFilter $44 $47)) ))))) (return (FromFlow (NarrowMap $43 (lambda '($49 $50 $51) (block '( (let $52 (OptionalType $31)) (let $53 (CallableType '() $28 '($52) $29 $33 $28)) (let $54 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $53 (VoidType) '"" $35)) (let $55 (IfPresent $51 (lambda '($58) (block '( (let $59 '($27 '"" '1)) (let $60 (CallableType '() '($31) $59)) (let $61 (Udf '"Json2.Parse" (Void) (VoidType) '"" $60 (VoidType) '"" '())) (return (Just (Apply $61 $58))) ))) (Nothing $52))) (let $56 (Apply $54 $55 $39 $40 $42)) (let $57 (Apply $36 $50 $39 $40 $42)) (return (AsStruct '('"column1" $56) '('"column2" $57) '('"id" $49))) )))))) ))) '('('"_logical_id" '1326) '('"_id" '"5ff49e75-e473f5b3-96e09a2b-3855bd1d")))) (let $12 (DqCnUnionAll (TDqOutput $11 '0))) (let $13 (DqPhyStage '($12) (lambda '($62) $62) '('('"_logical_id" '2016) '('"_id" '"5b62d182-c4f6bebb-1a57053e-d4bec74e")))) (let $14 '('"id" '"column1" '"column2")) (let $15 (DqCnResult (TDqOutput $13 '0) $14)) (let $16 (KqpTxResultBinding $8 '0 '0)) (let $17 (KqpPhysicalTx '($11 $13) '($15) '('($5 $16)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $17) '((KqpTxResultBinding (ListType (StructType '('"column1" $10) '('"column2" $10) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> test.py::test[blocks-decimal_avg--Debug] [GOOD] >> test.py::test[blocks-decimal_avg--Plan] [GOOD] >> test.py::test[blocks-decimal_avg--Results] >> KqpOlap::BlockChannelForce [GOOD] >> KqpDatetime64ColumnShard::UseTimestamp64AsPrimaryKey >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes |75.6%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsTableInTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 24000, MsgBus: 22170 2024-11-21T17:46:13.380134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790338349911909:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:13.380155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e57/r3tmp/tmpd2mDbv/pdisk_1.dat 2024-11-21T17:46:13.424725Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24000, node 1 2024-11-21T17:46:13.438887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:13.438902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:13.438903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:13.438936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22170 TClient is connected to server localhost:22170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:13.481168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:13.481201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:13.482303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:13.483486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:13.671175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790338349912513:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.671214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.673903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:13.680565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.680594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.680624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.680636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.680649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.680660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.680680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.680702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.680735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.680753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.680782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.680807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790338349912581:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:13.681173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:13.681186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:13.681195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:13.681198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.681207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.681210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.681215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.681219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.681225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.681233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.681238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.681240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.681295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.681313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.681323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.681333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:13.681339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.681341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.681351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.681359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.681365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.681368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; CREATE TABL ... 2TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=24288;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=24288;columns=3; >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute |75.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[pg-select_win_max-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BlockChannelForce [GOOD] Test command err: Trying to start YDB, gRPC: 64404, MsgBus: 23069 2024-11-21T17:46:14.191864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790345022187354:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.192013Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e49/r3tmp/tmpQmvKJx/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64404, node 1 2024-11-21T17:46:14.247676Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:14.249910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.249924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.249926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.249960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23069 TClient is connected to server localhost:23069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:14.293257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.293301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:14.294354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:14.317310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.323709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.385433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.403852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.412635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:14.450144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790345022188908:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.450174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.477755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.485298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.492054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.499976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.507186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.521870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.529150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790345022189399:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.529176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.529202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790345022189404:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.529876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:14.533243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790345022189406:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:14.701628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.712688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.712688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.712716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.712739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.712797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.712800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.712825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.712827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.712848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.712848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.712861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.712863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.712877Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.712879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.712903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.712906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7439790345022189769:2460];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.712924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790345022189770:2461];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-1 ... NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.717082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.717089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.717096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.717107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:14.717114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.717121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.717128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:14.717230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:14.717240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:14.717248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:14.717259Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:14.717268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:14.717276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:14.717280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.717283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.717301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.717309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.717313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:14.717315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:14.717334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:14.717342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:14.717350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.717358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.717364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.717372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.717381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2024-11-21T17:46:14.717387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.717394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.717400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:14.717512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:14.717527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:14.717532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:14.717534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:14.717544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:14.717546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:14.717551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.717553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.717558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.717560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.717564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:14.717566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:14.717583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:14.717591Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:14.717600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.717608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.717615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.717622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.717630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:14.717632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.717639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.717641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:14.734306Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790345022190000:2535] TxId: 281474976715672. Ctx: { TraceId: 01jd7x9va6cyhf8382sh4vk4tj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgyMTc1NzgtYzJkMDVkMzMtZGYxYzljYjEtM2FkOTdlYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T17:46:14.830474Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790345022190101:2555] TxId: 281474976715674. Ctx: { TraceId: 01jd7x9vbx8cjn6j8j8yvkq1zj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzc0NzJkOWMtYjQyYWJlMDEtYzViZDhlZTUtNTkyNmExNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit >> KqpOlap::BulkUpsertUpdate |75.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[aggregate-ensure_count-default.txt-Results] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Results] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Debug] >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> KqpOlap::PredicatePushdownParameterTypesValidation [GOOD] >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable >> KqpOlapAggregations::Blocks_NoAggPushdown |75.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |75.7%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpDatetime64ColumnShard::UseTimestamp64AsPrimaryKey [GOOD] >> KqpDecimalColumnShard::TestAggregation >> KqpOlapAggregations::Aggregation_Count_Null >> test.py::test[blocks-decimal_avg--Results] [GOOD] >> KqpOlap::PredicatePushdownCastErrors >> test.py::test[blocks-decimal_unary--Debug] >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy >> KqpOlap::PushdownFilter [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy [GOOD] >> KqpOlapBlobsSharing::TableReshardingModuloN >> KqpOlap::ScanQueryOltpAndOlap >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable [GOOD] >> KqpOlapAggregations::Aggregation_Count_Null [GOOD] |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownParameterTypesValidation [GOOD] Test command err: Trying to start YDB, gRPC: 26649, MsgBus: 4569 2024-11-21T17:46:14.456495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790344587677413:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.456513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e42/r3tmp/tmpRcxw4I/pdisk_1.dat 2024-11-21T17:46:14.517161Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26649, node 1 2024-11-21T17:46:14.526775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.526788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.526789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.526822Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4569 2024-11-21T17:46:14.558306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.558329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2024-11-21T17:46:14.559395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:14.573079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.591167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.598243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.598308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.598346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.598362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.598376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.598389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.598403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.598421Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.598437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.598453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.598469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.598484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344587678048:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.598791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:14.598803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:14.598815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:14.598825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:14.598839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:14.598842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:14.598852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.598859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.598868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.598877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.598883Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:14.598887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:14.598944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:14.598954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:14.598971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.598979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.598988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.598996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.599012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:14.599022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.599032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.599035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:14.647955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211174624 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "olapStore" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211174694 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 720... (TRUNCATED) TClient::Ls request: /Root/olapStore TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "olapStore" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211174694 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } } Children { Name: "OlapParametersTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732211174701 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 Schemesha... (TRUNCATED) TClient::Ls request: /Root/olapStore/OlapParametersTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "OlapParametersTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732211174701 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 I... (TRUNCATED) 2024-11-21T17:46:14.721186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790344587678265:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.721221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790344587678276:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.721227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.721773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:14.723087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790344587678279:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T17:46:14.861158Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211174778, txId: 18446744073709551615] shutting down 2024-11-21T17:46:14.905650Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211174890, txId: 18446744073709551615] shutting down 2024-11-21T17:46:14.954971Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211174946, txId: 18446744073709551615] shutting down 2024-11-21T17:46:14.995694Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.043048Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175037, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.096418Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175079, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.159678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175128, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.231165Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175191, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.282878Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175268, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.336729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175324, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.386739Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175373, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.442205Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175422, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.497553Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175478, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.550683Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175534, txId: 18446744073709551615] shutting down 2024-11-21T17:46:15.609324Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211175590, txId: 18446744073709551615] shutting down |75.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_UsesGenericQueryOnJoinWithDataShardTable [GOOD] Test command err: Trying to start YDB, gRPC: 3101, MsgBus: 61455 2024-11-21T17:46:16.038879Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790354358501626:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.039082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e32/r3tmp/tmpq7XJZB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3101, node 1 2024-11-21T17:46:16.109952Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:16.110172Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:16.110185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:16.110186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:16.110213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61455 TClient is connected to server localhost:61455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:16.139144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:16.139176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:16.140318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:16.145709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:16.157807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:16.166457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.166519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.166569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.166594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.166616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.166640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.166662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.166686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.166710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.166732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.166756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.166781Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790354358502259:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.167249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.167262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.167281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.167289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.167307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.167322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.167337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.167350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.167364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.167372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.167383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.167391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.167445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.167455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.167471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.167479Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.167496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.167506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.167522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.167525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.167537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.167544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.170742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790354358502260:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.170766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790354358502260:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.170806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790354358502260:2289];tablet_id=720751862240 ... 
escription=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.240776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.240934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.240943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.240953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.240961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.240983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.240991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.240999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.241008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.241021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.241030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.241040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.241048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.241080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.241090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.241109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.241118Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.241133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.241142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.241162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.241170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.241184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.241191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.241249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.241258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.241266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.241274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.241305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.241313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.241321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.241329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.241340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.241348Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.241353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.241361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.241391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.241400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.241417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.241425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.241440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.241449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.241468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.241476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.241486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.241493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.276674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T17:46:16.310639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790354358502791:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.310711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790354358502783:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.310723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.311390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:16.312794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790354358502797:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 13407, MsgBus: 23751 2024-11-21T17:46:13.752028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790339203613328:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:13.752298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e52/r3tmp/tmpmSBoNq/pdisk_1.dat 2024-11-21T17:46:13.793780Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13407, node 1 2024-11-21T17:46:13.805300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:13.805314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:13.805315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:13.805351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23751 TClient is connected to server localhost:23751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:13.846161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:13.852339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:13.852366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:13.853426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:13.857110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:13.864541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.864584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.864605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.864621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.864637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.864651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.864665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.864679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.864692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.864706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.864720Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.864735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790339203613954:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.866560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.866579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.866595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.866608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.866621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.866629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.866643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.866655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.866669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.866681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.866693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.866705Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790339203613961:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.868444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.868460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.868477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.868493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.868505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.868517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.868530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.868543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790339203613955:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.868555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74397903392036139 ... 
9;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.871319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.871321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.871325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.871326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.871330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.871333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.871336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.871338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.871349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.871355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.871361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.871363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.871369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.871374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.871381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.871387Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.871392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.871397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; 2024-11-21T17:46:14.006283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790343498581545:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.006300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790343498581555:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.006304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.006776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:14.007922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790343498581559:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:16.474109Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211174064, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id > 4 AND id < 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (4, 7)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (4, 7)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1732) '('"_id" '"470f063-f0362ec1-e485f11b-7d42707b") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '0)) (let $27 '((Nothing $2) $26)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"7")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType 
(TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1791) '('"_id" '"d4c6b760-fcf81fab-f64125ae-204f7e70") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3379) '('"_id" '"2892188b-8fada0bf-4579ba13-587aecca") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3391) '('"_id" '"9798bfd3-902e196e-e9197d2e-f963035b")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PushdownFilter [GOOD] Test command err: Trying to start YDB, gRPC: 63856, MsgBus: 32652 2024-11-21T17:46:14.998549Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790345393877025:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.998985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e39/r3tmp/tmpVZMoJK/pdisk_1.dat 2024-11-21T17:46:15.044249Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63856, node 1 2024-11-21T17:46:15.057376Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:15.057389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:15.057390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T17:46:15.057415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32652 TClient is connected to server localhost:32652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:15.098650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:15.099518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:15.099536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:46:15.100646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:15.111115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:15.117660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:15.119665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:15.119704Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2024-11-21T17:46:15.120166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:15.120210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:15.120268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:15.120297Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:15.120317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:15.120334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:15.120357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:15.120375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:15.120402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:15.120423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:15.120446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:15.120467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790349688844970:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:15.121130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:15.123270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:15.123306Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T17:46:15.123819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:15.123833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:15.123862Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:15.123875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:15.123886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:15.123896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:15.123906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:15.123916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:15.123927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:15.123938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:15.123949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:15.123958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790349688844969:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:15.124319Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T17:46:15.124335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:46:15.124338Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:46:15.124349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:46:15.124388Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:15.124398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:15.124400Z node 1 :TX_COLUMNSHARD NOTICE: tab ... s } 2024-11-21T17:46:16.391693Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790353683188519:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.391697Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.392314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:16.393768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790353683188522:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:16.507892Z node 3 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 3 version: {1732211176444:max} readable: {1732211176549:max} at tablet 72075186224037889 2024-11-21T17:46:16.507902Z node 3 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 3 version: {1732211176444:max} readable: {1732211176549:max} at tablet 72075186224037890 2024-11-21T17:46:16.507935Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037889 2024-11-21T17:46:16.507944Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037890 2024-11-21T17:46:16.508029Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[3:7439790353683188234:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211176444:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? \002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 4BlockCoalesce\000?.?2\000\000\000/" ; 2024-11-21T17:46:16.508046Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[3:7439790353683188267:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211176444:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? 
\002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 4BlockCoalesce\000?.?2\000\000\000/" ; 2024-11-21T17:46:16.511925Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[3:7439790353683188234:2289];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211176444:max};tablet=72075186224037889;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2024-11-21T17:46:16.512026Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[3:7439790353683188267:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211176444:max};tablet=72075186224037890;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2024-11-21T17:46:16.512169Z node 3 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 3 version: {1732211176444:max} readable: {1732211176549:max} at tablet 72075186224037888 2024-11-21T17:46:16.512192Z node 3 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 3 at tablet 72075186224037888 2024-11-21T17:46:16.512302Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:7439790353683188233:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211176444:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Id: 6 } Constant { Text: "5" } } } Command { Assign { Column { Id: 7 } Function { Arguments { Id: 2 } Arguments { Id: 6 } FunctionType: YQL_KERNEL KernelIdx: 0 YqlOperationId: 11 } } } Command { Assign { Column { Id: 8 } Constant { Uint8: 0 } } } Command { Assign { Column { Id: 9 } Function { Arguments { Id: 7 } Arguments { Id: 8 } FunctionType: YQL_KERNEL KernelIdx: 1 YqlOperationId: 17 } } } Command { Filter { Predicate { Id: 9 } } } Command { Projection { Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 2 } Columns { Id: 1 } Columns { Id: 3 } } } Version: 5 Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\010\207\203\001H\203\001H\207\203\014\203\014\213\004?\010?\n\001\235?\014\001\235?\016\001\006\000\t\211\010?\024\235?\002\001\235?\004\000\235?\010\001\235?\n\000\006\000\t\251\000?\032\002\000\t\251\000?\034\002\000\t\251\000?\036\002\000\t\251\000? \002\000\000\t\211\004?\026?\036\235?\n\001\006\000\t\211\006?\036\203\005@?\032?\034$BlockFunc\000\003?:\014Equals?&?*\001\t\211\004?6?\036? 
4BlockCoalesce\000?.?2\000\000\000/" ; 2024-11-21T17:46:16.512389Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:7439790353683188233:2288];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=3;gen=1;table=/Root/olapStore/olapTable;snapshot={1732211176444:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=5;column=G:6;};{yql_op=Equals;arguments=[resource_id;G:6;];kernel=local_function;column=G:7;};{op=Constant;const=0;column=G:8;};{yql_op=Coalesce;arguments=[G:7;G:8;];kernel=local_function;column=G:9;};];filters=[G:9;];projections=[level;message;resource_id;timestamp;uid;];};]; 2024-11-21T17:46:16.512452Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7439790353683188303:2303];tablet_id=72075186224037889;parent=[3:7439790353683188234:2289];fline=manager.h:99;event=ask_data;request=request_id=7;3={portions_count=1};; 2024-11-21T17:46:16.512513Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7439790353683188289:2297];tablet_id=72075186224037890;parent=[3:7439790353683188267:2291];fline=manager.h:99;event=ask_data;request=request_id=8;3={portions_count=1};; 2024-11-21T17:46:16.512585Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:46:16.512621Z node 3 :TX_COLUMNSHARD INFO: self_id=[3:7439790353683188296:2300];tablet_id=72075186224037888;parent=[3:7439790353683188233:2288];fline=manager.h:99;event=ask_data;request=request_id=9;3={portions_count=1};; 2024-11-21T17:46:16.512626Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:46:16.512665Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:46:16.512674Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:46:16.512844Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:46:16.512867Z node 3 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:46:16.512886Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:46:16.512899Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037889 2024-11-21T17:46:16.512954Z node 3 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:46:16.513064Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2024-11-21T17:46:16.513213Z node 3 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037890 2024-11-21T17:46:16.515163Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211176444, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Count_Null [GOOD] Test command err: Trying to start YDB, gRPC: 
23719, MsgBus: 19635 2024-11-21T17:46:16.188673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790351437229783:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.188864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e29/r3tmp/tmpaZkmTw/pdisk_1.dat 2024-11-21T17:46:16.233536Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23719, node 1 2024-11-21T17:46:16.247783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:16.247793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:16.247794Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:16.247819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19635 TClient is connected to server localhost:19635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:16.288928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:16.288954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:16.290062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:16.294127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:16.303238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:16.311692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.311781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.311842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.311865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.311884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.311903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.311922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.311951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.311973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.311996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.312015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.312061Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790351437230414:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.312594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.312610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.312624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.312630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.312664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.312668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.312680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.312690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.312699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.312708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.312714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.312719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.312777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.312788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.312803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.312812Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.312824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.312832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.312850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.312860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.312877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.312885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.316166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790351437230415:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.316196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790351437230415:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.316254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790351437230415:2289];tablet_id=7207518622 ... 
4-11-21T17:46:16.327499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.327510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.327514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.327535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.327544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.327556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.327568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.327578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.327588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.327595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.327603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.327644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.327656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.327672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.327681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.327695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2024-11-21T17:46:16.327704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.327726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.327735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.327751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.327758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; 2024-11-21T17:46:16.444320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790351437230722:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.444344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790351437230711:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.444365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.445022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:16.446567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790351437230725:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:16.646150Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211176500, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(level) FROM `/Root/tableWithNulls` WHERE id > 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["level"],"scan_by":["id (5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (5, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":7}}]}},{"Projection":{"Columns":[{"Id":7}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (declare %kqp%tx_result_binding_1_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '704) '('"_id" '"a5d01541-20578c5d-6697825e-f381021a") $3)) (let $5 (DqPhyStage '() (lambda '() (block '( (let $28 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) $28) '((Nothing $2) $28))))))))))) ))) $4)) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 (KqpPhysicalTx '($5) '($6) '() '('('"type" '"compute")))) (let $8 '"%kqp%tx_result_binding_0_0") (let $9 (TupleType $2 $1)) 
(let $10 (TupleType (ListType (TupleType $9 $9)))) (let $11 (DataType 'Uint64)) (let $12 '('('"_logical_id" '762) '('"_id" '"e2b4d69b-cf9ba7cb-10c54db-d8f3faab") '('"_wide_channels" (StructType '('_yql_agg_0 $11))))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"level") '() $30 (lambda '($32) (TKqpOlapAgg $32 '('('_yql_agg_0 'count '"level")) '())))) (return (FromFlow $31)) ))) $12)) (let $14 (DqCnUnionAll (TDqOutput $13 '0))) (let $15 (DqPhyStage '($14) (lambda '($33) (block '( (let $34 (Bool 'false)) (let $35 (WideCondense1 (ToFlow $33) (lambda '($37) $37) (lambda '($38 $39) $34) (lambda '($40 $41) (AggrAdd $40 $41)))) (let $36 (Condense (NarrowMap (Take $35 (Uint64 '1)) (lambda '($42) (AsStruct '('Count0 $42)))) (Nothing (OptionalType (StructType '('Count0 $11)))) (lambda '($43 $44) $34) (lambda '($45 $46) (Just $45)))) (return (FromFlow (Map $36 (lambda '($47) (AsList (AsStruct '('"column0" (Coalesce (Member $47 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1317) '('"_id" '"c2c9e708-299b31b4-e2a88506-bf9920f")))) (let $16 (DqCnValue (TDqOutput $15 '0))) (let $17 (KqpTxResultBinding $10 '0 '0)) (let $18 '('('"type" '"scan"))) (let $19 (KqpPhysicalTx '($13 $15) '($16) '('($8 $17)) $18)) (let $20 '"%kqp%tx_result_binding_1_0") (let $21 (ListType (StructType '('"column0" $11)))) (let $22 '('('"_logical_id" '1420) '('"_id" '"7aa328fc-f8e09918-6f07b62c-55ab954c") $3)) (let $23 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_1_0)) $22)) (let $24 (DqCnResult (TDqOutput $23 '0) '('"column0"))) (let $25 (KqpTxResultBinding $21 '1 '0)) (let $26 (KqpPhysicalTx '($23) '($24) '('($20 $25)) $18)) (let $27 '($7 $19 $26)) (return (KqpPhysicalQuery $27 '((KqpTxResultBinding $21 '"2" '0)) '('('"type" '"scan_query")))) ) >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> test.py::test[window-presort_window_order_by_table-default.txt-Debug] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Plan] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] >> KqpOlap::ScanQueryOltpAndOlap [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Debug] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile >> test.py::test[blocks-decimal_unary--Debug] [GOOD] >> test.py::test[blocks-decimal_unary--Plan] [GOOD] >> test.py::test[blocks-decimal_unary--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ScanQueryOltpAndOlap [GOOD] Test command err: Trying to start YDB, gRPC: 5194, MsgBus: 15397 2024-11-21T17:46:16.775202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790350774944287:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.775407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/000e1d/r3tmp/tmpjLyyR6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5194, node 1 2024-11-21T17:46:16.825094Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:16.829774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:16.829786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:16.829787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:16.829823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15397 TClient is connected to server localhost:15397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:16.876110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:16.876143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:16.877230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:16.903666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:16.914680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:16.923365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.923414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.923445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.923462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.923481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.923499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.923517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.923547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.923574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.923594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.923615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.923635Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790350774944940:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.926247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.926265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.926288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.926298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.926313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.926326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.926339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.926354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.926369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.926383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.926395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.926407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790350774944941:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.926793Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.926806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.926816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.926824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.926844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.926851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.926863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.926873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.926884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.926893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.926902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889 ... 
alization_finished; 2024-11-21T17:46:16.933792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.933801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.933809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.933818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.933831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.933840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.933852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.933861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.933869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.933877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.933882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.933890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.933917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.933925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.933944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.933951Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.933961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.933969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.933987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.933995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.934009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.934012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.934069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.934078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.934085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.934093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.934105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.934112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.934119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.934127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.934134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.934141Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.934146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.934149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.934171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.934179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.934192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.934199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.934207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.934215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.934227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.934234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.934243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.934250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.972104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3632;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3632;columns=5; 2024-11-21T17:46:16.990259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:17.049908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790355069912635:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:17.049912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790355069912627:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:17.049921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:17.050440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:17.053275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790355069912641:2390], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:46:17.215072Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211177249, txId: 281474976715664] shutting down >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> KqpOlapIndexes::IndexesActualization [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupBy >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_1 >> KqpOlap::ManyColumnShardsWithRestarts >> KqpOlapWrite::TestRemoveTableBeforeIndexation >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> KqpOlapAggregations::Blocks_NoAggPushdown [GOOD] >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols >> KqpOlapAggregations::BlockGenericSelectAll >> test.py::test[blocks-decimal_unary--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDecimalColumnShard::TestAggregation Test command err: Trying to start YDB, gRPC: 29037, MsgBus: 29461 2024-11-21T17:46:15.307816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790346448237297:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:15.308111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e38/r3tmp/tmpVJ8E97/pdisk_1.dat 2024-11-21T17:46:15.354279Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29037, node 1 2024-11-21T17:46:15.365749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:15.365762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:15.365765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:15.365812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29461 TClient is connected to server localhost:29461 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:15.408774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:15.408806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:15.409860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:15.435678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (timestamp Timestamp64 NOT NULL, interval Interval64, PRIMARY KEY (timestamp)) PARTITION BY HASH(timestamp) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:15.561112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790346448237900:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:15.561139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:15.578960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:15.586552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:15.586609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:15.586654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:15.586675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:15.586705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:15.586730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:15.586754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:15.586780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:15.586817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:15.586841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:15.586863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:15.586885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790346448237976:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:15.587377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:15.587392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:15.587406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:15.587411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:15.587427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:15.587437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:15.587451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:15.587465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:15.587479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:15.587488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:15.587498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:15.587506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:15.587577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:15.587595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:15.587615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:15.587623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:15.587634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:15.587642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:15.587659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:15.587666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:15.587678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:15.587686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event= ... ONS_COUNT = 1); 2024-11-21T17:46:16.812595Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790352749468865:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.812621Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.813824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:16.818536Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.818557Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.818584Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.818601Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.818616Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.818632Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.818646Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.818661Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.818677Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.818693Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.818708Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.818724Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7439790352749468911:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:16.819026Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.819038Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.819045Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.819055Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.819074Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.819080Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.819085Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.819092Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.819098Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.819104Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.819108Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.819113Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.819144Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.819151Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.819161Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.819163Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:16.819170Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.819176Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.819186Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.819192Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.819199Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.819205Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow14Decimal128TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=296;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=296;columns=3; 2024-11-21T17:46:16.891016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790354699873584:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.891049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.891118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790354699873589:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.891749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:16.892993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790354699873591:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> test.py::test[window-presort_window_order_by_table-default.txt-Results] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesActualization [GOOD] Test command err: Trying to start YDB, gRPC: 9063, MsgBus: 7592 2024-11-21T17:46:09.614158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790321003850491:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:09.614298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e83/r3tmp/tmptCrJLz/pdisk_1.dat 2024-11-21T17:46:09.664889Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9063, node 1 2024-11-21T17:46:09.676412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:09.676426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:09.676428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:09.676466Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7592 TClient is connected to server localhost:7592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:09.714762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.714788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.715916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:09.744869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:09.762094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.772017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.772080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.772122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.772141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.772163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.772186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.772205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.772221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.772259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.772282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.772306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.772329Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.772794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.772808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.772822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.772831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.772846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.772854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.772862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.772872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.772878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.772886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.772892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.772899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.772947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.772956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.772968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.772976Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.772986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.772993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.773007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.773014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.773023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.773030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:09.775928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.775952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.775981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];tablet_id=720751862240378 ... 
1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2024-11-21T17:46:17.840758Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790321003851142:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T17:46:17.840760Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T17:46:17.840764Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732210877781;tx_id=18446744073709551615;;current_snapshot_ts=1732211170000; 2024-11-21T17:46:17.840764Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790321003851142:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732210877781;tx_id=18446744073709551615;;current_snapshot_ts=1732211170000; 2024-11-21T17:46:17.840767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:17.840767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790321003851142:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:17.840769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790321003851142:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840771Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790321003851142:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:17.840774Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790321003851142:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:17.840777Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790321003851149:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:17.840780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790321003851142:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:17.840785Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T17:46:17.840795Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:17.840798Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:17.840804Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2024-11-21T17:46:17.840813Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T17:46:17.840817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732210877781;tx_id=18446744073709551615;;current_snapshot_ts=1732211170000; 2024-11-21T17:46:17.840819Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:17.840821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840822Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:17.840834Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:17.840846Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211177781 at tablet 72075186224037888 2024-11-21T17:46:17.840855Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:17.840858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2024-11-21T17:46:17.840861Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T17:46:17.840865Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=1732210877781;tx_id=18446744073709551615;;current_snapshot_ts=1732211170000; 2024-11-21T17:46:17.840868Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:17.840883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840891Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:17.840893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:17.840898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790321003851140:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:17.841080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790321003851147:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T17:46:17.841099Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790321003851147:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2024-11-21T17:46:17.841110Z node 1 
:TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:17.841116Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790321003851147:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:17.841125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439790321003851147:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2024-11-21T17:46:17.841141Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211177781 at tablet 72075186224037891 2024-11-21T17:46:17.841144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790321003851147:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:17.841146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439790321003851147:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Blocks_NoAggPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 5908, MsgBus: 27808 2024-11-21T17:46:16.155782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790352132694990:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.155899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e2f/r3tmp/tmpuiZQgA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5908, node 1 2024-11-21T17:46:16.223517Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:16.223529Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:16.223530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:16.223548Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:16.223555Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27808 TClient is connected to server localhost:27808 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:16.256869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:16.256889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:16.257884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:16.287870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:16.300830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:16.308361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.308414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.308439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.308451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.308466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.308479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.308494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.308510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.308528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.308540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.308554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.308569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790352132695629:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.308930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.308945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.308955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.308959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.308971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.308980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.308989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.308996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.309010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.309017Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.309025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.309033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.309088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.309100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.309117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.309121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.309133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.309140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.309152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.309154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.309161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.309167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.311633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790352132695628:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.311652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790352132695628:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.311674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790352132695628:2288];tablet_id=720751862240 ... 
=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.317420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.317422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.317429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.317435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.317439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.317441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.317445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.317448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.317450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.317453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.317467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.317470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.317481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.317487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.317492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.317494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.317502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.317508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.317513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.317515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls`; 2024-11-21T17:46:16.411091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790352132695923:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.411109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790352132695934:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.411115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.411759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:16.413251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790352132695937:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:17.979602Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211176465, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls`; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":6,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Aggregate-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (lambda '($26) $26)) (let $2 '('('"_logical_id" '601) '('"_id" '"ca3734f3-3a2a9b46-bfcf6f33-cd294d2a") '('"_wide_channels" (StructType '('"id" (DataType 'Int32)))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 (KqpBlockReadOlapTableRanges $23 (Void) '('"id") '() '() (lambda '($25) $25))) (return (FromFlow (WideCombiner (WideFromBlocks $24) '-1073741824 $1 (lambda '($27 $28) $27) (lambda '($29 $30 $31) $31) (lambda '($32 $33) 
$33)))) ))) $2)) (let $4 (DqCnHashShuffle (TDqOutput $3 '0) '('0))) (let $5 (Uint64 '1)) (let $6 (Bool 'false)) (let $7 (DataType 'Uint64)) (let $8 '('('"_logical_id" '1309) '('"_id" '"2e7b7856-9d22cf82-addb80bc-33936219") '('"_wide_channels" (StructType '('_yql_agg_0 (OptionalType $7)))))) (let $9 (DqPhyStage '($4) (lambda '($34) (block '( (let $35 (lambda '($38 $39))) (let $36 (WideCombiner (ToFlow $34) '"" $1 $35 $35 $1)) (let $37 (Condense1 (NarrowMap $36 (lambda '($40) (AsStruct '('"id" $40)))) (lambda '($41) $5) (lambda '($42 $43) $6) (lambda '($44 $45) (Inc $45)))) (return (FromFlow (ExpandMap $37 (lambda '($46) (Just $46))))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '0))) (let $11 (DqPhyStage '($10) (lambda '($47) (block '( (let $48 (Condense (NarrowMap (WideFromBlocks (WideTakeBlocks (WideToBlocks (WideCondense1 (ToFlow $47) (lambda '($49) $49) (lambda '($50 $51) $6) (lambda '($52 $53) (IfPresent $52 (lambda '($54) (IfPresent $53 (lambda '($55) (Just (AggrAdd $54 $55))) $52)) $53)))) $5)) (lambda '($56) (AsStruct '('Count0 (Unwrap $56))))) (Nothing (OptionalType (StructType '('Count0 $7)))) (lambda '($57 $58) $6) (lambda '($59 $60) (Just $59)))) (return (FromFlow (Map $48 (lambda '($61) (AsList (AsStruct '('"column0" (Coalesce (Member $61 'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '1908) '('"_id" '"ff36d77a-26371de6-930f4108-8324e6b")))) (let $12 '($3 $9 $11)) (let $13 (DqCnValue (TDqOutput $11 '0))) (let $14 '('('"type" '"scan"))) (let $15 (KqpPhysicalTx $12 '($13) '() $14)) (let $16 '"%kqp%tx_result_binding_0_0") (let $17 (ListType (StructType '('"column0" $7)))) (let $18 '('('"_logical_id" '2011) '('"_id" '"65cf3401-4f960708-26bd54b3-e890b0fa") '('"_partition_mode" '"single"))) (let $19 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $18)) (let $20 (DqCnResult (TDqOutput $19 '0) '('"column0"))) (let $21 (KqpTxResultBinding $17 '0 '0)) (let $22 (KqpPhysicalTx '($19) '($20) '('($16 $21)) $14)) (return (KqpPhysicalQuery '($15 $22) '((KqpTxResultBinding $17 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpDatetime64ColumnShard::UseDate32AsPrimaryKey >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] Test command err: 2024-11-21T17:45:03.525308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:03.525333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:03.525338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:03.525342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:03.525355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:03.525359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:03.525367Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:03.525427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:03.532564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:03.532583Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:03.535738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:03.535817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:03.535841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:45:03.542545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:03.542807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:03.542897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.542960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:03.544669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:03.545332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:03.545346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:03.545377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:03.545385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:03.545391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:03.545433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.592838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:45:03.592913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.592969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:45:03.593003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:45:03.593008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.594689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.594715Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:45:03.594753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.594762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:45:03.594765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:03.594769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:03.599824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.599847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:45:03.599854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:03.604638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.604660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.604668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:03.604679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:03.605303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:03.605911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:03.605966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:45:03.606178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.606184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:45:03.606189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.145018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.145074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 657 RawX2: 4294969527 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:04.145082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:04.145156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:04.145164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:04.145195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:04.145203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:04.148481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:04.148496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:04.148550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:04.148554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:664:2236], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:04.148600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.148607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:04.148618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:04.148620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:04.148639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:04.148643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:04.148646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:04.148649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:04.148659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:04.148663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:04.148666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:04.149022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:04.149051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:04.149055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:04.149060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:04.149065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:04.149080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:04.149084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:521:2138] 2024-11-21T ... 519342Z node 169 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:16.519369Z node 169 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:16.519372Z node 170 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:16.519400Z node 170 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:16.519404Z node 171 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:16.519432Z node 171 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:16.519435Z node 163 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:16.519457Z node 163 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:16.519468Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519488Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519502Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519507Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519511Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519519Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519523Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519527Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519535Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519540Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519545Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519552Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519556Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519559Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519566Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519569Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519573Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519579Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519583Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519587Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519593Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519598Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:16.519602Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:16.519609Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:16.519679Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to 
hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.519683Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.519686Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.519699Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[169:961:2101] 2024-11-21T17:46:16.519747Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.519751Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.519753Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.519758Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[170:963:2101] 2024-11-21T17:46:16.519800Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.519804Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.519806Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.519811Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[171:965:2101] 2024-11-21T17:46:16.519847Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.519851Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.519853Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.519857Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[164:967:2101] 2024-11-21T17:46:16.519889Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.519895Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.519897Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.519901Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[165:969:2101] 2024-11-21T17:46:16.519934Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.519938Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.519940Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.519945Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[166:971:2101] 2024-11-21T17:46:16.519978Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.519982Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.519984Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.519990Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[167:973:2103] 2024-11-21T17:46:16.520020Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:16.520024Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:16.520026Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:16.520031Z node 168 :LOCAL DEBUG: 
TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[168:975:2101] 2024-11-21T17:46:16.522176Z node 170 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522189Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [170:933:2098] 2024-11-21T17:46:16.522204Z node 171 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522207Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [171:934:2098] 2024-11-21T17:46:16.522222Z node 164 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522226Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [164:927:2098] 2024-11-21T17:46:16.522236Z node 165 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522241Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [165:928:2098] 2024-11-21T17:46:16.522251Z node 166 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522254Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [166:929:2098] 2024-11-21T17:46:16.522264Z node 167 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522267Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [167:930:2100] 2024-11-21T17:46:16.522276Z node 168 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522280Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [168:931:2098] 2024-11-21T17:46:16.522296Z node 169 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:16.522300Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [169:932:2098] 2024-11-21T17:46:16.522823Z node 168 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[168:975:2101] 2024-11-21T17:46:16.522845Z node 169 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[169:961:2101] 2024-11-21T17:46:16.522863Z node 170 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[170:963:2101] 2024-11-21T17:46:16.522951Z node 171 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[171:965:2101] 2024-11-21T17:46:16.522966Z node 164 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[164:967:2101] 2024-11-21T17:46:16.522970Z node 165 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[165:969:2101] 2024-11-21T17:46:16.523009Z node 166 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[166:971:2101] 2024-11-21T17:46:16.523012Z node 167 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[167:973:2103] 2024-11-21T17:46:16.523018Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523025Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523027Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:16.523032Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523035Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523037Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar 
SendStatusOk 2024-11-21T17:46:16.523082Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523085Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523087Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:16.523129Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523132Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523134Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:16.523146Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523148Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523150Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:16.523182Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523184Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523186Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:16.523190Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523192Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523194Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:16.523212Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:16.523215Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:16.523217Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 16701, MsgBus: 21560 2024-11-21T17:46:18.281327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790359722813717:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:18.281346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e15/r3tmp/tmpP9rag8/pdisk_1.dat 2024-11-21T17:46:18.327236Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16701, node 1 2024-11-21T17:46:18.340540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:18.340558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:18.340560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:18.340603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21560 2024-11-21T17:46:18.382470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:18.382500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:18.383558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21560 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:18.398641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:18.407096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:18.414551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.414606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.414631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.414651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.414670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.414688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.414702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.414719Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.414737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.414753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.414773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.414791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359722814358:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.415142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:18.415158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:18.415169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:18.415178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:18.415192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:18.415201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:18.415207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:18.415215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:18.415226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:18.415230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:18.415239Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:18.415242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:18.415287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:18.415297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:18.415308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:18.415316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.415324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:18.415333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:18.415345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:18.415352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:18.415360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:18.415366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:18.417063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359722814359:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.417078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359722814359:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.417091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359722814359:2289];tablet_id=7207518622 ... 
ormalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:18.422483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:18.422559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:18.422570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:18.422578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:18.422582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:18.422602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:18.422611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:18.422619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:18.422629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:18.422637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:18.422647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:18.422651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:18.422655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:18.422687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:18.422696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:18.422713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T17:46:18.422723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.422733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:18.422743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:18.422755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:18.422764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:18.422774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:18.422782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; 2024-11-21T17:46:18.547274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790359722814652:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:18.547298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:18.547310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790359722814657:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:18.548077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:18.549967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790359722814666:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, resource_id, level FROM `/Root/tableWithNulls` WHERE level != 5 OR level IS NULL ORDER BY id, resource_id, level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"level != 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Sort-Filter-TableFullScan"}],"Node Type":"Merge","SortColumns":["id (Asc)","resource_id (Asc)","level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":12,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Assign":{"Function":{"Id":7,"Arguments":[{"Id":3}]},"Column":{"Id":11}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":11}]},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":1,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":10},{"Id":12}]},"Column":{"Id":13}}},{"Filter":{"Predicate":{"Id":13}}},{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level != 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"SortBy":"","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Int32)) (let $2 (StructType '('"id" $1) '('"level" (OptionalType $1)) '('"resource_id" (OptionalType (DataType 'Utf8))))) (let $3 '('('"_logical_id" '880) '('"_id" '"39644f99-b582918d-8c93827c-88d7a26d") '('"_wide_channels" $2))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $10 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $11 '('"id" '"level" '"resource_id")) (let $12 (KqpBlockReadOlapTableRanges $10 (Void) $11 '() '() (lambda '($15) (block '( (let $16 '('neq '"level" (Int32 '"5"))) (let $17 '('?? $16 (Bool 'false))) (return (KqpOlapFilter $15 (KqpOlapOr $17 '('empty '"level")))) ))))) (let $13 (Bool 'true)) (let $14 '('('0 $13) '('2 $13) '('1 $13))) (return (FromFlow (WideFromBlocks (WideSortBlocks $12 $14)))) ))) $3)) (let $5 '('('0 '"Asc") '('2 '"Asc") '('1 '"Asc"))) (let $6 (DqCnMerge (TDqOutput $4 '0) $5)) (let $7 (DqPhyStage '($6) (lambda '($18) (FromFlow (NarrowMap (ToFlow $18) (lambda '($19 $20 $21) (AsStruct '('"id" $19) '('"level" $20) '('"resource_id" $21)))))) '('('"_logical_id" '892) '('"_id" '"64ac6870-8333dc61-ddd1f324-e799e54a")))) (let $8 '('"id" '"resource_id" '"level")) (let $9 (DqCnResult (TDqOutput $7 '0) $8)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($4 $7) '($9) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $2) '"0" '"0")) '('('"type" '"query")))) ) >> KqpOlap::PredicatePushdownPartial >> DataShardVolatile::TwoAppendsMustBeVolatile [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain >> KqpOlap::SimpleRangeOlap >> KqpDatetime64ColumnShard::UseDate32AsPrimaryKey [GOOD] >> KqpDatetime64ColumnShard::Csv >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] |75.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |75.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[pg_catalog-pg_stat_database-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 27412, MsgBus: 21023 2024-11-21T17:46:16.637555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790354909668350:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.637755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e26/r3tmp/tmpMjv2ak/pdisk_1.dat 2024-11-21T17:46:16.683426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27412, node 1 2024-11-21T17:46:16.693719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:16.693731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:16.693732Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:16.693758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:21023 TClient is connected to server localhost:21023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:16.738219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:16.738249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:16.739291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:16.767784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:16.776220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:16.787032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.787113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.787162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.787190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.787215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.787241Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.787264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.787289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.787314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.787344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.787368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.787397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790354909668986:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.787881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.787898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.787909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.787914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.787930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.787934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.787947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.787956Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.787973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.787983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.787990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.788000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.788063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.788076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.788092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.788102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.788115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.788126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.788143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.788153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.788165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.788175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.791238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354909668987:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.791265Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354909668987:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.791300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790354909668987:2289];tablet_id=7207518622 ... TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.799589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.799598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.799605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.799608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.799614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.799626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.799636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.799640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.799667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.799675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.799690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.799699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.799713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.799721Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.799732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.799740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.799754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.799761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; 2024-11-21T17:46:16.884571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790354909669281:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.884589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790354909669292:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.884594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.885160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:16.886755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790354909669295:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:19.271435Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211177000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 6 AND 7 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1732) '('"_id" '"8cadd180-f69f5bdc-c5fc5647-5c3b9c44") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '1)) (let $27 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"6")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"7")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType 
(ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1791) '('"_id" '"ac0d7fcc-953c6847-6c3863af-8bf8ec49") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3379) '('"_id" '"3a5bf950-3e07b996-9c60616f-7781b088") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3391) '('"_id" '"2048fe15-a97a93d9-7e5b07f8-af8a6645")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable |75.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |75.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> VDiskBalancing::TestRandom_Block42 >> KqpOlap::SimpleRangeOlap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for 
TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:46:07.479532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:46:07.479556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:46:07.479561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:46:07.479566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:46:07.479572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:46:07.479575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:46:07.479584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:46:07.479650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:07.491214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:07.491235Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:46:07.493526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:07.493623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:46:07.493654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:46:07.496273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:46:07.496344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:46:07.496438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:07.496634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:46:07.497243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:07.497483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:07.497495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:07.497507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:46:07.497513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:07.497519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:46:07.497551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:46:07.498748Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:46:07.515852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:46:07.515935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:07.515988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:46:07.516030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:46:07.516038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:07.516740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:07.516766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:46:07.516817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:07.516827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:46:07.516832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:46:07.516836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:46:07.517216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:07.517228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:46:07.517232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:46:07.517568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:07.517579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:07.517584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:46:07.517590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:46:07.518156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:46:07.518499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:46:07.518539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:46:07.518710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:07.518733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:07.518739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:46:07.518794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:46:07.518801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:46:07.518826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:46:07.518838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:46:07.519206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:07.519216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:07.519250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:07.519255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:46:07.519326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:07.519332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:46:07.519341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:46:07.519345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:46:07.519350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:46:07.519355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:46:07.519359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:46:07.519363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:46:07.519373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:46:07.519378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:46:07.519382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... .654581Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T17:46:19.654721Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:19.654741Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 214748366952 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:19.654748Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T17:46:19.654785Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:46:19.654798Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T17:46:19.654820Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:19.654826Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:46:19.654831Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:19.655057Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.655126Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:46:19.655413Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:19.655419Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:19.655442Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 
2024-11-21T17:46:19.655457Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:46:19.655471Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:19.655475Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T17:46:19.655479Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:46:19.655483Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T17:46:19.655532Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:46:19.655539Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:46:19.655549Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:46:19.655553Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:46:19.655558Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:46:19.655563Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:46:19.655567Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:46:19.655570Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:46:19.655580Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:46:19.655583Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:46:19.655588Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T17:46:19.655594Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:46:19.655596Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:46:19.655599Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:46:19.655650Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.655658Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.655662Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:19.655665Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:46:19.655669Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:46:19.655730Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:46:19.655735Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:46:19.655743Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:46:19.655793Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.655804Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.655807Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:19.655810Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:46:19.655814Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:19.655909Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.655923Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.655926Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:19.655930Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:46:19.655934Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:19.655944Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:46:19.656717Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.656741Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:46:19.656890Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:19.656958Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:46:19.657002Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:46:19.657009Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:46:19.657076Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:46:19.657090Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:46:19.657095Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:383:2375] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:46:19.657158Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:46:19.657180Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 32us result status StatusPathDoesNotExist 2024-11-21T17:46:19.657212Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpDatetime64ColumnShard::Csv [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Debug] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Plan] [GOOD] >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 26798, MsgBus: 2674 2024-11-21T17:46:19.702192Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790366867796128:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:19.702392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e06/r3tmp/tmpG3ijXZ/pdisk_1.dat 2024-11-21T17:46:19.747805Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26798, node 1 2024-11-21T17:46:19.758712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T17:46:19.758736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:19.758738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:19.758766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2674 TClient is connected to server localhost:2674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:19.803394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:19.803418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:19.804512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:19.828935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:19.842622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:19.852143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.852200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.852246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:19.852269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:19.852283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:19.852301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:19.852317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:19.852333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:19.852356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:19.852371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.852385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:19.852401Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790366867796789:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:19.852722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:19.852732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:19.852740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:19.852743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:19.852753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:19.852759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:19.852765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:19.852769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:19.852779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:19.852781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:19.852785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:19.852791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:19.852837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:19.852845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:19.852855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:19.852858Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.852865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:19.852871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:19.852882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:19.852887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:19.852894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:19.852896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:19.854678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790366867796785:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.854698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790366867796785:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.854719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790366867796785:2288];tablet_id=7207518622403 ... 
ct.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:19.861004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:19.861063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:19.861072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:19.861079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:19.861086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:19.861099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:19.861107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:19.861115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:19.861118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:19.861124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:19.861127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:19.861132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:19.861140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:19.861165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:19.861172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:19.861186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T17:46:19.861194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.861203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:19.861210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:19.861224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:19.861231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:19.861240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:19.861247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:19.861296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:19.861304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:19.861321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:19.861331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:19.861346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:19.861357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:19.861368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:19.861374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:19.861383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:19.861390Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:19.861395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:19.861398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:19.861421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:19.861429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:19.861441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:19.861448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.861457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:19.861464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:19.861478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:19.861486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:19.861495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:19.861503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T17:46:19.902838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T17:46:19.995644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790366867797082:2377], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.995664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790366867797101:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.995669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.996383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:19.997924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790366867797111:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:20.116496Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180049, txId: 18446744073709551615] shutting down >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpDatetime64ColumnShard::Csv [GOOD] Test command err: Trying to start YDB, gRPC: 18393, MsgBus: 4948 2024-11-21T17:46:18.919555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790360299623657:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:18.919590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e09/r3tmp/tmpjNSrCG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18393, node 1 2024-11-21T17:46:18.987328Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:18.995250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:18.995269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:18.995270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:18.995300Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4948 2024-11-21T17:46:19.019953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:19.019975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:19.021010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:19.044206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (date Date32 NOT NULL, interval Interval64, PRIMARY KEY (date)) PARTITION BY HASH(date) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:19.226461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790364594591547:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.226520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.229925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:19.236959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.237014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.237051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:19.237081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:19.237103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:19.237123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:19.237146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:19.237169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:19.237192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:19.237212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.237237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:19.237258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790364594591624:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:19.237731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:19.237747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:19.237758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:19.237763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:19.237785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:19.237794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:19.237804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:19.237814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:19.237826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:19.237834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:19.237843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:19.237851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:19.237907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:19.237917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:19.237942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:19.237950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:19.237963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:19.237971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:19.237990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:19.237999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:19.238015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:19.238023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ... 86224037888;self_id=[2:7439790372073364571:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:20.073838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790372073364571:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:20.073864Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790372073364571:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:20.073888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790372073364571:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:20.073908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790372073364571:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:20.073934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790372073364571:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:20.073959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439790372073364571:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:20.074462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:20.074477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 
2024-11-21T17:46:20.074489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:20.074493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:20.074509Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:20.074513Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:20.074521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:20.074527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:20.074535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:20.074545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:20.074552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:20.074556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:20.074595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:20.074608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:20.074623Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:20.074627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:20.074639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:20.074649Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:20.074665Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:20.074671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:20.074681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:20.074684Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:20.128104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790372073364664:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:20.128131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:20.128130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790372073364669:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:20.128670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:20.133227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790372073364671:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:20.264914Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=manager.cpp:16;event=register_group;external_process_id=281474976715662;external_group_id=7;size=1;external_scope_id=7; 2024-11-21T17:46:20.264959Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=25;stage=FO::ACCESSORS; 2024-11-21T17:46:20.265103Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=26;stage=FO::FETCHING; 2024-11-21T17:46:20.265381Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=27;stage=FO::FETCHING; 2024-11-21T17:46:20.265424Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=allocation.cpp:17;event=add;id=28;stage=FO::MERGE; 2024-11-21T17:46:20.265567Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=27;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T17:46:20.265575Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=26;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T17:46:20.265581Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T17:46:20.265583Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T17:46:20.265586Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=25;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T17:46:20.265589Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T17:46:20.265592Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T17:46:20.265634Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T17:46:20.265641Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=1;external_scope_id=7;forced_external_group_id=7; 2024-11-21T17:46:20.265669Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=manager.cpp:25;event=unregister_group;external_process_id=281474976715662;external_group_id=7;size=1; 2024-11-21T17:46:20.265676Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=process.h:139;event=remove_group;external_group_id=7;internal_group_id=1; 2024-11-21T17:46:20.265681Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=process.h:130;event=allocation_unregister;allocation_id=28;wait=0;internal_group_id=1;allocation_status=Allocated; 2024-11-21T17:46:20.265684Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: fline=group.cpp:23;event=try_allocation;limit=0;external_process_id=281474976715662;forced_internal_group_id=NO_VALUE_OPTIONAL;external_scope_id=7;forced_external_group_id=NO_VALUE_OPTIONAL; 2024-11-21T17:46:20.265686Z node 2 :GROUPED_MEMORY_LIMITER DEBUG: 
fline=group.cpp:23;event=try_allocation;limit=1;external_process_id=281474976715662;forced_internal_group_id=NO_VALUE_OPTIONAL;external_scope_id=7;forced_external_group_id=NO_VALUE_OPTIONAL; 2024-11-21T17:46:20.266032Z node 2 :GROUPED_MEMORY_LIMITER INFO: fline=process.h:68;event=scope_cleaned;process_id=281474976715662;external_scope_id=7; 2024-11-21T17:46:20.268576Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180189, txId: 18446744073709551615] shutting down >> KqpOlap::PredicatePushdownPartial [GOOD] >> VDiskBalancing::TestStopOneNode_Block42 >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 2631329286384828392 SEND TEvPut with key [1:1:1:0:0:100:0] 2024-11-21T17:46:20.400708Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2024-11-21T17:46:20.400793Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2024-11-21T17:46:20.418400Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> KqpOlapAggregations::Aggregation_Avg_GroupBy [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownPartial [GOOD] Test command err: Trying to start YDB, gRPC: 13947, MsgBus: 13475 2024-11-21T17:46:19.525712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790365693503101:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:19.525728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e08/r3tmp/tmpxLqI6X/pdisk_1.dat 2024-11-21T17:46:19.571603Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13947, node 1 2024-11-21T17:46:19.583721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:19.583734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:19.583735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:19.583765Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13475 TClient is connected to server localhost:13475 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:19.626695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:19.626759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:19.626782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:19.627933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:46:19.638956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:19.648831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.648887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.648915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:19.648933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:19.648947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:19.648961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:19.648975Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:19.648989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:19.649004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:19.649020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.649036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:19.649055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790365693503749:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:19.651477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.651498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.651522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:19.651533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:19.651543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:19.651552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:19.651561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:19.651574Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:19.651584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:19.651598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.651612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:19.651625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790365693503750:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:19.653648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.653665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.653684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:19.653708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:19.653722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:19.653739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:19.653751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:19.653765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790365693503751:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:19.653778Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:74397903656935037 ... fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:19.656890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:19.656894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:19.656896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:19.656899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:19.656901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:19.656915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:19.656921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:19.656929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:19.656935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.656941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:19.656943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:19.656950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:19.656956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:19.656962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:19.656963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:19.698006Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=5800;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND `resource_id` = "10001" ORDER BY `timestamp` 2024-11-21T17:46:19.798534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790365693504085:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.798555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790365693504074:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.798574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:19.799316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:19.800921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790365693504088:2389], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND `resource_id` = "10001" ORDER BY `timestamp` 2024-11-21T17:46:19.963188Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211179853, txId: 18446744073709551615] shutting down 2024-11-21T17:46:19.996921Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND `level` > 1 AND LENGTH(`uid`) > 0 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND `level` > 1 AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2024-11-21T17:46:20.053980Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180035, txId: 18446744073709551615] shutting down 2024-11-21T17:46:20.094635Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180091, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` >= "10001" AND LENGTH(`uid`) > 0 AND `level` >= 1 AND `level` < 3 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` >= "10001" AND LENGTH(`uid`) > 0 AND `level` >= 1 AND `level` < 3 ORDER BY `timestamp` 2024-11-21T17:46:20.163976Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180133, txId: 18446744073709551615] shutting down 2024-11-21T17:46:20.198696Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180196, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND (`resource_id` >= "10001" OR `level`>= 1 AND `level` <= 3) ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE LENGTH(`uid`) > 0 AND (`resource_id` >= "10001" OR `level`>= 1 AND `level` <= 3) ORDER BY `timestamp` 2024-11-21T17:46:20.267059Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180238, txId: 18446744073709551615] shutting down 2024-11-21T17:46:20.302810Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180301, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" OR `level` >= 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE 
NOT(`resource_id` = "10001" OR `level` >= 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2024-11-21T17:46:20.379728Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180343, txId: 18446744073709551615] shutting down 2024-11-21T17:46:20.419563Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180413, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" AND `level` != 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT(`resource_id` = "10001" AND `level` != 1) AND LENGTH(`uid`) > 0 ORDER BY `timestamp` 2024-11-21T17:46:20.497984Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180455, txId: 18446744073709551615] shutting down 2024-11-21T17:46:20.537850Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180525, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` ORDER BY `timestamp` 2024-11-21T17:46:20.602271Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180574, txId: 18446744073709551615] shutting down 2024-11-21T17:46:20.642521Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180637, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` AND `level` > 1 ORDER BY `timestamp` 2024-11-21T17:46:20.708055Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180679, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` = "10001" AND Unwrap(`level`/1) = `level` AND `level` > 1 ORDER BY `timestamp` 2024-11-21T17:46:20.752717Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211180742, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupBy [GOOD] Test command err: Trying to start YDB, gRPC: 61589, MsgBus: 11533 2024-11-21T17:46:18.138870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790359405585641:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:18.139025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e17/r3tmp/tmpYn9o7t/pdisk_1.dat 2024-11-21T17:46:18.189142Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61589, node 1 2024-11-21T17:46:18.200411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:18.200430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:18.200431Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:18.200469Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11533 TClient is connected to server localhost:11533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:18.241077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:18.241101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:18.242233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:18.244463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:18.256540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:18.263616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.263668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.263695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.263708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.263722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.263741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.263756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.263776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.263790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.263808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.263826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.263844Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790359405586289:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.265869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.265889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.265907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.265917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.265929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.265939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.265950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.265960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.265971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.265983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.265999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.266009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790359405586310:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.266281Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:18.266294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:18.266302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:18.266305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:18.266318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:18.266326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:18.266334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:18.266340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:18.266351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:18.266354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:18.266359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
ss=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:18.272058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:18.272060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:18.272067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:18.272070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:18.272074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:18.272077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:18.272080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:18.272082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:18.272107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:18.272115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:18.272125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:18.272132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.272139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:18.272143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:18.272152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:18.272159Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:18.272166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:18.272168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; 2024-11-21T17:46:18.421313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790359405586584:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:18.421331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790359405586571:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:18.421338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:18.422005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:18.423324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790359405586600:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:20.992510Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211178474, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, AVG(level) FROM `/Root/tableWithNulls` WHERE id BETWEEN 4 AND 5 GROUP BY id ORDER BY id; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["id (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [4, 5]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [4, 5]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":5,"Arguments":[{"Id":3}]},"Column":{"Id":7}},{"Function":{"Id":2,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":1}]}},{"Projection":{"Columns":[{"Id":8},{"Id":7},{"Id":1}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.id","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '1732) '('"_id" '"fab79d4-9781039e-5773fe74-cfcf65be") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '1)) (let $27 '((Nothing $2) (Int32 '0))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeIntersect (RangeCreate (AsList '('((Just (Int32 '"4")) $26) $27))) (RangeCreate (AsList '($27 '((Just (Int32 '"5")) $26)))))))))))) ))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType 
(ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (OptionalType (TupleType $10 (DataType 'Uint64)))) (let $12 '('"id" $1)) (let $13 '('('"_logical_id" '1791) '('"_id" '"eee1d75e-757bbc3c-2552bc79-30fa057a") '('"_wide_channels" (StructType '('_yql_agg_0 $11) $12)))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $29 '('"id")) (let $30 '('('"UsedKeyColumns" $29) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpWideReadOlapTableRanges $28 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($33) (block '( (let $34 '('"_yql_agg_0_sum" '"sum" '"level")) (let $35 '('"_yql_agg_0_cnt" '"count" '"level")) (return (TKqpOlapAgg $33 '($34 $35) $29)) ))))) (let $32 (lambda '($36 $37 $38) (block '( (let $39 (IfPresent $37 (lambda '($40) (Just '((Convert $40 'Double) $36))) (Nothing $11))) (return $39 $38) )))) (return (FromFlow (WideMap $31 $32))) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('1))) (let $16 (OptionalType $10)) (let $17 (StructType '('"column1" $16) $12)) (let $18 '('('"_logical_id" '3379) '('"_id" '"3aa79591-b18dbf6-eefd1ff7-714c0669") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($41) (block '( (let $42 (lambda '($55 $56) (block '( (let $57 (IfPresent $56 (lambda '($58) (Just (Div (Nth $58 '0) (Nth $58 '1)))) (Nothing $16))) (return $57 $55) )))) (let $43 (WideCombiner (ToFlow $41) '"" (lambda '($44 $45) $45) (lambda '($46 $47 $48) $47) (lambda '($49 $50 $51 $52) (IfPresent $50 (lambda '($53) (IfPresent $52 (lambda '($54) (Just '((AggrAdd (Nth $53 '0) (Nth $54 '0)) (AggrAdd (Nth $53 '1) (Nth $54 '1))))) $50)) $52)) $42)) (return (FromFlow (WideSort $43 '('('1 (Bool 'true)))))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('1 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61) (AsStruct '('"column1" $60) '('"id" $61)))))) '('('"_logical_id" '3391) '('"_id" '"c6c6a5c8-2a16d21e-ebfb7258-5947154")))) (let $22 '($14 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"id" '"column1"))) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($7 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 14470071291215550241 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2024-11-21T17:46:21.108005Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 8008541593001933649 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T17:46:21.202308Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} 
Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:284:55] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2024-11-21T17:46:21.202340Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:215:16] ServerId# [1:289:60] TabletId# 72057594037932033 PipeClientId# [8:215:16] 2024-11-21T17:46:21.202355Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:287:58] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2024-11-21T17:46:21.202368Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:197:16] ServerId# [1:286:57] TabletId# 72057594037932033 PipeClientId# [5:197:16] 2024-11-21T17:46:21.202391Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:285:56] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2024-11-21T17:46:21.202405Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:283:54] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2024-11-21T17:46:21.202418Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:288:59] TabletId# 72057594037932033 PipeClientId# [7:209:16] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_1_clean_with_restarts Test command err: Trying to start YDB, gRPC: 25101, MsgBus: 5644 2024-11-21T17:46:14.455955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790343495426651:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.456098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e3b/r3tmp/tmp8Qnn6E/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25101, node 1 2024-11-21T17:46:14.514510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.514525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.514527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.514557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:14.514611Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5644 TClient is connected to server localhost:5644 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:14.556373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.556404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:14.557537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:14.560117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.561785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:14.571243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:14.578164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:14.580362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:14.580412Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2024-11-21T17:46:14.580916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.580957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.580991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.581009Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.581020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.581037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.581051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.581068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.581083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.581102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.581117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.581131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790343495427285:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.581694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:14.583348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:14.583382Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T17:46:14.583825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.583846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.583873Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.583897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.583924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.583946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.583968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.583993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.584018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.584058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.584084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.584109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790343495427284:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.584539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439790343495427285:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:14.584566Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037889 2024-11-21T17:46:14.584588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:46:14.584596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:46:14.584612Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:46:14.584638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=nor ... EBUG: tablet_id=72075186224037889;parent=[1:7439790360675297102:2509];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:19.997898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:19.998003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888; 2024-11-21T17:46:19.998046Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:19.998061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:19.998126Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:19.998196Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:19.998239Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:19.998250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.998263Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.998364Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:19.998498Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:19.998524Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 
2024-11-21T17:46:19.998596Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T17:46:19.998634Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:19.998688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:19.998769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:19.998864Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:19.998887Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211180000 at tablet 72075186224037888 2024-11-21T17:46:19.998896Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:19.998911Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:19.998931Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.998933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:19.998940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.998948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:19.998957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:19.998989Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:19.999006Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.999013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.999033Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:19.999052Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790360675297104:2510];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:19.999088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439790360675297104:2510];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:19.999120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:19.999124Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439790360675297102:2509];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:19.999139Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439790360675297131:2513];tablet_id=72075186224037888;parent=[1:7439790360675297104:2510];fline=manager.h:99;event=ask_data;request=request_id=38;3={portions_count=1};; 2024-11-21T17:46:19.999244Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211180000 at tablet 72075186224037889 2024-11-21T17:46:19.999254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:19.999261Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:19.999268Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:19.999278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:19.999287Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.999291Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:19.999298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:19.999316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360675297102:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1266B1AA 1. /-S/util/system/yassert.cpp:55: Panic @ 0x12662916 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x13680663 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x12459D20 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:229: Execute_ @ 0x1245B20B 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7FF1DE9A0D8F 11. ??:0: ?? @ 0x7FF1DE9A0E3F 12. ??:0: ?? 
@ 0x1179D028 >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 17623725297596453376 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2024-11-21T17:46:21.226040Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2024-11-21T17:46:21.226118Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2024-11-21T17:46:21.243732Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> test.py::test[window-row_number_to_map_multiple-default.txt-Results] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Debug] >> VDiskBalancing::TestRandom_Mirror3dc >> KqpOlapSysView::StatsSysViewTable [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Debug] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Plan] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 3180301206469719869 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T17:46:21.545126Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:6311:826] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction >> VDiskBalancing::TestStopOneNode_Mirror3dc >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewTable [GOOD] Test command err: Trying to start YDB, gRPC: 28756, MsgBus: 1225 2024-11-21T17:46:11.838008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790331897112692:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.838251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e66/r3tmp/tmpa09nt4/pdisk_1.dat 2024-11-21T17:46:11.893510Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28756, node 1 2024-11-21T17:46:11.908391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T17:46:11.908426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.908428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.908472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1225 2024-11-21T17:46:11.939352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:11.939389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:1225 2024-11-21T17:46:11.940412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:11.974581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:11.982207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:11.992578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.992647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.992700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:11.992728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:11.992746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:11.992768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:11.992788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:11.992811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:11.992838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:11.992862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.992893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:11.992916Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790331897113350:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:11.993405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:11.993418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:11.993431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:11.993444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:11.993465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:11.993473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:11.993483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:11.993495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:11.993508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:11.993516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:11.993523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:11.993531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:11.993586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:11.993597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:11.993617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:11.993629Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:11.993641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:11.993648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:11.993664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:11.993673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:11.993684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:11.993693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:11.996642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790331897113349:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:11.996664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790331897113349:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:11.996702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790331897113349:2288];tablet_id=7207518622403 ... 
;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:12.004509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:12.004515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:12.004522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:12.004527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.004533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:12.004535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:12.004543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:12.004549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:12.004553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:12.004555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:12.040016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 Status: 48 TxId: 281474976715660 Issues { message: "Check failed: path: \'/Root/olapStore\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeColumnStore, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/olap/operations/create_store.cpp:361" severity: 1 } SchemeShardStatus: 4 SchemeShardReason: "Check failed: path: \'/Root/olapStore\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeColumnStore, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/olap/operations/create_store.cpp:361" SchemeShardTabletId: 72057594046644480 PathId: 2 PathCreateTxId: 281474976715658 2024-11-21T17:46:12.052125Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T17:46:12.052644Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715661 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2348304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2348304;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T17:46:16.838500Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790331897112692:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.838546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/olapTable_1/.sys/primary_index_stats` WHERE Activity = 1 GROUP BY PathId, TabletId, Kind ORDER BY PathId, TabletId, Kind RESULT: 2024-11-21T17:46:17.354014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790357666917973:2673], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:17.354016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790357666917962:2670], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:17.354026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:17.354599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:46:17.355980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790357666917976:2674], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:46:19.579237Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211177510, txId: 281474976715664] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/olapTable_2/.sys/primary_index_stats` WHERE Activity = 1 GROUP BY PathId, TabletId, Kind ORDER BY PathId, TabletId, Kind RESULT: 2024-11-21T17:46:21.759935Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211179622, txId: 281474976715666] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable_1/.sys/primary_index_stats` WHERE PathId > UInt64("3") ORDER BY PathId, Kind, TabletId RESULT: 2024-11-21T17:46:21.804390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211181789, txId: 281474976715668] shutting down >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 3973248919554060705 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2024-11-21T17:46:21.991889Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:284:55] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2024-11-21T17:46:21.991926Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:215:16] ServerId# [1:289:60] TabletId# 72057594037932033 PipeClientId# [8:215:16] 2024-11-21T17:46:21.991946Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:287:58] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2024-11-21T17:46:21.991964Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:197:16] ServerId# [1:286:57] TabletId# 72057594037932033 PipeClientId# [5:197:16] 2024-11-21T17:46:21.991979Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:285:56] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2024-11-21T17:46:21.991998Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:283:54] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2024-11-21T17:46:21.992014Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:288:59] TabletId# 72057594037932033 PipeClientId# [7:209:16] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# 
OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> KqpOlap::CompactionPlanner >> test.py::test[aggregate-group_by_mul_gs_ru--Debug] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Plan] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] >> KqpOlapWrite::TierDraftsGCWithRestart >> KqpOlapAggregations::Json_Exists >> KqpOlapAggregations::Aggregation_MinL >> KqpOlap::MetadataMemoryManager >> TTxLocatorTest::TestAllocateAll ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 13948073910920785329 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T17:46:22.338439Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> TSchemeShardSplitBySizeTest::Split10Shards [GOOD] >> TTxLocatorTest::TestAllocateAll [GOOD] >> TTxLocatorTest::TestImposibleSize >> TTxLocatorTest::TestImposibleSize [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2024-11-21T17:46:22.837913Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T17:46:22.838007Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T17:46:22.838125Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:22.838493Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:22.838604Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:22.840662Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:22.840683Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:22.840692Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:22.840707Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T17:46:22.840740Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:22.840756Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:22.840773Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T17:46:22.840878Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710655 2024-11-21T17:46:22.840963Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:22.840970Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:22.840980Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2024-11-21T17:46:22.840985Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2024-11-21T17:46:22.841606Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#1 2024-11-21T17:46:22.841633Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2024-11-21T17:46:22.841638Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Split10Shards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:46.089577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:46.089608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.089613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:46.089619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:46.089632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:46.089637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:46.089647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:46.089732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:46.101521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:46.101547Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:46.104191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:46.105010Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:46.105040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:46.106302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:46.106462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:46.106548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.106611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:46.107432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.107666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:46.107674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.107708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:46.107716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.107722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:46.107735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.108916Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:46.120071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:46.120138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.120183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:46.120253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:46.120261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.122816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.122857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:46.122943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.122956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:46.122960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:46.122965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:46.123615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.123628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:46.123632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:46.128097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.128125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.128132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.128140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.128853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:46.129477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:46.129543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:46.129716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:46.129746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:46.129775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.129834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:46.129842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:46.129874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.129887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:46.130370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T17:45:46.130379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:46.130426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:46.130431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:46.130501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:46.130507Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:46.130516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:46.130519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.130523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:46.130527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:46.130530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:46.130532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:46.130555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:46.130561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:46.130565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:46.130822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.130835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:46.130839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:46.130844Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:46.130849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:46.130862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
shardId: 72075186233409574 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\201I\3174\000\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409575 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409576 } TableStats { DataSize: 119380 RowCount: 1000 IndexSize: 306 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 16 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 119380 IndexSize: 306 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 879 Memory: 1465360 Network: 0 Storage: 121185 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 119686 DataSize: 119380 IndexSize: 306 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 119686 DataSize: 119380 IndexSize: 306 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:22.941148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:22.941160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710672:0 ProgressState, operation type: TxSplitTablePartition, at tablet72057594046678944 2024-11-21T17:46:22.941195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 32 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:22.941208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 33 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:22.942299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:32 msg type: 268697601 2024-11-21T17:46:22.942363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:33 msg type: 268697601 2024-11-21T17:46:22.942378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72057594037968897 
2024-11-21T17:46:22.942383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:32, partId: 0 2024-11-21T17:46:22.942386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:33, partId: 0 2024-11-21T17:46:22.942482Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 32 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:22.942538Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 32, type DataShard, boot OK, tablet id 72075186233409577 2024-11-21T17:46:22.942607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2024-11-21T17:46:22.942612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:32, partId: 0 2024-11-21T17:46:22.942626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2024-11-21T17:46:22.942633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:46:22.942636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2024-11-21T17:46:22.942741Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 33 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:22.942765Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 33, type DataShard, boot OK, tablet id 72075186233409578 2024-11-21T17:46:22.942793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2024-11-21T17:46:22.942796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:33, partId: 0 2024-11-21T17:46:22.942819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2024-11-21T17:46:22.942824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:46:22.942829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 HandleReply 
TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2024-11-21T17:46:22.942839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710672:0 2 -> 3 2024-11-21T17:46:22.943842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:22.944004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:22.944057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:22.944062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:22.944080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186233409577 splitOp: 281474976710672:0 alterVersion: 2 at tablet: 72057594046678944 2024-11-21T17:46:22.944107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186233409578 splitOp: 281474976710672:0 alterVersion: 2 at tablet: 72057594046678944 2024-11-21T17:46:22.945111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409577 cookie: 72057594046678944:32 msg type: 269553152 2024-11-21T17:46:22.945167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409578 cookie: 72057594046678944:33 msg type: 269553152 2024-11-21T17:46:22.945190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409577 2024-11-21T17:46:22.945194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409578 2024-11-21T17:46:22.954571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710672 TabletId: 72075186233409577 2024-11-21T17:46:22.954595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId#281474976710672:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710672:0, at schemeshard: 72057594046678944 message# OperationCookie: 281474976710672 TabletId: 72075186233409577 2024-11-21T17:46:22.955616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:22.955735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710672 TabletId: 72075186233409578 2024-11-21T17:46:22.955743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId#281474976710672:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710672:0, at schemeshard: 72057594046678944 message# OperationCookie: 281474976710672 TabletId: 72075186233409578 2024-11-21T17:46:22.955756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710672:0 3 -> 131 2024-11-21T17:46:22.956327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 
2024-11-21T17:46:22.956388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:22.956393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976710672:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:46:22.956398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId#281474976710672:0 Starting split on src datashard 72075186233409576 splitOpId# 281474976710672:0 at tablet 72057594046678944 2024-11-21T17:46:22.957090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409576 cookie: 72057594046678944:31 msg type: 269553154 2024-11-21T17:46:22.957125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409576 >> KqpOlapAggregations::Json_Exists [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2024-11-21T17:46:23.103236Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T17:46:23.103325Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T17:46:23.103443Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:23.103805Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.103911Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:23.105771Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.105793Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.105802Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.105815Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T17:46:23.105848Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.105864Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:23.105883Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T17:46:23.105988Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710656 2024-11-21T17:46:23.106026Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2024-11-21T17:46:23.106512Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2024-11-21T17:46:23.106568Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2106] requested range size#123456 2024-11-21T17:46:23.106641Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.106648Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.106659Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2024-11-21T17:46:23.106663Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2106] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2024-11-21T17:46:23.106698Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#281474976587200 2024-11-21T17:46:23.106717Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2024-11-21T17:46:23.106720Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2024-11-21T17:46:23.106749Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#246912 2024-11-21T17:46:23.106776Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.106781Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.106790Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2024-11-21T17:46:23.106793Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2024-11-21T17:46:23.106825Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#281474976340288 2024-11-21T17:46:23.106831Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2024-11-21T17:46:23.106834Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> KqpOlap::MetadataMemoryManager [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Debug] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 62029, 
MsgBus: 11184 2024-11-21T17:46:22.764153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790378297458662:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:22.764172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dfe/r3tmp/tmpfVtqba/pdisk_1.dat 2024-11-21T17:46:22.808434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62029, node 1 2024-11-21T17:46:22.819417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:22.819432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:22.819433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:22.819471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11184 TClient is connected to server localhost:11184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:22.864168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:22.864186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:22.864713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:22.865261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:46:22.876089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:22.885289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.885356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.885385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.885407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.885419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.885436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.885453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.885477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.885494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.885508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.885522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.885541Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790378297459291:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.887675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.887700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.887722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.887735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.887745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.887760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.887773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.887789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.887807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.887821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.887832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.887845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378297459294:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.888137Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:22.888147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:22.888155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:22.888164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:22.888178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:22.888185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:22.888191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:22.888198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.888203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.888205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.888209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
rmalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:22.894012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.894017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.894020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.894023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:22.894025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:22.894040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:22.894046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:22.894054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:22.894060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.894065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:22.894067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:22.894075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:22.894081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:22.894086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:22.894088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; 2024-11-21T17:46:23.030796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790382592426881:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.030824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.030860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790382592426893:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.031637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:23.033421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790382592426895:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:23.173661Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211183087, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"level == 1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":11},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":9},{"Id":13}]},"Column":{"Id":14}}},{"Filter":{"Predicate":{"Id":14}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"tableWithNulls","ReadColumns":["id","jsondoc","jsonval","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":11},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":9},{"Id":13}]},"Column":{"Id":14}}},{"Filter":{"Predicate":{"Id":14}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 1","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Int32)) (let $2 '('"id" $1)) (let $3 (OptionalType (DataType 'Bool))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $9 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $10 '('"id" '"jsondoc" '"jsonval" '"level")) (let $11 (OptionalType (DataType 'JsonDocument))) (let $12 '($3)) (let $13 (ResourceType '"JsonNode")) (let $14 (OptionalType $13)) (let $15 '((ResourceType '"JsonPath"))) (let $16 (DataType 'Utf8)) (let $17 (DictType $16 $13)) (let $18 '($17)) (let $19 (CallableType '() $12 '($14) $15 $18 $12)) (let $20 '('('"strict"))) (let $21 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $19 (VoidType) '"" $20)) (let $22 (lambda '($41) (block '( (let $42 '((DataType 'Json) '"" '1)) (let $43 (CallableType '() '($13) $42)) (let $44 (Udf '"Json2.Parse" (Void) (VoidType) '"" $43 (VoidType) '"" '())) (return (Just (Apply $44 $41))) )))) (let $23 (Nothing $14)) (let $24 (CallableType '() $15 '($16))) (let $25 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $24 (VoidType) '"" '())) (let $26 (Apply $25 (Utf8 '"$.col1"))) (let $27 (Dict $17)) (let $28 (Bool 'false)) (let $29 (Just $28)) (let $30 (KqpWideReadOlapTableRanges $9 (Void) $10 '() '() (lambda '($31) (block '( (let $32 (DataType 'Json)) (let $33 (StructType $2 '('"jsondoc" $11) '('"jsonval" (OptionalType $32)) '('"level" (OptionalType $1)))) (let $34 (KqpOlapApply $33 '('"jsonval") (lambda '($39) (block '( (let $40 (IfPresent $39 $22 $23)) (return (Apply $21 $40 $26 $27 $29)) ))))) (let $35 '('?? $34 $28)) (let $36 '('eq '"level" (Int32 '1))) (let $37 '('?? 
$36 $28)) (let $38 '('"id" '"jsondoc" '"jsonval")) (return (TKqpOlapExtractMembers (KqpOlapFilter $31 (KqpOlapAnd $35 $37)) $38)) ))))) (return (FromFlow (NarrowMap $30 (lambda '($45 $46 $47) (block '( (let $48 (IfPresent $47 $22 $23)) (let $49 (Apply $21 $48 $26 $27 $29)) (let $50 (CallableType '() $12 '($11) $15 $18 $12)) (let $51 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $50 (VoidType) '"" $20)) (let $52 (Apply $51 $46 $26 $27 $29)) (return (AsStruct '('"column1" $49) '('"column2" $52) '('"id" $45))) )))))) ))) '('('"_logical_id" '893) '('"_id" '"3c88f708-c1f1f326-109c9e42-b2aa8628")))) (let $5 (DqCnUnionAll (TDqOutput $4 '"0"))) (let $6 (DqPhyStage '($5) (lambda '($53) $53) '('('"_logical_id" '1415) '('"_id" '"cd8854e5-1ef5dc3d-50a7f917-76179689")))) (let $7 '('"id" '"column1" '"column2")) (let $8 (DqCnResult (TDqOutput $6 '"0") $7)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($4 $6) '($8) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"column1" $3) '('"column2" $3) $2)) '"0" '"0")) '('('"type" '"scan_query")))) ) >> test.py::test[window-win_by_all_percentile_interval-default.txt-Plan] [GOOD] >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] >> KqpOlap::ScanFailedSnapshotTooOld [GOOD] >> TTxLocatorTest::TestZeroRange >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::MetadataMemoryManager [GOOD] Test command err: Trying to start YDB, gRPC: 23208, MsgBus: 28202 2024-11-21T17:46:22.789135Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790380038008381:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:22.789381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dfa/r3tmp/tmpPQc4F9/pdisk_1.dat 2024-11-21T17:46:22.833396Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23208, node 1 2024-11-21T17:46:22.846015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:22.846029Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:22.846030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:22.846065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28202 TClient is connected to server localhost:28202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:22.889655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:22.889681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:22.890790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:22.914659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:22.921007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:22.932567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.932626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.932667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.932694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.932711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.932735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.932757Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.932778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.932800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.932821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.932845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.932870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790380038009034:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.933397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:22.933410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:22.933422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:22.933431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:22.933447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:22.933459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:22.933474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:22.933484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.933499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.933503Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.933509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:22.933513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:22.933579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:22.933593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:22.933611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:22.933615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.933635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:22.933644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:22.933661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:22.933670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:22.933686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:22.933694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:22.936365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790380038009033:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.936386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790380038009033:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.936421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790380038009033:2288];tablet_id=7207518622 ... 
nks;id=Chunks; 2024-11-21T17:46:22.944272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:22.944276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:22.944306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:22.944315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:22.944326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:22.944336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.944350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.944361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.944368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:22.944372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:22.944420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:22.944431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:22.944449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:22.944456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.944467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:22.944475Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:22.944492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:22.944500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:22.944512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:22.944519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:22.944719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:22.944729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:22.944738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:22.944746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:22.944767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:22.944776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:22.944783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:22.944792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.944806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.944814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.944820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:22.944829Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:22.944866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:22.944875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:22.944890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:22.944898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.944913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:22.944922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:22.944941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:22.944950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:22.944966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:22.944974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:22.978160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T17:46:23.151685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790384332976682:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.151685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790384332976674:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.151703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.152375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:23.153798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790384332976689:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:23.366410Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211183206, txId: 18446744073709551615] shutting down 2024-11-21T17:46:23.370470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:23.469824Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211183423, txId: 18446744073709551615] shutting down >> TTxLocatorTest::TestZeroRange [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ScanFailedSnapshotTooOld [GOOD] Test command err: Trying to start YDB, gRPC: 8445, MsgBus: 18907 2024-11-21T17:46:13.269491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790341681987632:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:13.269716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e5a/r3tmp/tmpWFWYSU/pdisk_1.dat 2024-11-21T17:46:13.313010Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8445, node 1 2024-11-21T17:46:13.324522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:13.324535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:13.324536Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:13.324568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18907 TClient is connected to server localhost:18907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:13.370745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:13.370778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:13.371915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:13.395199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:13.397024Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/cnt` (key Int32 NOT NULL, c Int32, PRIMARY KEY (key)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:13.518538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790341681988242:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.518558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:13.540831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:13.546847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.546879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.546907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.546922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.546935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.546948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.546962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.546977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.546991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.547008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.547021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.547035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341681988318:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:13.547347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:13.547360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:13.547371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:13.547380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.547407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.547415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.547422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.547430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.547434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.547441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.547444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.547450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.547491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.547509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.547527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.547535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:13.547546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.547552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.547561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.547568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.547581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.547588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:18.269897Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790341681987632:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:18.269936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:23.599495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790384631661567:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.599522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.599568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790384631661572:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.600327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:23.602026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790384631661574:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:23.768491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=key: [ 1 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715661}],"starts":[{"inc":{"count_not_include":1},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715661}]},"p":{"include":2147483647}}]}; >> TTxLocatorTest::Boot >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TTxLocatorTest::TestAllocateAllByPieces ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2024-11-21T17:46:23.988982Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T17:46:23.989067Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T17:46:23.989184Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:23.989527Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.989612Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:23.991173Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.991191Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.991199Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.991212Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T17:46:23.991237Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.991249Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:23.991260Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T17:46:23.991356Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#0 2024-11-21T17:46:23.991425Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.991431Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:23.991441Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2024-11-21T17:46:23.991445Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 0 expected SUCCESS >> TTxLocatorTest::Boot [GOOD] >> TExternalTableTestReboots::ParallelCreateDrop [GOOD] >> TFlatTableLongTxLarge::LargeDeltaChain [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] >> TTxLocatorTest::TestWithReboot >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] |75.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> test.py::test[aggregate-group_by_mul_gs_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2024-11-21T17:46:24.388886Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T17:46:24.388982Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T17:46:24.389105Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:24.389488Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.389605Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:24.391155Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391169Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391176Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391189Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T17:46:24.391219Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391234Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:24.391251Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T17:46:24.391414Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2024-11-21T17:46:24.391508Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2024-11-21T17:46:24.391553Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2024-11-21T17:46:24.391592Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2024-11-21T17:46:24.391617Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2024-11-21T17:46:24.391646Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391662Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2024-11-21T17:46:24.391678Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391708Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391724Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391734Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2024-11-21T17:46:24.391753Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391762Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2024-11-21T17:46:24.391788Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391800Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2024-11-21T17:46:24.391816Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391828Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391845Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391854Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2024-11-21T17:46:24.391870Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391883Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 
2024-11-21T17:46:24.391887Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2024-11-21T17:46:24.391901Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391910Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2024-11-21T17:46:24.391914Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2024-11-21T17:46:24.391924Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2024-11-21T17:46:24.391928Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2024-11-21T17:46:24.391937Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391944Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391951Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2024-11-21T17:46:24.391954Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2024-11-21T17:46:24.391965Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391973Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2024-11-21T17:46:24.391976Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 400000 to# 500000 2024-11-21T17:46:24.391983Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391988Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.391992Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2024-11-21T17:46:24.391994Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2024-11-21T17:46:24.392001Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2024-11-21T17:46:24.392003Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2024-11-21T17:46:24.392010Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392015Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392019Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2024-11-21T17:46:24.392021Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2024-11-21T17:46:24.392029Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392032Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2024-11-21T17:46:24.392034Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2024-11-21T17:46:24.392041Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392045Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2024-11-21T17:46:24.392047Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T17:46:24.392741Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#100000 2024-11-21T17:46:24.392835Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#100000 2024-11-21T17:46:24.392851Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#100000 2024-11-21T17:46:24.392868Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392885Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392915Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#100000 2024-11-21T17:46:24.392942Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2024-11-21T17:46:24.392966Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392972Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392988Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:15:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.392998Z node 1 :TX_ALLOCATOR DEBUG: tablet# ... 
from# 8200000 Reserved to# 8300000 2024-11-21T17:46:24.398782Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:393:2427] TEvAllocateResult from# 8200000 to# 8300000 2024-11-21T17:46:24.398794Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.398803Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2024-11-21T17:46:24.398807Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:395:2429] TEvAllocateResult from# 8300000 to# 8400000 2024-11-21T17:46:24.398816Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.398824Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2024-11-21T17:46:24.398828Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8400000 to# 8500000 2024-11-21T17:46:24.398836Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.398844Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2024-11-21T17:46:24.398849Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8500000 to# 8600000 2024-11-21T17:46:24.398860Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2024-11-21T17:46:24.398864Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8600000 to# 8700000 2024-11-21T17:46:24.398874Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.398882Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.398889Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2024-11-21T17:46:24.398892Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8700000 to# 8800000 2024-11-21T17:46:24.398903Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2024-11-21T17:46:24.398906Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8800000 to# 8900000 2024-11-21T17:46:24.398913Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2024-11-21T17:46:24.398915Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T17:46:24.399167Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 HANDLE TEvAllocate Sender# [1:429:2463] requested range size#100000 2024-11-21T17:46:24.412714Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2024-11-21T17:46:24.412938Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2024-11-21T17:46:24.412972Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413018Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413043Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2024-11-21T17:46:24.413088Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413112Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2024-11-21T17:46:24.413126Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413146Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413163Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2024-11-21T17:46:24.413175Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413206Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2024-11-21T17:46:24.413231Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413247Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2024-11-21T17:46:24.413260Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413266Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413281Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413295Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2024-11-21T17:46:24.413322Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413355Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413383Z node 1 :TX_ALLOCATOR 
DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2024-11-21T17:46:24.413388Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:429:2463] TEvAllocateResult from# 9000000 to# 9100000 2024-11-21T17:46:24.413405Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413420Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2024-11-21T17:46:24.413435Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413448Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2024-11-21T17:46:24.413452Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9100000 to# 9200000 2024-11-21T17:46:24.413459Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413471Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2024-11-21T17:46:24.413474Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9200000 to# 9300000 2024-11-21T17:46:24.413496Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2024-11-21T17:46:24.413499Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9300000 to# 9400000 2024-11-21T17:46:24.413506Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413525Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2024-11-21T17:46:24.413529Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9400000 to# 9500000 2024-11-21T17:46:24.413537Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413546Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2024-11-21T17:46:24.413550Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9500000 to# 9600000 2024-11-21T17:46:24.413556Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413567Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413579Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2024-11-21T17:46:24.413582Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# 
[1:441:2475] TEvAllocateResult from# 9600000 to# 9700000 2024-11-21T17:46:24.413594Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2024-11-21T17:46:24.413597Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9700000 to# 9800000 2024-11-21T17:46:24.413604Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.413615Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2024-11-21T17:46:24.413619Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9800000 to# 9900000 2024-11-21T17:46:24.413629Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2024-11-21T17:46:24.413633Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS >> KqpOlapStatistics::StatsUsageNotPK >> TTxLocatorTest::TestWithReboot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2024-11-21T17:46:24.437000Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T17:46:24.437095Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T17:46:24.437218Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:24.437614Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.437722Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:24.440309Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.440327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.440334Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.440344Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T17:46:24.440367Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.440380Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:24.440393Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2024-11-21T17:46:24.476577Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T17:46:24.476646Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T17:46:24.476731Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:24.476990Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.477076Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:24.479146Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479170Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479181Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479196Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T17:46:24.479227Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479245Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:24.479260Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T17:46:24.479339Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#8796093022207 2024-11-21T17:46:24.479411Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479416Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2024-11-21T17:46:24.479425Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2024-11-21T17:46:24.479795Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#8796093022207 2024-11-21T17:46:24.479832Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479838Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479845Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2024-11-21T17:46:24.479847Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2024-11-21T17:46:24.479872Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2024-11-21T17:46:24.479893Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479897Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479903Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2024-11-21T17:46:24.479906Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2024-11-21T17:46:24.479927Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2024-11-21T17:46:24.479943Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479947Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479952Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2024-11-21T17:46:24.479955Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected 
SUCCESS 2024-11-21T17:46:24.479975Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2024-11-21T17:46:24.479992Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.479996Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480001Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2024-11-21T17:46:24.480003Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2024-11-21T17:46:24.480026Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2024-11-21T17:46:24.480040Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480044Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480049Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2024-11-21T17:46:24.480051Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2024-11-21T17:46:24.480073Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2024-11-21T17:46:24.480088Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480092Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480096Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2024-11-21T17:46:24.480098Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2024-11-21T17:46:24.480128Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2024-11-21T17:46:24.480146Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480151Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480156Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2024-11-21T17:46:24.480158Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:97:2131] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 
2024-11-21T17:46:24.480186Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2024-11-21T17:46:24.480204Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480209Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480214Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2024-11-21T17:46:24.480216Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2024-11-21T17:46:24.480267Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2024-11-21T17:46:24.480287Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480292Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480298Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2024-11-21T17:46:24.480300Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2024-11-21T17:46:24.480324Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2024-11-21T17:46:24.480338Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480343Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480348Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2024-11-21T17:46:24.480350Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:109:2143] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2024-11-21T17:46:24.480374Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#8796093022207 2024-11-21T17:46:24.480388Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.480393Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... 
e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:149:2183] requested range size#8796093022207 2024-11-21T17:46:24.481007Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481012Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481016Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2024-11-21T17:46:24.481019Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:149:2183] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2024-11-21T17:46:24.481049Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:153:2187] requested range size#8796093022207 2024-11-21T17:46:24.481064Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481068Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481073Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2024-11-21T17:46:24.481075Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:153:2187] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2024-11-21T17:46:24.481103Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:157:2191] requested range size#8796093022207 2024-11-21T17:46:24.481118Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481123Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481128Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2024-11-21T17:46:24.481130Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2024-11-21T17:46:24.481162Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2024-11-21T17:46:24.481177Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481181Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481186Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2024-11-21T17:46:24.481188Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 
2024-11-21T17:46:24.481218Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2024-11-21T17:46:24.481233Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481237Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481242Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2024-11-21T17:46:24.481244Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2024-11-21T17:46:24.481275Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2024-11-21T17:46:24.481288Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481294Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481300Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2024-11-21T17:46:24.481302Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2024-11-21T17:46:24.481348Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2024-11-21T17:46:24.481368Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481372Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481377Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2024-11-21T17:46:24.481380Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2024-11-21T17:46:24.481412Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2024-11-21T17:46:24.481426Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481431Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481436Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2024-11-21T17:46:24.481438Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected 
SUCCESS 2024-11-21T17:46:24.481472Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2024-11-21T17:46:24.481495Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481501Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481508Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2024-11-21T17:46:24.481511Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2024-11-21T17:46:24.481561Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2024-11-21T17:46:24.481588Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481593Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481598Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2024-11-21T17:46:24.481601Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2024-11-21T17:46:24.481638Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2024-11-21T17:46:24.481657Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481661Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481665Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2024-11-21T17:46:24.481667Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2024-11-21T17:46:24.481708Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2024-11-21T17:46:24.481722Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481726Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481731Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2024-11-21T17:46:24.481733Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 272678883688417 to# 281474976710624 
expected SUCCESS 2024-11-21T17:46:24.481767Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#31 2024-11-21T17:46:24.481781Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481786Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.481790Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2024-11-21T17:46:24.481792Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2024-11-21T17:46:24.481826Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#1 2024-11-21T17:46:24.481831Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2024-11-21T17:46:24.481834Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_large/unittest >> TFlatTableLongTxLarge::LargeDeltaChain [GOOD] Test command err: DataBytes = 1073746235 DataPages = 150237 FlatIndexBytes = 3155010 BTreeIndexBytes = 6810652 DataBytes = 1073742150 DataPages = 151523 FlatIndexBytes = 22252537 BTreeIndexBytes = 25927778 DataBytes = 1073753117 DataPages = 148879 FlatIndexBytes = 1072403884 BTreeIndexBytes = 1077128646 DataBytes = 1073744451 DataPages = 150676 FlatIndexBytes = 6479123 BTreeIndexBytes = 7437771 DataBytes = 1073743502 DataPages = 47351 FlatIndexBytes = 1643820 BTreeIndexBytes = 2065359 DataBytes = 1073744719 DataPages = 70000 FlatIndexBytes = 3454718 BTreeIndexBytes = 3553208 00000.000 II| FAKE_ENV: Born at 2024-11-21T17:44:35.499279Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 ...compacting ...waiting until compacted ...compacting ...waiting until compacted 00108.657 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00108.665 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 263 reqs hit {0 0b} miss {265 17583755543b} 00108.665 II| FAKE_ENV: Shut order, stopping 4 BS groups 00108.665 II| FAKE_ENV: DS.0 gone, left {15399b, 2}, put {222278b, 1558} 00108.665 II| FAKE_ENV: DS.1 gone, left {4398138657b, 535}, put {8813752974b, 2851} 00109.102 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00109.102 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00109.102 II| FAKE_ENV: All BS storage groups are stopped 00109.102 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00109.107 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2024-11-21T17:46:24.672581Z node 1 :TABLET_MAIN DEBUG: Tablet: 
72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T17:46:24.672674Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T17:46:24.672790Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:24.673139Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.673252Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:24.675129Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675152Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675162Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675176Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T17:46:24.675208Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675224Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:24.675242Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T17:46:24.675434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2024-11-21T17:46:24.675538Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2024-11-21T17:46:24.675582Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2024-11-21T17:46:24.675624Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2024-11-21T17:46:24.675649Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2024-11-21T17:46:24.675677Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675691Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2024-11-21T17:46:24.675705Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675730Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675744Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675755Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 
2024-11-21T17:46:24.675770Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675780Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2024-11-21T17:46:24.675805Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675816Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2024-11-21T17:46:24.675831Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675842Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675859Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675867Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2024-11-21T17:46:24.675886Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675897Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2024-11-21T17:46:24.675902Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2024-11-21T17:46:24.675916Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675926Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2024-11-21T17:46:24.675929Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2024-11-21T17:46:24.675939Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2024-11-21T17:46:24.675943Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2024-11-21T17:46:24.675952Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675958Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675965Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2024-11-21T17:46:24.675968Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2024-11-21T17:46:24.675980Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.675988Z node 1 
:TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2024-11-21T17:46:24.675992Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 400000 to# 500000 2024-11-21T17:46:24.676002Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.676010Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.676016Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2024-11-21T17:46:24.676019Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2024-11-21T17:46:24.676030Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2024-11-21T17:46:24.676033Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2024-11-21T17:46:24.676044Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.676052Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.676059Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2024-11-21T17:46:24.676062Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2024-11-21T17:46:24.676074Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.676078Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2024-11-21T17:46:24.676081Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2024-11-21T17:46:24.676092Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.676098Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2024-11-21T17:46:24.676102Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T17:46:24.676824Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2024-11-21T17:46:24.677035Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2024-11-21T17:46:24.677137Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 
TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2024-11-21T17:46:24.677149Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: [[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2024-11-21T17:46:24.677184Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2024-11-21T17:46:24.677191Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2024-11-21T17:46:24.677196Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2024-11-21T17:46:24.677200Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... IN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2024-11-21T17:46:24.841436Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:615:2546] TEvAllocateResult from# 9100000 to# 9200000 2024-11-21T17:46:24.841441Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2024-11-21T17:46:24.841443Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9200000 to# 9300000 2024-11-21T17:46:24.841448Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841455Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2024-11-21T17:46:24.841457Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9300000 to# 9400000 2024-11-21T17:46:24.841465Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841476Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2024-11-21T17:46:24.841479Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9400000 to# 9500000 2024-11-21T17:46:24.841490Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841494Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2024-11-21T17:46:24.841496Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] 
TEvAllocateResult from# 9500000 to# 9600000 2024-11-21T17:46:24.841502Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841510Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2024-11-21T17:46:24.841512Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9600000 to# 9700000 2024-11-21T17:46:24.841518Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841523Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841530Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2024-11-21T17:46:24.841532Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9700000 to# 9800000 2024-11-21T17:46:24.841540Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841546Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841551Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2024-11-21T17:46:24.841553Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9800000 to# 9900000 2024-11-21T17:46:24.841560Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841568Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.841574Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2024-11-21T17:46:24.841578Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T17:46:24.841879Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2024-11-21T17:46:24.842122Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2024-11-21T17:46:24.842225Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2024-11-21T17:46:24.842233Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 
72057594046447617 2024-11-21T17:46:24.842256Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2024-11-21T17:46:24.842262Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2024-11-21T17:46:24.842268Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2024-11-21T17:46:24.842273Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2024-11-21T17:46:24.842285Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2024-11-21T17:46:24.842291Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2024-11-21T17:46:24.842297Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2024-11-21T17:46:24.842304Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2024-11-21T17:46:24.842309Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2024-11-21T17:46:24.842315Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2024-11-21T17:46:24.842344Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2024-11-21T17:46:24.842350Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2024-11-21T17:46:24.842354Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2024-11-21T17:46:24.842356Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2024-11-21T17:46:24.842358Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2024-11-21T17:46:24.842362Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617, 
Gc+: [[72057594046447617:11:5:1:24576:78:0],] 2024-11-21T17:46:24.842365Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2024-11-21T17:46:24.842367Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2024-11-21T17:46:24.842369Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2024-11-21T17:46:24.842372Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2024-11-21T17:46:24.842374Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2024-11-21T17:46:24.842377Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2024-11-21T17:46:24.842408Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2024-11-21T17:46:24.842567Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.842928Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T17:46:24.842954Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T17:46:24.843024Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T17:46:24.843031Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.843041Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T17:46:24.843048Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 >> KqpOlap::OlapRead_ScanQuery >> test.py::test[window-win_by_all_percentile_interval-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:46:08.018515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:46:08.018533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:46:08.018536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:46:08.018540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:46:08.018544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:46:08.018547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:46:08.018553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:46:08.018610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:08.026612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:08.026630Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:46:08.028162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:08.028225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:46:08.028270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:46:08.030239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:46:08.030283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:46:08.030352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:08.030501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:46:08.030982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:08.031144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:08.031150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:08.031158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:46:08.031162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:08.031166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:46:08.031193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:46:08.032648Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:46:08.044981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:46:08.045060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:08.045117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:46:08.045157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:46:08.045163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:08.045738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:08.045758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T17:46:08.045796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:08.045803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:46:08.045806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:46:08.045809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:46:08.046272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:08.046281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:46:08.046284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:46:08.046557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:08.046563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:08.046567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:46:08.046571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:46:08.046985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:46:08.047310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:46:08.047342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:46:08.047477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:08.047493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:08.047498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:46:08.047550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:46:08.047557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:46:08.047577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:46:08.047586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:46:08.047896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:08.047905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:08.047930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:08.047935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:46:08.047983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:46:08.047987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:46:08.047995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:46:08.047997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:46:08.048001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:46:08.048004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:46:08.048006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:46:08.048009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:46:08.048015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:46:08.048019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:46:08.048021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
69090816 2024-11-21T17:46:24.454379Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T17:46:24.454478Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:46:24.454498Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 287762810984 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:24.454507Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T17:46:24.454547Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:46:24.454564Z node 67 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T17:46:24.454588Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:24.454598Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:46:24.454605Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:24.454737Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.454962Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:46:24.455271Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:46:24.455278Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:46:24.455305Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:46:24.455322Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:46:24.455339Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:24.455343Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T17:46:24.455347Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:46:24.455351Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:202:2205], at schemeshard: 72057594046678944, txId: 1004, path id: 3 
2024-11-21T17:46:24.455398Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:46:24.455403Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:46:24.455416Z node 67 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:46:24.455420Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:46:24.455426Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:46:24.455431Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:46:24.455436Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:46:24.455440Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:46:24.455450Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:46:24.455454Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:46:24.455459Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T17:46:24.455463Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:46:24.455466Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:46:24.455469Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:46:24.455539Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.455548Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.455552Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:24.455556Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:46:24.455560Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:46:24.455614Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:46:24.455619Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:46:24.455629Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:46:24.455693Z node 67 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.455700Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.455703Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:24.455707Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:46:24.455710Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:46:24.455800Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.455808Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.455812Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:46:24.455815Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:46:24.455818Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:24.455826Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:46:24.456427Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.456471Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:46:24.456514Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:46:24.456680Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:46:24.456738Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:46:24.456745Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:46:24.456875Z node 67 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:46:24.456891Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:46:24.456895Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [67:389:2381] TestWaitNotification: OK 
eventTxId 1004 2024-11-21T17:46:24.456964Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:46:24.456993Z node 67 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DropMe" took 39us result status StatusPathDoesNotExist 2024-11-21T17:46:24.457028Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[window-win_func_into_udf--Debug] >> KqpOlapStatistics::StatsUsageNotPK [GOOD] >> KqpOlapBlobsSharing::SplitEmpty >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL >> KqpOlapBlobsSharing::BlobsSharingSplit3_1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStatistics::StatsUsageNotPK [GOOD] Test command err: Trying to start YDB, gRPC: 3112, MsgBus: 29921 2024-11-21T17:46:24.920555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790388950716204:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:24.920572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000df3/r3tmp/tmpf3gC54/pdisk_1.dat 2024-11-21T17:46:24.967669Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3112, node 1 2024-11-21T17:46:24.979595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:24.979604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:24.979605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:24.979635Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29921 TClient is connected to server localhost:29921 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:25.021424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:25.021447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:25.022557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:25.022772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:25.035368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:25.046578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.046645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.046689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.046719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.046740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.046761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.046781Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.046803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.046826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:25.046848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.046869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:25.046893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393245684153:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:25.050660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.050686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.050728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.050751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.050775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.050801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.050820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.050851Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.050875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:25.050901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.050927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:25.050951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393245684154:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:25.054426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.054448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.054485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.054507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.054530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.054552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.054574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.054601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790393245684155:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.054634Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790393245684155 ... 186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:25.059529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:25.059532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:25.059540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:25.059548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:25.059554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:25.059562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:25.059568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:25.059576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:25.059598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:25.059606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:25.059620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:25.059628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.059639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:25.059646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:25.059660Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:25.059668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:25.059677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:25.059684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:25.059776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:25.059784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:25.059802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:25.059810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:25.059822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:25.059830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:25.059838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:25.059846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:25.059852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:25.059860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:25.059865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:25.059868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:25.059889Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:25.059897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:25.059911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:25.059919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.059929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:25.059938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:25.059951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:25.059959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:25.059967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:25.059975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:25.094069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T17:46:25.218088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790393245684447:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.218141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.225816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790393245684481:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.225838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.227975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:25.234268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790393245684528:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.234289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.236399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:25.247148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790393245684574:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.247168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |75.9%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_1 Test command err: Trying to start YDB, gRPC: 6929, MsgBus: 22764 2024-11-21T17:46:18.234705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790359627152262:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:18.234846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e12/r3tmp/tmpYPU6t8/pdisk_1.dat 2024-11-21T17:46:18.290186Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6929, node 1 2024-11-21T17:46:18.302768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:18.302781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:18.302783Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:18.302816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22764 2024-11-21T17:46:18.335405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:18.335430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:18.336708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:18.367516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:18.380331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:18.387390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:18.390757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:18.390813Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T17:46:18.391512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.391582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.391640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.391667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.391689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.391719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.391746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.391770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.391802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.391824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.391842Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.391865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790359627152915:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.392314Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T17:46:18.392336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:46:18.392340Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:46:18.392358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:46:18.392387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:18.392403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:18.392406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:46:18.392414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:46:18.392421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:18.392426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:18.392431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:46:18.392444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:46:18.392451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:18.392455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:18.392460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:46:18.392468Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:46:18.392473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:18.392478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:18.392480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:46:18.392489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:18.392493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:18.392495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:46:18.392501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:18.392506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=Res ... 
RD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:23.408719Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:23.408734Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:23.408745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:23.408749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.408756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.408759Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:23.408772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:23.408789Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211183000 at tablet 72075186224037891 2024-11-21T17:46:23.408797Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:23.408799Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:23.408807Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:23.408809Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:23.408815Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.408817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.408820Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:23.408829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790359627152914:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:23.409167Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T17:46:23.409180Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T17:46:23.409188Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:23.409192Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:23.409198Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:23.409204Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:23.409207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:23.409210Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.409211Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.409214Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:23.409224Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:23.409234Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211183000 at tablet 72075186224037890 2024-11-21T17:46:23.409236Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:23.409239Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:23.409241Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:23.409244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:23.409246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.409247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:23.409249Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:23.409253Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790359627152934:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:23.482948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439790359627152915:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:23.482974Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439790359627152914:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:23.482978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439790359627152934:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:23.482981Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439790359627152913:2288];fline=actor.cpp:33;event=skip_flush_writing; 
2024-11-21T17:46:23.983189Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439790359627152915:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:23.983198Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439790359627152914:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:23.983224Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439790359627152934:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:23.983228Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439790359627152913:2288];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1266B1AA 1. /-S/util/system/yassert.cpp:55: Panic @ 0x12662916 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x13680663 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x12459D20 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:247: Execute_ @ 0x1245B8C4 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7F3631351D8F 11. ??:0: ?? @ 0x7F3631351E3F 12. ??:0: ?? @ 0x1179D028 >> KqpOlap::OlapRead_ScanQuery [GOOD] >> KqpOlapAggregations::Json_GetValue_Minus >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> KqpOlap::PredicatePushdown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapRead_ScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 64861, MsgBus: 13196 2024-11-21T17:46:25.193778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790393344896476:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:25.193918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000df1/r3tmp/tmpNkT6w3/pdisk_1.dat 2024-11-21T17:46:25.238281Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64861, node 1 2024-11-21T17:46:25.250779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:25.250794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:25.250796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:25.250830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13196 TClient is connected to server localhost:13196 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:25.294997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:25.295020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:25.296142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:25.326135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:25.337778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:25.346937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.347014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.347067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.347091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.347112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.347134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.347157Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.347174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.347191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:25.347212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.347263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:25.347287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393344897117:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:25.350607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.350633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.350668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.350686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.350708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.350729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.350754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.350776Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.350802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:25.350819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.350840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:25.350861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393344897121:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:25.354075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.354105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.354137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.354165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.354184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.354204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.354224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.354247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393344897118:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.354269Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:74397903933448971 ... :Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:25.384581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:25.384585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:25.384599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:25.384606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:25.384616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:25.384624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:25.384633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:25.384640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:25.384646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:25.384653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:25.384693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:25.384701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:25.384717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:25.384723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.384735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:25.384742Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:25.384757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:25.384764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:25.384774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:25.384781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:25.384818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:25.384828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:25.384835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:25.384838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:25.384849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:25.384852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:25.384859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:25.384862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:25.384869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:25.384877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:25.384880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:25.384882Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:25.384896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:25.384902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:25.384910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:25.384915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.384921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:25.384923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:25.384931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:25.384936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:25.384941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:25.384943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:25.426003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; 2024-11-21T17:46:25.472477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790393344897622:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.472538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.479716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790393344897640:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.479743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.479766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790393344897645:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:25.480496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:25.482008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790393344897647:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:46:25.630619Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211185537, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> KqpOlapAggregations::Json_GetValue_Minus [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter >> KqpOlapIndexes::IndexesModificationError >> KqpOlapAggregations::BlockGenericWithDistinct ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2024-11-21T17:45:03.549989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:03.550162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:03.550166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:03.550170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:03.550182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:03.550185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:03.550193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:03.550976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:03.565679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:03.565704Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:03.574462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:03.574570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:03.574771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:45:03.576955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:03.577040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:03.577097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.577184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:03.584466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:03.585629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046578944 2024-11-21T17:45:03.585642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:03.585664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:03.585671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:03.585676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:03.585721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.734826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:45:03.735184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.735359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:45:03.735397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:45:03.735536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.749172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.749212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:45:03.749264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.749275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:45:03.749279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:03.749284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:03.751490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.751504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:45:03.751509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:03.751994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.752003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:03.752008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:03.752014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:03.760765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:03.761549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:03.761598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:45:03.761817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:03.761824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:45:03.761828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.312141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:04.312658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 655 RawX2: 4294969525 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:04.312671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:04.313319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:04.313328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:04.313364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:04.313377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:04.316789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:04.316809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:04.316862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:04.316868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:664:2236], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:04.317537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:04.317547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:04.317562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2024-11-21T17:45:04.317566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:04.317573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:04.317581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:04.317586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:04.317591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:04.317604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:04.317611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:04.317614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:04.319315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:04.319349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:04.319355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:04.319361Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:04.319368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:04.319392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:04.319398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:521:2138] 2024-11-21T ... 
369766Z node 166 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:24.369795Z node 166 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:24.369800Z node 167 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:24.369826Z node 167 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:24.369830Z node 168 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:24.369856Z node 168 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:24.369860Z node 169 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:24.369886Z node 169 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:24.369896Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.369922Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.369936Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.369940Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.369943Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.369950Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.369954Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.369958Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.369964Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.369968Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.369971Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.369980Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.369995Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.370000Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.370009Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.370016Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.370020Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.370027Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.370030Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.370034Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.370041Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.370045Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:24.370048Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:24.370114Z node 165 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.370119Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.370122Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.370140Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 
pipe to hive, pipe:[165:957:2101] 2024-11-21T17:46:24.370146Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:24.370191Z node 167 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.370195Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.370197Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.370203Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[167:959:2101] 2024-11-21T17:46:24.370237Z node 168 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.370241Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.370243Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.370247Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[168:961:2101] 2024-11-21T17:46:24.370280Z node 169 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.370286Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.370288Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.370293Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[169:963:2101] 2024-11-21T17:46:24.370323Z node 170 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.370326Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.370329Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.370334Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[170:965:2101] 2024-11-21T17:46:24.370366Z node 171 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.370369Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.370371Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.370375Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[171:967:2101] 2024-11-21T17:46:24.370408Z node 164 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.370411Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.370413Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.370418Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[164:969:2101] 2024-11-21T17:46:24.371677Z node 166 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:24.371692Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:24.371694Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:24.371702Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[166:971:2101] 2024-11-21T17:46:24.376465Z node 169 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376489Z node 169 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [169:928:2098] 
2024-11-21T17:46:24.376515Z node 170 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376521Z node 170 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [170:929:2098] 2024-11-21T17:46:24.376537Z node 171 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376543Z node 171 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [171:930:2098] 2024-11-21T17:46:24.376568Z node 164 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376575Z node 164 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [164:923:2098] 2024-11-21T17:46:24.376591Z node 165 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376597Z node 165 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [165:924:2098] 2024-11-21T17:46:24.376617Z node 166 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376622Z node 166 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [166:925:2098] 2024-11-21T17:46:24.376647Z node 167 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376654Z node 167 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [167:926:2098] 2024-11-21T17:46:24.376678Z node 168 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:24.376687Z node 168 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [168:927:2098] 2024-11-21T17:46:24.377282Z node 168 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[168:961:2101] 2024-11-21T17:46:24.377370Z node 169 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[169:963:2101] 2024-11-21T17:46:24.377424Z node 170 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[170:965:2101] 2024-11-21T17:46:24.377477Z node 164 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[164:969:2101] 2024-11-21T17:46:24.377483Z node 165 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[165:957:2101] 2024-11-21T17:46:24.377492Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377498Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377500Z node 168 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:24.377511Z node 171 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[171:967:2101] 2024-11-21T17:46:24.377530Z node 166 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[166:971:2101] 2024-11-21T17:46:24.377551Z node 167 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[167:959:2101] 2024-11-21T17:46:24.377572Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377575Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377576Z node 169 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:24.377629Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377632Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377633Z node 170 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 
2024-11-21T17:46:24.377644Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377646Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377648Z node 164 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:24.377659Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377662Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377664Z node 171 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:24.377673Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377676Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377677Z node 165 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:24.377693Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377695Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377697Z node 166 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:24.377767Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:24.377772Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:24.377774Z node 167 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_GetValue_Minus [GOOD] Test command err: Trying to start YDB, gRPC: 3455, MsgBus: 1429 2024-11-21T17:46:26.048527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790394972712655:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:26.048796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000de6/r3tmp/tmp7p5LMp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3455, node 1 2024-11-21T17:46:26.106460Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:26.110074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:26.110085Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:26.110086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:26.110113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1429 TClient is connected to server localhost:1429 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:26.150026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:26.150062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:26.151283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:26.159041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:26.168624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:26.178901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.178984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.179032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.179062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.179089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.179117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.179144Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.179171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.179206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.179237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.179265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.179293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790394972713305:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.183198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.183226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.183267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.183294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.183320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.183345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.183371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.183396Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.183424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.183448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.183472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.183497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790394972713299:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.183970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:26.183990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:26.184003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:26.184008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:26.184025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:26.184036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:26.184048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:26.184053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:26.184070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:26.184080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:26.184087Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;pr ... cute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:26.192819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:26.192822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:26.192843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:26.192847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:26.192860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:26.192864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.192884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:26.192888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:26.192901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:26.192904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:26.192913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:26.192915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.'col-abc'"), JSON_VALUE(jsondoc, "$.'col-abc'") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.'col-abc'") = "val-abc" AND id = 1; 2024-11-21T17:46:26.338262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790394972713578:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:26.338284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790394972713603:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:26.338294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:26.339118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:26.340868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790394972713607:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:26.515976Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211186391, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_VALUE(jsonval, "$.'col-abc'"), JSON_VALUE(jsondoc, "$.'col-abc'") FROM `/Root/tableWithNulls` WHERE JSON_VALUE(jsonval, "$.'col-abc'") = "val-abc" AND id = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"KqpOlapApply == val-abc","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val-abc"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"Filter-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval"],"scan_by":["id [1, 1]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [1, 1]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","jsondoc","jsonval"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Function":{"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5}]},"Column":{"Id":7}}},{"Assign":{"Constant":{"Bytes":"val-abc"},"Column":{"Id":8}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":7},{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Filter":{"Predicate":{"Id":11}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"KqpOlapApply == val-abc","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 
'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1286) '('"_id" '"66246915-222b9ecf-a4ef2f8e-fcf2abf8") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $19 (Int32 '1)) (let $20 (Just $19)) (let $21 '($20 $19)) (let $22 (If (== $19 (Int32 '2147483647)) $21 '((+ $20 $19) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($21 $22)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (TupleType (OptionalType $6) $6)) (let $8 (TupleType (ListType (TupleType $7 $7)))) (let $9 '('"id" $6)) (let $10 (DataType 'Utf8)) (let $11 (OptionalType $10)) (let $12 (DqPhyStage '() (lambda '() (block '( (let $23 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $24 '('"id" '"jsondoc" '"jsonval")) (let $25 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $26 (OptionalType (DataType 'JsonDocument))) (let $27 '((VariantType (TupleType (TupleType (DataType 'Uint8) (DataType 'String)) $11)))) (let $28 (ResourceType '"JsonNode")) (let $29 (OptionalType $28)) (let $30 '((ResourceType '"JsonPath"))) (let $31 (DictType $10 $28)) (let $32 '($31)) (let $33 (CallableType '() $27 '($29) $30 $32)) (let $34 '('('"strict"))) (let $35 (Udf '"Json2.SqlValueConvertToUtf8" (Void) (VoidType) '"" $33 (VoidType) '"" $34)) (let $36 (lambda '($54) (block '( (let $55 '((DataType 'Json) '"" '1)) (let $56 (CallableType '() '($28) $55)) (let $57 (Udf '"Json2.Parse" (Void) (VoidType) '"" $56 (VoidType) '"" '())) (return (Just (Apply $57 $54))) )))) (let $37 (Nothing $29)) (let $38 (CallableType '() $30 '($10))) (let $39 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $38 (VoidType) '"" '())) (let $40 (Apply $39 (Utf8 '"$.'col-abc'"))) (let $41 (Dict $31)) (let $42 (lambda '($58) (Nothing $11))) (let $43 (lambda '($59) $59)) (let $44 (KqpWideReadOlapTableRanges $23 %kqp%tx_result_binding_0_0 $24 '() $25 (lambda '($45) (block '( (let $46 (DataType 'Json)) (let $47 (StructType $9 '('"jsondoc" $26) '('"jsonval" (OptionalType $46)))) (let $48 (KqpOlapApply $47 '('"jsonval") (lambda '($51) (block '( (let $52 (IfPresent $51 $36 $37)) (let $53 (Apply $35 $52 $40 $41)) (return (Visit $53 '0 $42 '1 $43)) ))))) (let $49 '('eq $48 (String '"val-abc"))) (let $50 '('?? 
$49 (Bool 'false))) (return (KqpOlapFilter $45 $50)) ))))) (return (FromFlow (NarrowMap $44 (lambda '($60 $61 $62) (block '( (let $63 (IfPresent $62 $36 $37)) (let $64 (Apply $35 $63 $40 $41)) (let $65 (Visit $64 '0 $42 '1 $43)) (let $66 (CallableType '() $27 '($26) $30 $32)) (let $67 (Udf '"Json2.JsonDocumentSqlValueConvertToUtf8" (Void) (VoidType) '"" $66 (VoidType) '"" $34)) (let $68 (Apply $67 $61 $40 $41)) (let $69 (Visit $68 '0 $42 '1 $43)) (return (AsStruct '('"column1" $65) '('"column2" $69) '('"id" $60))) )))))) ))) '('('"_logical_id" '1357) '('"_id" '"d3ddd87d-4ff460cf-59d73b82-49edc8b1")))) (let $13 (DqCnUnionAll (TDqOutput $12 '0))) (let $14 (DqPhyStage '($13) (lambda '($70) $70) '('('"_logical_id" '2217) '('"_id" '"4277bc19-1e794c98-a2832172-e4ca55e")))) (let $15 '('"id" '"column1" '"column2")) (let $16 (DqCnResult (TDqOutput $14 '0) $15)) (let $17 (KqpTxResultBinding $8 '0 '0)) (let $18 (KqpPhysicalTx '($12 $14) '($16) '('($5 $17)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $18) '((KqpTxResultBinding (ListType (StructType '('"column1" $11) '('"column2" $11) $9)) '1 '0)) '('('"type" '"scan_query")))) ) >> TSchemeShardSplitBySizeTest::SplitShardsWhithPgKey [GOOD] >> test.py::test[window-win_func_into_udf--Debug] [GOOD] >> test.py::test[window-win_func_into_udf--Plan] [GOOD] >> test.py::test[window-win_func_into_udf--Results] >> KqpOlapAggregations::Aggregation_Some_GroupByNull >> KqpOlapIndexes::IndexesModificationError [GOOD] >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::SplitShardsWhithPgKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:47.054397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:47.054426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:47.054431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:47.054435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:47.054448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:47.054452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:47.054461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:47.054548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:47.065665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:47.065686Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:47.068843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T17:45:47.069704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:47.069738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:47.071035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:47.072144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:47.072271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:47.072352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:47.074912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:47.075267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:47.075279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:47.075320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:47.075327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:47.075333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:47.075351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.076663Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:47.094374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:47.094479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.094545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:47.094620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:47.094630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.095987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:47.096029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:47.096082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.096093Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:47.096098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:47.096103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:47.097113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.097128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:47.097134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:47.097677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.097688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.097696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:47.097702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:47.098316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:47.101160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:47.101235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:47.101434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:47.101474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:47.101488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:47.101558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:47.101566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:47.101604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:47.101618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:47.102084Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:47.102092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:47.102141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:47.102146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:47.102253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:47.102263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:47.102275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:47.102279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:47.102286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:47.102291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:47.102295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:47.102299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:47.102314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:47.102320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:47.102340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:47.102654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:47.102665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:47.102670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:47.102675Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:47.102679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:47.102693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
rdId: 72075186233409574 } TablePartitions { EndOfRangeKeyPrefix: "\001\000\010\000\000\000\000\000\000\0008DL\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409575 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409576 } TableStats { DataSize: 119340 RowCount: 1000 IndexSize: 102 LastAccessTime: 0 LastUpdateTime: 1563 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 16 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 119340 IndexSize: 102 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 884 Memory: 1465344 Network: 0 Storage: 120971 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 119442 DataSize: 119340 IndexSize: 102 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 119442 DataSize: 119340 IndexSize: 102 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:46:26.824624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:26.824635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710672:0 ProgressState, operation type: TxSplitTablePartition, at tablet72057594046678944 2024-11-21T17:46:26.824674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 32 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:26.824691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 33 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:26.825884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:32 msg type: 268697601 2024-11-21T17:46:26.825914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:33 msg type: 268697601 2024-11-21T17:46:26.825925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72057594037968897 
2024-11-21T17:46:26.825930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:32, partId: 0 2024-11-21T17:46:26.825934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:33, partId: 0 2024-11-21T17:46:26.826037Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 32 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:26.826084Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 32, type DataShard, boot OK, tablet id 72075186233409577 2024-11-21T17:46:26.826163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2024-11-21T17:46:26.826169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:32, partId: 0 2024-11-21T17:46:26.826183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2024-11-21T17:46:26.826189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:46:26.826194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 32 TabletID: 72075186233409577 Origin: 72057594037968897 2024-11-21T17:46:26.826268Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 33 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:46:26.826293Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 33, type DataShard, boot OK, tablet id 72075186233409578 2024-11-21T17:46:26.826386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2024-11-21T17:46:26.826389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710672, shardIdx: 72057594046678944:33, partId: 0 2024-11-21T17:46:26.826397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2024-11-21T17:46:26.826399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710672:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:46:26.826402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710672:0 HandleReply 
TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 33 TabletID: 72075186233409578 Origin: 72057594037968897 2024-11-21T17:46:26.826409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710672:0 2 -> 3 2024-11-21T17:46:26.827456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:26.827650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:26.827700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:26.827704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:26.827722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186233409577 splitOp: 281474976710672:0 alterVersion: 2 at tablet: 72057594046678944 2024-11-21T17:46:26.827742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186233409578 splitOp: 281474976710672:0 alterVersion: 2 at tablet: 72057594046678944 2024-11-21T17:46:26.828794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409577 cookie: 72057594046678944:32 msg type: 269553152 2024-11-21T17:46:26.828844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409578 cookie: 72057594046678944:33 msg type: 269553152 2024-11-21T17:46:26.828864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409577 2024-11-21T17:46:26.828867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409578 2024-11-21T17:46:26.839741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710672 TabletId: 72075186233409577 2024-11-21T17:46:26.839763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId#281474976710672:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710672:0, at schemeshard: 72057594046678944 message# OperationCookie: 281474976710672 TabletId: 72075186233409577 2024-11-21T17:46:26.840549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944, message: OperationCookie: 281474976710672 TabletId: 72075186233409578 2024-11-21T17:46:26.840562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId#281474976710672:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710672:0, at schemeshard: 72057594046678944 message# OperationCookie: 281474976710672 TabletId: 72075186233409578 2024-11-21T17:46:26.840569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710672:0 3 -> 131 2024-11-21T17:46:26.841191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:26.841707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710672:0, at schemeshard: 72057594046678944 
2024-11-21T17:46:26.841774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710672:0, at schemeshard: 72057594046678944 2024-11-21T17:46:26.841781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976710672:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:46:26.841787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId#281474976710672:0 Starting split on src datashard 72075186233409573 splitOpId# 281474976710672:0 at tablet 72057594046678944 2024-11-21T17:46:26.842612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710672:0 from tablet: 72057594046678944 to tablet: 72075186233409573 cookie: 72057594046678944:28 msg type: 269553154 2024-11-21T17:46:26.842644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710672, partId: 0, tablet: 72075186233409573 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesModificationError [GOOD] Test command err: Trying to start YDB, gRPC: 13926, MsgBus: 25150 2024-11-21T17:46:27.011774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790401764363962:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:27.011994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dd7/r3tmp/tmp0EjqY7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13926, node 1 2024-11-21T17:46:27.067117Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:27.069442Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:27.069450Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:27.069451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:27.069471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25150 TClient is connected to server localhost:25150 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:27.113480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:27.113503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:27.114638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:27.139363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:27.157049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:27.167094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.167136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.167161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.167178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.167191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.167207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.167229Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.167245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.167260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.167277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.167291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.167307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790401764364622:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.170591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.170616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.170647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.170671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.170700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.170717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.170738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.170760Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.170782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.170802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.170828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.170844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790401764364624:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.171294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:27.171301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:27.171316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:27.171320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:27.171337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:27.171341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:27.171349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:27.171359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:27.171367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:27.171370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:27.171376Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240378 ... 11-21T17:46:27.177220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:27.177225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:27.177231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:27.177234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:27.177237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:27.177255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:27.177261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:27.177269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:27.177275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.177281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:27.177287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:27.177295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:27.177300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:27.177306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:27.177311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:27.177368Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:27.177374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:27.177379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:27.177381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:27.177388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:27.177394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:27.177398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:27.177404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:27.177407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:27.177413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:27.177416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:27.177419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:27.177435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:27.177441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:27.177449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:27.177455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.177461Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:27.177471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:27.177482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:27.177488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:27.177493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:27.177498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:27.216201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T17:46:27.317417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790401764364916:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.317455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.336465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:46:27.341982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790401764364969:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.342006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.346184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790401764364984:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.346203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.348575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790401764364993:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.348593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.349745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:27.354975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790401764365040:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.354993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.356219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715663:0, at schemeshard: 72057594046644480 >> test.py::test[blocks-distinct_opt_state_all--Debug] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Plan] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Results] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Debug] [GOOD] >> KqpOlapAggregations::Aggregation_MaxL [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Plan] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlockGenericWithDistinct [GOOD] Test command err: Trying to start YDB, gRPC: 23389, MsgBus: 17548 2024-11-21T17:46:26.987370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790393836966994:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:26.987389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dcf/r3tmp/tmp8vCh4G/pdisk_1.dat 2024-11-21T17:46:27.059591Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23389, node 1 2024-11-21T17:46:27.067523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:27.067539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:27.067540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:27.067571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17548 2024-11-21T17:46:27.089137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:27.089167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:27.090232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:27.136844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:27.155436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:27.162176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.162219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.162258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.162283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.162304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.162331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.162346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.162360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.162375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.162391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.162406Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.162422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790398131934933:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.164587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.164607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.164646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.164662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.164687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.164708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.164727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.164748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.164772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.164798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.164814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.164834Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790398131934935:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.165247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:27.165259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:27.165270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:27.165278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:27.165293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:27.165301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:27.165309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:27.165317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:27.165325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:27.165328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:27.165334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
24-11-21T17:46:27.171136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:27.171143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:27.171151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:27.171158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:27.171166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:27.171171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:27.171179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:27.171203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:27.171211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:27.171225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:27.171233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.171243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:27.171251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:27.171264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:27.171272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:27.171281Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:27.171288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; 2024-11-21T17:46:27.317345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790398131935240:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.317372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790398131935229:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.317393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.317998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:27.319532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790398131935243:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT COUNT(DISTINCT id) FROM `/Root/tableWithNulls` WHERE level = 5 AND Cast(id AS String) = "5"; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.id == \"5\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["id"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":5},"Column":{"Id":7}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":3},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":9}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":8},{"Id":9}]},"Column":{"Id":10}}},{"Filter":{"Predicate":{"Id":10}}},{"Projection":{"Columns":[{"Id":1}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.id == \"5\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node 
Type":"Filter"}],"Operators":[{"GroupBy":"item.id","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (Bool 'false)) (let $2 (lambda '($20) $20)) (let $3 '('('"_logical_id" '1067) '('"_id" '"7572281b-e489f657-7d49f0ab-c2ac4492") '('"_wide_channels" (StructType '('"id" (DataType 'Int32)))))) (let $4 (DqPhyStage '() (lambda '() (block '( (let $14 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $15 (KqpBlockReadOlapTableRanges $14 (Void) '('"id" '"level") '() '() (lambda '($16) (block '( (let $17 '('eq '"level" (Int32 '"5"))) (let $18 '('?? $17 $1)) (return (TKqpOlapExtractMembers (KqpOlapFilter $16 $18) '('"id"))) ))))) (return (FromFlow (WideCombiner (WideFilter (WideFromBlocks $15) (lambda '($19) (== (SafeCast $19 (DataType 'String)) (String '"5")))) '-1073741824 $2 (lambda '($21 $22) $21) (lambda '($23 $24 $25) $25) (lambda '($26 $27) $27)))) ))) $3)) (let $5 (DqCnHashShuffle (TDqOutput $4 '0) '('0))) (let $6 (Uint64 '1)) (let $7 (DataType 'Uint64)) (let $8 '('('"_logical_id" '1772) '('"_id" '"70e52605-1d95287c-9c0defd6-89fee57f") '('"_wide_channels" (StructType '('_yql_agg_0 (OptionalType $7)))))) (let $9 (DqPhyStage '($5) (lambda '($28) (block '( (let $29 (lambda '($32 $33))) (let $30 (WideCombiner (ToFlow $28) '"" $2 $29 $29 $2)) (let $31 (Condense1 (NarrowMap $30 (lambda '($34) (AsStruct '('"id" $34)))) (lambda '($35) $6) (lambda '($36 $37) $1) (lambda '($38 $39) (Inc $39)))) (return (FromFlow (ExpandMap $31 (lambda '($40) (Just $40))))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '0))) (let $11 (DqPhyStage '($10) (lambda '($41) (block '( (let $42 (WideCondense1 (ToFlow $41) (lambda '($44) $44) (lambda '($45 $46) $1) (lambda '($47 $48) (IfPresent $47 (lambda '($49) (IfPresent $48 (lambda '($50) (Just (AggrAdd $49 $50))) $47)) $48)))) (let $43 (Condense (NarrowMap (Take $42 $6) (lambda '($51) (AsStruct '('Count0 (Unwrap $51))))) (Nothing (OptionalType (StructType '('Count0 $7)))) (lambda '($52 $53) $1) (lambda '($54 $55) (Just $54)))) (return (FromFlow (Map $43 (lambda '($56) (AsStruct '('"column0" (Coalesce (Member $56 'Count0) (Uint64 '0)))))))) ))) '('('"_logical_id" '2507) '('"_id" '"9947253e-3acd3278-fab9b813-75d16bf1")))) (let $12 '($4 $9 $11)) (let $13 (DqCnResult (TDqOutput $11 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $12 '($13) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $7))) '0 '0)) '('('"type" '"query")))) ) >> KqpOlap::SimpleCount >> KqpOlapAggregations::BlocksRead >> DataShardVolatile::UpsertBrokenLockArbiter [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart >> test.py::test[window-win_func_into_udf--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MaxL [GOOD] Test command err: Trying to start YDB, gRPC: 21518, MsgBus: 11369 2024-11-21T17:46:14.457232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790342180918367:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.457248Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e41/r3tmp/tmpSdNA06/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21518, node 1 2024-11-21T17:46:14.516073Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:14.520331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.520342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.520343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.520370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11369 TClient is connected to server localhost:11369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:14.559038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.559057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:14.560221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:14.590602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:14.595584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:14.606034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.606087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.606119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.606141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.606155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.606184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.606204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.606226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.606242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.606259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.606273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.606288Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790342180919023:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.608150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.608166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.608181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.608193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.608205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.608217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.608248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.608270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.608284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.608307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.608326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.608343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790342180919024:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.608750Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:14.608761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:14.608768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:14.608771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:14.608781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:14.608787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:14.608792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.608796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.608801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.608810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.608816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:25.841790Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.019694Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.019716Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.111600Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.111622Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.193537Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.193561Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.275454Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.275484Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.380331Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:26.401696Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:26.432554Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.432584Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.514969Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.514994Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.598024Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.598051Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.680248Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.680281Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.763430Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.763460Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:26.833546Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:26.885714Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:26.885737Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. 
TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:27.034161Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:27.034192Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:27.127421Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:27.127447Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:27.219965Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:27.219988Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:27.302296Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:27.302323Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWQ0MzBhMTQtOGUzNDUxOWYtYzYzMmEyNmQtMWE3YjY0NGM=. TraceId : 01jd7x9yg0cra6x61j3fyswzcw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T17:46:27.399014Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:27.419859Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] |75.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |75.9%| [TM] {RESULT} ydb/core/tablet_flat/ut_large/unittest |75.9%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapIndexes::IndexesInLocalMetadata >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] |75.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> KqpOlap::SimpleCount [GOOD] >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream >> KqpOlapAggregations::BlocksRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T17:46:10.084910Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1732211170084901 2024-11-21T17:46:10.196585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790325676028054:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:10.196909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:10.202471Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790328937799921:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:10.202674Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e98/r3tmp/tmpP8sFQr/pdisk_1.dat 2024-11-21T17:46:10.224607Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:10.227428Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:10.249823Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21912, node 1 2024-11-21T17:46:10.263140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e98/r3tmp/yandexw1xTZ2.tmp 2024-11-21T17:46:10.263154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e98/r3tmp/yandexw1xTZ2.tmp 2024-11-21T17:46:10.263217Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e98/r3tmp/yandexw1xTZ2.tmp 2024-11-21T17:46:10.263262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:10.267717Z INFO: TTestServer started on Port 5691 GrpcPort 21912 TClient is connected to server localhost:5691 PQClient connected to localhost:21912 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:10.287642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:10.296362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:10.296387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:10.297885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:46:10.325650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:10.325681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:10.326643Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:10.326955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:46:10.526424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790325676028752:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.526469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790325676028734:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.526513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.527397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:10.528193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790325676028792:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.528244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.544436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790325676028763:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:46:10.559749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.559644Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790328937800234:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:10.560151Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDRiODc3OTEtNzU1YjczN2ItZWQ2NjVhODgtZTQ1Yjk3ZTA=, ActorId: [2:7439790328937800207:2277], ActorState: ExecuteState, TraceId: 01jd7x9q74drdjefv9dpnegtaj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:10.560618Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:10.607616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.628211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.631910Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790325676029119:2332], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:10.632296Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGJlNWMwMy04MWJkYmMyMC05Zjk0ZWU0Ni0zYWVmNDBkNg==, ActorId: [1:7439790325676028731:2299], ActorState: ExecuteState, TraceId: 01jd7x9q6x8z753y69x60np9h4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:10.632429Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:21912", true, true, 1000); 2024-11-21T17:46:10.683789Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd7x9qbc7kcf35nbzemrm8c1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZhNmE0M2ItOGJhYzNhNjItNDA2NmNlNmQtNjVjZDc2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790325676029220:2930] 2024-11-21T17:46:15.196477Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790325676028054:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:15.196522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:15.202648Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790328937799921:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:15.202686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:15.784786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:21912 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:15.797424Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:21912 MetaRequest { ... (2-2) 2024-11-21T17:46:26.360851Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T17:46:26.360863Z :DEBUG: [/Root] [/Root] [6d2a701-b944b50c-da9c09f5-583837bb] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T17:46:25.254000Z WriteTime: 2024-11-21T17:46:25.254000Z Ip: "ipv6:[::1]:58000" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:58000" } } } } 2024-11-21T17:46:26.360894Z :INFO: [/Root] [/Root] [6d2a701-b944b50c-da9c09f5-583837bb] Closing read session. Close timeout: 3.000000s 2024-11-21T17:46:26.360902Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T17:46:26.360908Z :INFO: [/Root] [/Root] [6d2a701-b944b50c-da9c09f5-583837bb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1229 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:26.361083Z :INFO: [/Root] [/Root] [6d2a701-b944b50c-da9c09f5-583837bb] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:26.361088Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T17:46:26.361093Z :INFO: [/Root] [/Root] [6d2a701-b944b50c-da9c09f5-583837bb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1230 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:26.361106Z :NOTICE: [/Root] [/Root] [6d2a701-b944b50c-da9c09f5-583837bb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:46:26.361149Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_18080197434397689881_v1 grpc read done: success# 0, data# { } 2024-11-21T17:46:26.361159Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18080197434397689881_v1 grpc read failed 2024-11-21T17:46:26.361164Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18080197434397689881_v1 grpc closed 2024-11-21T17:46:26.361179Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18080197434397689881_v1 is DEAD 2024-11-21T17:46:26.361454Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790392026845563:2503] disconnected; active server actors: 1 2024-11-21T17:46:26.361463Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790392026845563:2503] client user disconnected session shared/user_3_1_18080197434397689881_v1 2024-11-21T17:46:26.363793Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:26.363813Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_18080197434397689881_v1 2024-11-21T17:46:26.363821Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790392026845566:2506] destroyed 2024-11-21T17:46:26.363870Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_18080197434397689881_v1 2024-11-21T17:46:26.664743Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.664749Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.664753Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:26.664822Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:26.664917Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:26.664961Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.665023Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T17:46:26.665254Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.665257Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.665260Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:26.665300Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:26.665358Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:26.665376Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.665406Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T17:46:26.665623Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:26.665760Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T17:46:26.665774Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T17:46:26.665797Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:26.665802Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:26.665805Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:46:26.665809Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T17:46:26.666140Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.666142Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.666144Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:26.666180Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:26.666254Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:26.666269Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.666294Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:26.666381Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.666415Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:26.666436Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:26.666441Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:46:26.666447Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). 
Partition stream id: 1 2024-11-21T17:46:26.666699Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.666701Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.666703Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:26.666748Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:26.666809Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:26.666836Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:26.666856Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:26.666910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:26.666929Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:26.666955Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T17:46:26.666963Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:26.666967Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:26.666971Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T17:46:26.666990Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:46:26.666993Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:46:28.667586Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:28.667596Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:28.667600Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:28.667720Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:28.667846Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:28.667902Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:28.668063Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:28.668109Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:28.668148Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:28.668170Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TestRemoveTableBeforeIndexation [GOOD] Test command err: Trying to start YDB, gRPC: 11802, MsgBus: 13008 2024-11-21T17:46:18.237867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790360275531048:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:18.238026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e1a/r3tmp/tmppVaR7Q/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11802, node 1 2024-11-21T17:46:18.294615Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:18.296834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:18.296846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:18.296848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:18.296880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13008 TClient is connected to server localhost:13008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:18.338493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:18.338524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:18.339677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:18.370196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:18.371818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:18.377451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:18.389452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.389532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.389579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.389604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.389632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.389660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.389679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.389697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.389725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.389749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.389767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.389790Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.393206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.393231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.393268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.393284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.393301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.393336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.393355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.393372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.393390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.393406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.393422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.393438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.393930Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:18.393939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:18.393953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:18.393958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:18.393977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:18.393982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:18.393990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:18.393997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:18.394005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:18.394012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_ ... dicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T17:46:28.407877Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T17:46:28.407879Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:46:28.407883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:28.407889Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:28.407891Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2024-11-21T17:46:28.407893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:28.407895Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:28.407896Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:28.407898Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.407899Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.407902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:28.407901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:28.407908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2024-11-21T17:46:28.407908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:28.407912Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211188000 at tablet 72075186224037891 2024-11-21T17:46:28.407914Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:28.407916Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439790360275531713:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2024-11-21T17:46:28.407918Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211188000 at tablet 72075186224037889 2024-11-21T17:46:28.407919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:28.407921Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:28.407924Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:28.407926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:28.407928Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.407929Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.407930Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:28.407934Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790360275531706:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:28.408341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T17:46:28.408352Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T17:46:28.408357Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:46:28.408367Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:28.408373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:28.408376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:28.408378Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:28.408381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.408383Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.408385Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:28.408391Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:28.408399Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211188000 at tablet 72075186224037890 2024-11-21T17:46:28.408400Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:28.408402Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:28.408404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:28.408406Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:28.408407Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.408409Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:28.408411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:28.408414Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790360275531726:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; >> KqpOlap::OlapDeleteImmediate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::BlocksRead [GOOD] Test command err: Trying to start YDB, gRPC: 23012, MsgBus: 21426 2024-11-21T17:46:28.183834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790404019192440:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:28.183864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dcb/r3tmp/tmpkrJllE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23012, node 1 2024-11-21T17:46:28.252160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:28.252176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:28.252177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:28.252210Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:28.252518Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21426 2024-11-21T17:46:28.285422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:28.285443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:21426 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:46:28.286529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:28.295708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:28.307094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:28.316543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.316605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.316653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:28.316694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:28.316717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:28.316734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:28.316753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:28.316774Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:28.316797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:28.316817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.316846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:28.316870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790404019193083:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:28.319305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.319323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.319340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:28.319350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:28.319366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:28.319377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:28.319390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:28.319404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:28.319420Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:28.319438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.319452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:28.319465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404019193087:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:28.322299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.322316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.322346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:28.322365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:28.322387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:28.322408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:28.322427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:28.322451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790404019193082:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:28.322470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74397904040191930 ... 
.326054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:28.326059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.326065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:28.326067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:28.326075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:28.326080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:28.326084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:28.326086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:28.326113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:28.326119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:28.326123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:28.326126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:28.326133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:28.326138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:28.326142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:28.326144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:28.326147Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:28.326149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:28.326152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:28.326154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:28.326170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:28.326176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:28.326184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:28.326190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.326197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:28.326200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:28.326216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:28.326224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:28.326232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:28.326238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; PRAGMA Kikimr.OptEnableOlapPushdown = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; 2024-11-21T17:46:28.473497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7439790404019193375:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:28.473520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790404019193386:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:28.473527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:28.474213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:28.475647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790404019193389:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:28.620516Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211188526, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; PRAGMA UseBlocks; PRAGMA Kikimr.OptEnableOlapPushdown = "false"; SELECT id, resource_id FROM `/Root/tableWithNulls` WHERE level = 5; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level","resource_id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadColumns":["id","level","resource_id"],"SsaProgram":{"Version":5,"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.level == 5","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DqPhyStage '() (lambda '() (block '( (let $5 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $6 '('"id" '"level" '"resource_id")) (let $7 (KqpBlockReadOlapTableRanges $5 (Void) $6 '() '() (lambda '($9) $9))) (let $8 (lambda '($10 $11 $12 $13) (block '( (let $14 (BlockFunc '"Equals" (BlockType (OptionalType (DataType 'Bool))) $11 (AsScalar (Int32 '"5")))) (return $10 $12 (BlockCoalesce $14 (AsScalar (Bool 'false))) $13) )))) (return (FromFlow (NarrowMap (WideFromBlocks (BlockCompress (WideMap $7 $8) '2)) (lambda '($15 $16) (AsStruct '('"id" $15) '('"resource_id" $16)))))) ))) '('('"_logical_id" '564) '('"_id" '"d0e9841b-cd65d45a-289f2407-5b6c4cc7")))) (let $2 (DqCnUnionAll (TDqOutput $1 '"0"))) (let $3 (DqPhyStage '($2) (lambda '($17) $17) '('('"_logical_id" '613) '('"_id" '"bed2ed63-622f2518-efacf016-88938b32")))) (let $4 (DqCnResult (TDqOutput $3 '"0") '('"id" '"resource_id"))) (return (KqpPhysicalQuery '((KqpPhysicalTx '($1 $3) '($4) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"id" (DataType 'Int32)) '('"resource_id" (OptionalType (DataType 'Utf8))))) '"0" '"0")) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::SimpleCount [GOOD] 
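The BlocksRead block above embeds the optimizer's JSON Plan as one long line. A small sketch for walking such a blob, assuming Python 3 and only keys that actually appear in that output ("Plan", "Plans", "Node Type", "Operators", "Name"); plan_json below is a trimmed stand-in for the real blob:

import json

def walk_plan(node: dict, depth: int = 0) -> None:
    # Print the node type and operator names, then recurse into child plans.
    name = node.get("Node Type", "?")
    ops = [op.get("Name", "?") for op in node.get("Operators", [])]
    print("  " * depth + name + (f"  ops={ops}" if ops else ""))
    for child in node.get("Plans", []):
        walk_plan(child, depth + 1)

if __name__ == "__main__":
    plan_json = ('{"Plan":{"Node Type":"Query","Plans":[{"Node Type":"ResultSet",'
                 '"Plans":[{"Node Type":"Filter-TableFullScan",'
                 '"Operators":[{"Name":"Filter"},{"Name":"TableFullScan"}]}]}]}}')
    walk_plan(json.loads(plan_json)["Plan"])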
Test command err: Trying to start YDB, gRPC: 61340, MsgBus: 4261 2024-11-21T17:46:28.171196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790404064041219:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:28.171464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dcc/r3tmp/tmpvTHxln/pdisk_1.dat 2024-11-21T17:46:28.229294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61340, node 1 2024-11-21T17:46:28.244085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:28.244099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:28.244101Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:28.244140Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4261 2024-11-21T17:46:28.272456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:28.272485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:28.273550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:28.310197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:28.323407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:28.333189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.333268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.333313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:28.333340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:28.333376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:28.333401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:28.333422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:28.333447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:28.333471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:28.333500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.333522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:28.333543Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790404064041871:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:28.334011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:28.334029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:28.334041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:28.334057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:28.334073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:28.334082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:28.334091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:28.334101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:28.334109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:28.334112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:28.334118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:28.334122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:28.334179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:28.334189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:28.334206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:28.334211Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.334222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:28.334231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:28.334247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:28.334256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:28.334267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:28.334275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:28.337415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404064041872:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.337443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404064041872:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.337482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790404064041872:2289];tablet_id=7207518622403 ... 
211188609165,"d_finished":0,"c":0,"l":1732211188612922,"d":3757},"events":[{"name":"bootstrap","f":1732211188609751,"d_finished":380,"c":1,"l":1732211188610131,"d":380},{"a":1732211188612889,"name":"ack","f":1732211188610671,"d_finished":24,"c":1,"l":1732211188610695,"d":57},{"a":1732211188612886,"name":"processing","f":1732211188610668,"d_finished":324,"c":10,"l":1732211188612647,"d":360},{"name":"ProduceResults","f":1732211188610036,"d_finished":238,"c":13,"l":1732211188612909,"d":238},{"a":1732211188612909,"name":"Finish","f":1732211188612909,"d_finished":0,"c":0,"l":1732211188612922,"d":13},{"name":"task_result","f":1732211188610858,"d_finished":277,"c":9,"l":1732211188612647,"d":277}],"id":"72075186224037888::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2024-11-21T17:46:28.612980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042439:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:46:28.608994Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4552;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4552;selected_rows=0; 2024-11-21T17:46:28.612983Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042439:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:46:28.612989Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439790404064042439:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T17:46:28.612998Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439790404064042439:2468];TabletId=72075186224037888;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2024-11-21T17:46:28.613069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042440:2469];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:46:28.613080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042440:2469];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T17:46:28.613085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042440:2469];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T17:46:28.613092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439790404064042440:2469] finished for tablet 72075186224037889 2024-11-21T17:46:28.613096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439790404064042440:2469] send ScanData to [1:7439790404064042378:2462] txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037889 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:46:28.613144Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:7439790404064042440:2469] and sent to [1:7439790404064042378:2462] packs: 0 txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037889 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","f_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.003}],"full":{"a":1732211188609479,"name":"_full_task","f":1732211188609479,"d_finished":0,"c":0,"l":1732211188613104,"d":3625},"events":[{"name":"bootstrap","f":1732211188610148,"d_finished":215,"c":1,"l":1732211188610363,"d":215},{"a":1732211188613066,"name":"ack","f":1732211188610757,"d_finished":27,"c":1,"l":1732211188610784,"d":65},{"a":1732211188613065,"name":"processing","f":1732211188610755,"d_finished":239,"c":10,"l":1732211188612711,"d":278},{"name":"ProduceResults","f":1732211188610288,"d_finished":181,"c":13,"l":1732211188613090,"d":181},{"a":1732211188613090,"name":"Finish","f":1732211188613090,"d_finished":0,"c":0,"l":1732211188613104,"d":14},{"name":"task_result","f":1732211188610786,"d_finished":198,"c":9,"l":1732211188612710,"d":198}],"id":"72075186224037889::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2024-11-21T17:46:28.613154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042440:2469];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:46:28.609425Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=65368;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=65368;selected_rows=0; 2024-11-21T17:46:28.613156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042440:2469];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:46:28.613160Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439790404064042440:2469];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T17:46:28.613169Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:7439790404064042440:2469];TabletId=72075186224037889;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2024-11-21T17:46:28.613225Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042441:2470];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:46:28.613237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042441:2470];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T17:46:28.613245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042441:2470];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;); 2024-11-21T17:46:28.613251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439790404064042441:2470] finished for tablet 72075186224037890 2024-11-21T17:46:28.613256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:7439790404064042441:2470] send ScanData to [1:7439790404064042378:2462] txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037890 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:46:28.613289Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:7439790404064042441:2470] and sent to [1:7439790404064042378:2462] packs: 0 txId: 281474976715663 scanId: 1 gen: 1 tablet: 72075186224037890 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","f_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.003}],"full":{"a":1732211188609688,"name":"_full_task","f":1732211188609688,"d_finished":0,"c":0,"l":1732211188613262,"d":3574},"events":[{"name":"bootstrap","f":1732211188610432,"d_finished":200,"c":1,"l":1732211188610632,"d":200},{"a":1732211188613222,"name":"ack","f":1732211188610809,"d_finished":14,"c":1,"l":1732211188610823,"d":54},{"a":1732211188613220,"name":"processing","f":1732211188610808,"d_finished":205,"c":10,"l":1732211188612768,"d":247},{"name":"ProduceResults","f":1732211188610559,"d_finished":175,"c":13,"l":1732211188613250,"d":175},{"a":1732211188613250,"name":"Finish","f":1732211188613250,"d_finished":0,"c":0,"l":1732211188613262,"d":12},{"name":"task_result","f":1732211188610824,"d_finished":180,"c":9,"l":1732211188612768,"d":180}],"id":"72075186224037890::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;;) 2024-11-21T17:46:28.613299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042441:2470];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:46:28.609646Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4352;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4352;selected_rows=0; 2024-11-21T17:46:28.613305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:7439790404064042441:2470];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:46:28.613310Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439790404064042441:2470];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:72;profile=;; 2024-11-21T17:46:28.613318Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:7439790404064042441:2470];TabletId=72075186224037890;ScanId=1;TxId=281474976715663;ScanGen=1;fline=context.h:73;fetching=ef=(column_ids=3;column_names=uid;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,3;column_names=timestamp,uid;);;ff=(column_ids=3,4;column_names=level,uid;);;program_input=(column_ids=3,4;column_names=level,uid;);;; 2024-11-21T17:46:28.658704Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211188505, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 1378, MsgBus: 2382 2024-11-21T17:46:27.394944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790399930648846:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:27.395156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/000dce/r3tmp/tmp0lsRES/pdisk_1.dat 2024-11-21T17:46:27.441652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1378, node 1 2024-11-21T17:46:27.454807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:27.454829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:27.454830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:27.454857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2382 2024-11-21T17:46:27.496092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:27.496132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:27.497209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:27.512718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
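The TX_COLUMNSHARD_SCAN lines in the SimpleCount block above end with a per-scanner stats:{...} JSON blob. A sketch for summarising its events list, assuming Python 3; the field semantics ("d" as a duration, "c" as a counter) are inferred from this log rather than from YDB documentation, and stats_json is a trimmed stand-in with values copied from the output above:

import json

def summarize(stats: dict) -> None:
    # Report the overall duration and one line per named event.
    total = stats.get("full", {}).get("d")
    print(f"total d={total}")
    for ev in stats.get("events", []):
        print(f"  {ev.get('name'):<15} d={ev.get('d'):>6} c={ev.get('c')}")

if __name__ == "__main__":
    stats_json = ('{"full":{"name":"_full_task","d":3757},'
                  '"events":[{"name":"bootstrap","d":380,"c":1},'
                  '{"name":"ProduceResults","d":238,"c":13}]}')
    summarize(json.loads(stats_json))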
2024-11-21T17:46:27.520745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:27.529522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.529590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.529642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.529669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.529691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.529713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.529741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.529765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.529800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.529828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.529850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.529873Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790399930649486:2290];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.530346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:27.530359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:27.530370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:27.530374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:27.530393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:27.530398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:27.530408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:27.530421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:27.530433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:27.530442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:27.530449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:27.530457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:27.530508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:27.530521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:27.530537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:27.530541Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.530556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:27.530564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:27.530578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:27.530587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:27.530596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:27.530604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:27.533702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790399930649485:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.533725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790399930649485:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.533754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790399930649485:2289];tablet_id=720751862240378 ... 
line=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:27.541943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:27.541947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:27.541959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:27.541965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:27.541972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:27.541975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:27.541981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:27.541984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:27.541989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:27.541995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:27.542017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:27.542021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:27.542034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:27.542038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.542048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:27.542051Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:27.542064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:27.542067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:27.542075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:27.542078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; 2024-11-21T17:46:27.684436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790399930649788:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.684459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790399930649777:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.684511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.685104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:27.686733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790399930649791:2393], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:46:28.542847Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211187742, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["id [6, 6]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id","level"],"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":1,"Arguments":[{"Id":1}]},"Column":{"Id":7}},{"Function":{"Id":1,"Arguments":[{"Id":3}]},"Column":{"Id":8}}],"KeyColumns":[{"Id":3}]}},{"Projection":{"Columns":[{"Id":7},{"Id":8},{"Id":3}]}}]},"E-Cost":"No estimate","ReadRangesExpectedSize":1}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1274) '('"_id" '"add2ca55-32dd431e-c3ff9674-3d4a1a7e") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $25 (Int32 '"6")) (let $26 (Just $25)) (let $27 (Int32 '1)) (let $28 '($26 $27)) (let $29 (If (== $25 (Int32 '2147483647)) $28 '((+ $26 $27) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($28 $29)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) 
(let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 '('"level" $7)) (let $11 (StructType '('_yql_agg_0 $6) '('_yql_agg_1 $7) $10)) (let $12 '('('"_logical_id" '1333) '('"_id" '"2d37cbd4-f638d919-c24e35c5-829b7e02") '('"_wide_channels" $11))) (let $13 (DqPhyStage '() (lambda '() (block '( (let $30 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $31 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $32 (KqpWideReadOlapTableRanges $30 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $31 (lambda '($33) (block '( (let $34 '('_yql_agg_0 'some '"id")) (let $35 '('_yql_agg_1 'some '"level")) (return (TKqpOlapAgg $33 '($34 $35) '('"level"))) ))))) (return (FromFlow $32)) ))) $12)) (let $14 (DqCnHashShuffle (TDqOutput $13 '0) '('2))) (let $15 (StructType '('"column1" $6) '('"column2" $7) $10)) (let $16 '('('"_logical_id" '1871) '('"_id" '"7a6b5cf6-fa67e45c-5df61516-da68a0c9") '('"_wide_channels" $15))) (let $17 (DqPhyStage '($14) (lambda '($36) (block '( (let $37 (lambda '($44 $45 $46 $47) $45 $46)) (let $38 (lambda '($48 $49 $50 $51 $52 $53) $52 (Coalesce $53 $50))) (let $39 (lambda '($54 $55 $56) $55 $56 $54)) (let $40 (WideCombiner (ToFlow $36) '"" (lambda '($41 $42 $43) $43) $37 $38 $39)) (return (FromFlow (WideSort $40 '('('2 (Bool 'true)))))) ))) $16)) (let $18 (DqCnMerge (TDqOutput $17 '0) '('('2 '"Asc")))) (let $19 (DqPhyStage '($18) (lambda '($57) (FromFlow (NarrowMap (ToFlow $57) (lambda '($58 $59 $60) (AsStruct '('"column1" $58) '('"column2" $59) '('"level" $60)))))) '('('"_logical_id" '1883) '('"_id" '"d191ba60-7870eaa1-d00736be-58b22a8a")))) (let $20 '($13 $17 $19)) (let $21 '('"level" '"column1" '"column2")) (let $22 (DqCnResult (TDqOutput $19 '0) $21)) (let $23 (KqpTxResultBinding $9 '0 '0)) (let $24 (KqpPhysicalTx $20 '($22) '('($5 $23)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $24) '((KqpTxResultBinding (ListType $15) '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling |75.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::OlapDeleteImmediate [GOOD] >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream [GOOD] >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Debug] >> KqpOlapClickbench::ClickBenchSmoke >> KqpOlapSysView::StatsSysView >> DataShardVolatile::UpsertNoLocksArbiterRestart [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapDelete::DeleteWithDiffrentTypesPKColumns-isStream [GOOD] Test command err: Trying to start YDB, gRPC: 1382, MsgBus: 11768 2024-11-21T17:46:28.877473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790402677421389:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:28.877491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/000dc1/r3tmp/tmpK2ZbMA/pdisk_1.dat 2024-11-21T17:46:28.919555Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1382, node 1 2024-11-21T17:46:28.931015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:28.931028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:28.931029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:28.931059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11768 TClient is connected to server localhost:11768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:28.978798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:28.978832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:28.979855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:29.003641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:29.005455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:29.028276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:29.088411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:29.103702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:29.115713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:29.177388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790406972390247:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.177421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.200056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:29.205801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:29.212601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:29.267554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:29.322711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:29.332455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:29.340743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790406972390765:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.340766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.340774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790406972390770:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.341404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:29.345211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790406972390772:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } CREATE TABLE `/Root/ColumnTableTest` (time Timestamp NOT NULL, class Utf8 NOT NULL, uniq Utf8 NOT NULL, PRIMARY KEY (time, class, uniq)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:29.505398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:46:29.512665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:29.512704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:29.512729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:29.512743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:29.512755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:29.512774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:29.512784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:29.512800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:29.512816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:29.512842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:29.512857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:29.512876Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;self_id=[1:7439790406972391124:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:29.513195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:29.513208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:29.513216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:29.513219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:29.513228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:29.513231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:29.513237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:29.513240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:29.513246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:29.513248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:29.513252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:29.513255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:29.513299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:29.513309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:29.513319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:29.513321Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:29.513328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:29.513334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:29.513345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:29.513352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:29.513358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:29.513380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow13TimestampTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=312;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=312;columns=3; 2024-11-21T17:46:29.630117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037919;tx_state=complete;fline=interaction.h:353;batch=time: [ 2024-11-21 17:46:29.563076 ] class: [ "test" ] uniq: [ "test" ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715672}],"starts":[{"inc":{"count_not_include":1},"id":281474976715672}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715672}]},"p":{"include":2147483647}}]}; 2024-11-21T17:46:29.668744Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211189688, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::DisableBlockEngineInAggregationWithSpilling+AllowSpilling [GOOD] Test command err: Trying to start YDB, gRPC: 12514, MsgBus: 7814 2024-11-21T17:46:29.318427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790408582145104:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:29.318672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/000dbe/r3tmp/tmpjrF0ob/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12514, node 1 2024-11-21T17:46:29.375517Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:29.375786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:29.375795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:29.375797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:29.375837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7814 TClient is connected to server localhost:7814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:29.420220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:29.420264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:29.421340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:29.451457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:29.467289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:29.476996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:29.477054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:29.477085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:29.477106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:29.477117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:29.477129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:29.477144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:29.477160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:29.477176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:29.477192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:29.477211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:29.477226Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790408582145761:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:29.479364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:29.479380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:29.479398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:29.479409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:29.479429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:29.479444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:29.479457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:29.479470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:29.479480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:29.479494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:29.479507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:29.479520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790408582145762:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:29.482306Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:29.482322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:29.482346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:29.482363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:29.482372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:29.482380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:29.482387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:29.482399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:29.482412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790408582145765: ... 
ss=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:29.485626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:29.485633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:29.485637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:29.485645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:29.485653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:29.485657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:29.485659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:29.485674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:29.485680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:29.485689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:29.485695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:29.485700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:29.485703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:29.485711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:29.485716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:29.485722Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:29.485724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:29.485764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:29.485771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:29.485775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:29.485777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:29.485783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:29.485787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:29.485791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:29.485794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:29.485798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:29.485800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:29.485803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:29.485805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:29.485819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:29.485825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:29.485833Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:29.485836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:29.485842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:29.485844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:29.485852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:29.485857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:29.485862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:29.485864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:29.525065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T17:46:29.737408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790408582146211:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.737447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790408582146200:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.737475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:29.738071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:29.739516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790408582146214:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm >> KqpOlapAggregations::CountAllPushdown-UseLlvm >> KqpOlapWrite::WriteDeleteCleanGC |75.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |75.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> test.py::test[blocks-distinct_opt_state_all--Results] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Debug] >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] >> KqpOlapAggregations::CountAllPushdown-UseLlvm [GOOD] >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] Test command err: Trying to start YDB, gRPC: 22133, MsgBus: 18346 2024-11-21T17:46:18.303830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790363308707529:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:18.303847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e0e/r3tmp/tmpzAAueY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22133, node 1 2024-11-21T17:46:18.360201Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:18.364949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:18.364961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:18.364962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:18.364998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18346 TClient is connected to server localhost:18346 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:18.404211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:18.404271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:18.405344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:18.433212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:18.441558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:18.450788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.450842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.450867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.450885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.450900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.450915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.450929Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.450948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.450971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.450982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.450997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.451015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790363308708175:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.453926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:18.453949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:18.453983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:18.454001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:18.454026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:18.454052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:18.454074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:18.454095Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:18.454122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:18.454145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:18.454169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:18.454192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790363308708176:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:18.454663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:18.454678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:18.454691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:18.454695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:18.454714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:18.454716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:18.454722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:18.454726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:18.454731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:18.454734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:18.454737Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=720751862240378 ... DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.089038Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.233705Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.233725Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.315870Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.315894Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.398105Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.398130Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.480275Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.480290Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . 
DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.552496Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:29.573311Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:29.634961Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.634984Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.717074Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.717097Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.799209Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.799232Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.881506Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.881534Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.963754Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . 
TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:29.963784Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:29.995917Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:30.132154Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:30.132176Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:30.235214Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:30.235239Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:30.317476Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:30.317501Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:30.399924Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:30.399949Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. 
SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:30.483209Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:30.483228Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1893:3008], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa2926rn8r2c3nya8rv06. SessionId : ydb://session/3?node_id=2&id=N2NjZWI5YzItZGVkNmU2MGItODVhOWY2MDktYWVkN2M5N2I=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:30.544958Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:30.566910Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlap::CompositeRangeOlap >> KqpOlapSparsed::SwitchingStandalone >> test.py::test[blocks-interval_add_interval_scalar--Debug] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Plan] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Results] >> test.py::test[window-win_func_over_group_by_compl--Debug] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--Plan] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdown-UseLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 28742, MsgBus: 6872 2024-11-21T17:46:30.442818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790414959571429:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:30.442834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000da9/r3tmp/tmpxCTPro/pdisk_1.dat 2024-11-21T17:46:30.503454Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28742, node 1 2024-11-21T17:46:30.513185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:30.513199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:30.513200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:30.513236Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6872 TClient is connected to server localhost:6872 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:46:30.544391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:30.544447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:30.545491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:30.557271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:30.560461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:30.570148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:30.580445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.580517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.580577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.580600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.580623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.580642Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.580663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.580682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:30.580699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:30.580724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.580739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:30.580755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790414959572086:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:30.581192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:30.581206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:30.581217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:30.581221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:30.581237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:30.581245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:30.581254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:30.581263Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:30.581271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:30.581279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:30.581285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:30.581292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:30.581344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:30.581374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:30.581395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:30.581402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.581417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:30.581425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:30.581441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:30.581450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:30.581461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:30.581467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:30.583348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414959572083:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.583365Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414959572083:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... d=Chunks; 2024-11-21T17:46:30.589415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:30.589419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:30.589435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:30.589443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:30.589451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:30.589460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:30.589468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:30.589476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:30.589482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:30.589490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:30.589517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:30.589526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:30.589540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:30.589548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.589559Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:30.589567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:30.589577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:30.589584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:30.589590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:30.589596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2024-11-21T17:46:30.628747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T17:46:30.857414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790414959572531:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:30.857415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790414959572523:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:30.857429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:30.858136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:30.859664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790414959572537:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T17:46:31.012048Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211191000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '476) '('"_id" '"6ce2ea30-c76d98a2-e570671e-b6f1a74") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '995) '('"_id" '"c77566ba-d44c0f9-92afb19f-f8b47cb6")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1098) '('"_id" '"29046f5-9e1693e0-48afe0b8-14209fc3") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::CountAllPushdownBackwardCompatibility+EnableLlvm [GOOD] Test command err: Trying to start YDB, gRPC: 13892, MsgBus: 7852 2024-11-21T17:46:30.439083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790414258153183:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:30.439101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000db0/r3tmp/tmpBiiYxp/pdisk_1.dat 2024-11-21T17:46:30.481930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13892, node 1 2024-11-21T17:46:30.503808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:30.503824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:30.503826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:30.503862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7852 TClient is connected to server localhost:7852 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:30.540763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:30.540788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:30.541867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:30.565215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:30.567677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:30.573974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:30.585541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.585596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.585647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.585681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.585705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.585721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.585738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.585761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:30.585785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:30.585808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.585837Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:30.585864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790414258153846:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:30.589601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.589638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.589687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.589719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.589743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.589765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.589786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.589811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:30.589835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:30.589856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.589881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:30.589898Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790414258153847:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:30.590430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:30.590445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:30.590460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:30.590465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:30.590494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:30.590504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:30.590514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:30.590524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:30.590533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:30.590542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_swi ... 
hunks; 2024-11-21T17:46:30.600991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:30.600995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:30.601013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:30.601023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:30.601032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:30.601042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:30.601051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:30.601061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:30.601068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:30.601077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:30.601110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:30.601121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:30.601139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:30.601147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.601160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:30.601169Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:30.601186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:30.601195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:30.601206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:30.601215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:30.632907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1175912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8228912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=12930912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=12930912;columns=5; 2024-11-21T17:46:30.850388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790414258154298:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:30.850391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790414258154287:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:30.850405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:30.851018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:30.852717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790414258154301:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:31.295210Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211191000, txId: 18446744073709551615] shutting down JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":4,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["olapStore\/olapTable"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/olapStore\/olapTable","reads":[{"scan_by":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["timestamp (-∞, +∞)","uid (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"olapStore\/olapTable","ReadColumns":null,"SsaProgram":{"Version":5,"Command":[{"GroupBy":{"Aggregates":[{"Function":{"Id":2},"Column":{"Id":6}}]}},{"Projection":{"Columns":[{"Id":6}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"column0" (DataType 'Uint64))))) (let $1 (DataType 'Uint64)) (let $2 '('('"_logical_id" '475) '('"_id" '"ae48e8ea-9680da3e-30f4c901-46cf5181") '('"_wide_channels" (StructType '('_yql_agg_0 $1))))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $16 (KqpTable '"/Root/olapStore/olapTable" '"72057594046644480:3" '"" '1)) (let $17 (KqpWideReadOlapTableRanges $16 (Void) '() '() '() (lambda '($18) (TKqpOlapAgg $18 '('('_yql_agg_0 '"count" '"*")) '())))) (return (FromFlow $17)) ))) $2)) (let $4 (DqCnUnionAll (TDqOutput $3 '0))) (let $5 (DqPhyStage '($4) (lambda '($19) (block '( (let $20 (Bool 'false)) (let $21 (WideCondense1 (ToFlow $19) (lambda '($23) $23) (lambda '($24 $25) $20) (lambda '($26 $27) (AggrAdd $26 $27)))) (let $22 (Condense (NarrowMap (Take $21 (Uint64 '1)) (lambda '($28) (AsStruct '('Count0 $28)))) (Nothing (OptionalType (StructType '('Count0 $1)))) (lambda '($29 $30) $20) (lambda '($31 $32) (Just $31)))) (return (FromFlow (Map $22 (lambda '($33) (AsList (AsStruct '('"column0" (Coalesce (Member $33 
'Count0) (Uint64 '0))))))))) ))) '('('"_logical_id" '994) '('"_id" '"a5fc51b6-ae96729d-28665719-f245bfa7")))) (let $6 (DqCnValue (TDqOutput $5 '0))) (let $7 '('('"type" '"scan"))) (let $8 (KqpPhysicalTx '($3 $5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (ListType (StructType '('"column0" $1)))) (let $11 '('('"_logical_id" '1097) '('"_id" '"88744a43-da065619-998672a6-e366149d") '('"_partition_mode" '"single"))) (let $12 (DqPhyStage '() (lambda '() (Iterator %kqp%tx_result_binding_0_0)) $11)) (let $13 (DqCnResult (TDqOutput $12 '0) '('"column0"))) (let $14 (KqpTxResultBinding $10 '0 '0)) (let $15 (KqpPhysicalTx '($12) '($13) '('($9 $14)) $7)) (return (KqpPhysicalQuery '($8 $15) '((KqpTxResultBinding $10 '1 '0)) '('('"type" '"scan_query")))) ) >> KqpOlapSysView::StatsSysViewRanges [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewRanges [GOOD] Test command err: Trying to start YDB, gRPC: 15442, MsgBus: 17050 2024-11-21T17:46:14.396497Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790345430485138:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.396784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e44/r3tmp/tmpzRdzya/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15442, node 1 2024-11-21T17:46:14.457133Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:14.464296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.464310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.464312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.464358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17050 TClient is connected to server localhost:17050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:46:14.497565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.497594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:14.498711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:14.527068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:14.539502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:14.551125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.551189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.551235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.551268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.551290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.551312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.551336Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.551366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.551385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.551402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.551425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.551457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790345430485791:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:14.551978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:14.551994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:14.552008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:14.552018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:14.552035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:14.552044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:14.552053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.552067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.552077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.552081Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.552087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:14.552091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:14.552154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:14.552165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:14.552181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.552189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.552201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.552209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.552238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:14.552247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.552258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.552267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:14.555335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345430485792:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.555357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345430485792:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.555386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790345430485792:2289];tablet_id=7207518622 ... 
d: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 7 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:7:9:0:24960:0] EntityType: COL BlobRangeSize: 5744 PathId: 3 Rows: 670 RawBytes: 2680 BlobRangeOffset: 23224 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 7 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:7:9:0:24960:0] EntityType: COL BlobRangeSize: 1736 PathId: 3 Rows: 670 RawBytes: 761744 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 7 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:7:9:0:24960:0] EntityType: COL BlobRangeSize: 9504 PathId: 3 Rows: 683 RawBytes: 5464 BlobRangeOffset: 20688 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 9 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:9:11:0:25392:0] EntityType: COL BlobRangeSize: 2936 PathId: 3 Rows: 683 RawBytes: 5088 BlobRangeOffset: 15520 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 9 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:9:11:0:25392:0] EntityType: COL BlobRangeSize: 5168 PathId: 3 Rows: 683 RawBytes: 10245 BlobRangeOffset: 9680 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 9 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:9:11:0:25392:0] EntityType: COL BlobRangeSize: 5840 PathId: 3 Rows: 683 RawBytes: 2732 BlobRangeOffset: 23624 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 9 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:9:11:0:25392:0] EntityType: COL BlobRangeSize: 1768 PathId: 3 Rows: 683 RawBytes: 778618 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 9 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:9:11:0:25392:0] EntityType: COL BlobRangeSize: 9680 PathId: 3 Rows: 638 RawBytes: 5104 BlobRangeOffset: 19440 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 11 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:11:13:0:23856:0] EntityType: COL BlobRangeSize: 2752 PathId: 3 Rows: 638 RawBytes: 4745 BlobRangeOffset: 14584 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 11 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:11:13:0:23856:0] EntityType: COL BlobRangeSize: 4856 PathId: 3 Rows: 638 RawBytes: 9570 BlobRangeOffset: 9096 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 11 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:11:13:0:23856:0] EntityType: COL BlobRangeSize: 5488 PathId: 3 Rows: 638 RawBytes: 2552 BlobRangeOffset: 22192 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 11 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:11:13:0:23856:0] EntityType: COL BlobRangeSize: 1664 PathId: 3 Rows: 638 RawBytes: 726330 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 11 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:11:13:0:23856:0] EntityType: COL BlobRangeSize: 9096 PathId: 3 Rows: 684 RawBytes: 5472 
BlobRangeOffset: 20720 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 13 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:13:15:0:25440:0] EntityType: COL BlobRangeSize: 2936 PathId: 3 Rows: 684 RawBytes: 5081 BlobRangeOffset: 15552 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 13 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:13:15:0:25440:0] EntityType: COL BlobRangeSize: 5168 PathId: 3 Rows: 684 RawBytes: 10260 BlobRangeOffset: 9696 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 13 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:13:15:0:25440:0] EntityType: COL BlobRangeSize: 5856 PathId: 3 Rows: 684 RawBytes: 2736 BlobRangeOffset: 23656 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 13 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:13:15:0:25440:0] EntityType: COL BlobRangeSize: 1784 PathId: 3 Rows: 684 RawBytes: 778900 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 13 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:13:15:0:25440:0] EntityType: COL BlobRangeSize: 9696 PathId: 3 Rows: 711 RawBytes: 5688 BlobRangeOffset: 21440 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 15 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:15:17:0:26280:0] EntityType: COL BlobRangeSize: 3048 PathId: 3 Rows: 711 RawBytes: 5287 BlobRangeOffset: 16104 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 15 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:15:17:0:26280:0] EntityType: COL BlobRangeSize: 5336 PathId: 3 Rows: 711 RawBytes: 10665 BlobRangeOffset: 10032 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 15 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:15:17:0:26280:0] EntityType: COL BlobRangeSize: 6072 PathId: 3 Rows: 711 RawBytes: 2844 BlobRangeOffset: 24488 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 15 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:15:17:0:26280:0] EntityType: COL BlobRangeSize: 1792 PathId: 3 Rows: 711 RawBytes: 811035 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 15 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:15:17:0:26280:0] EntityType: COL BlobRangeSize: 10032 PathId: 3 Rows: 653 RawBytes: 5224 BlobRangeOffset: 19856 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 17 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:17:19:0:24456:0] EntityType: COL BlobRangeSize: 2816 PathId: 3 Rows: 653 RawBytes: 4866 BlobRangeOffset: 14864 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 17 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:17:19:0:24456:0] EntityType: COL BlobRangeSize: 4992 PathId: 3 Rows: 653 RawBytes: 9795 BlobRangeOffset: 9272 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 17 InternalEntityId: 3 ChunkIdx: 0 BlobId: 
[72075186224037890:1:17:19:0:24456:0] EntityType: COL BlobRangeSize: 5592 PathId: 3 Rows: 653 RawBytes: 2612 BlobRangeOffset: 22672 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 17 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:17:19:0:24456:0] EntityType: COL BlobRangeSize: 1784 PathId: 3 Rows: 653 RawBytes: 744147 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 17 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:17:19:0:24456:0] EntityType: COL BlobRangeSize: 9272 PathId: 3 Rows: 631 RawBytes: 5048 BlobRangeOffset: 19232 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: timestamp PortionId: 19 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:19:21:0:23664:0] EntityType: COL BlobRangeSize: 2728 PathId: 3 Rows: 631 RawBytes: 4694 BlobRangeOffset: 14416 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: resource_id PortionId: 19 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:19:21:0:23664:0] EntityType: COL BlobRangeSize: 4816 PathId: 3 Rows: 631 RawBytes: 9465 BlobRangeOffset: 8992 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: uid PortionId: 19 InternalEntityId: 3 ChunkIdx: 0 BlobId: [72075186224037890:1:19:21:0:23664:0] EntityType: COL BlobRangeSize: 5424 PathId: 3 Rows: 631 RawBytes: 2524 BlobRangeOffset: 21960 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: level PortionId: 19 InternalEntityId: 4 ChunkIdx: 0 BlobId: [72075186224037890:1:19:21:0:23664:0] EntityType: COL BlobRangeSize: 1704 PathId: 3 Rows: 631 RawBytes: 720642 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: message PortionId: 19 InternalEntityId: 5 ChunkIdx: 0 BlobId: [72075186224037890:1:19:21:0:23664:0] EntityType: COL BlobRangeSize: 8992 PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId == UInt64("3") AND Activity == 1 GROUP BY TabletId, PathId, Kind ORDER BY TabletId, Kind RESULT: 2024-11-21T17:46:27.470923Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211185402, txId: 281474976715668] shutting down TabletId: 72075186224037888 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: 2024-11-21T17:46:29.449894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:46:29.449909Z node 1 :IMPORT WARN: Table profiles were not loaded TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037891 Kind: INSERTED PathId: 4 TabletId: 72075186224037889 Kind: INSERTED PathId: 4 TabletId: 72075186224037888 Kind: INSERTED PathId: 4 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 2024-11-21T17:46:29.568166Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211187507, txId: 281474976715670] shutting down ==================================== QUERY: SELECT PathId, Kind, TabletId FROM `/Root/olapStore/.sys/store_primary_index_stats` WHERE PathId > UInt64("0") AND PathId < UInt32("4") OR PathId > UInt64("4") AND PathId <= UInt64("5") GROUP BY PathId, Kind, TabletId ORDER BY PathId DESC, Kind DESC, TabletId DESC ; RESULT: TabletId: 72075186224037891 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 5 TabletId: 72075186224037888 Kind: INSERTED PathId: 5 TabletId: 72075186224037890 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Kind: INSERTED PathId: 3 TabletId: 72075186224037888 Kind: INSERTED PathId: 3 2024-11-21T17:46:31.748552Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211189622, txId: 281474976715672] shutting down >> KqpOlap::CompositeRangeOlap [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Debug] >> DataShardVolatile::UpsertBrokenLockArbiterRestart [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 >> KqpInplaceUpdate::Negative_SingleRowWithValueCast >> test.py::test[aggregate-group_by_ru_with_window_func--Debug] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Plan] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CompositeRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 13546, MsgBus: 27296 2024-11-21T17:46:31.603833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790417746942192:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:31.604124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d8a/r3tmp/tmpDHoT7J/pdisk_1.dat 2024-11-21T17:46:31.657708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13546, node 1 2024-11-21T17:46:31.666104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:31.666117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:31.666119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:31.666155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27296 TClient is connected to server localhost:27296 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:31.705245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:31.705266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:31.706357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:31.735363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:31.741281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:31.751483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:31.751546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:31.751587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:31.751603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:31.751617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:31.751631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:31.751647Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:31.751663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:31.751687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:31.751708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:31.751725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:31.751740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790417746942848:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:31.752071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:31.752084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:31.752092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:31.752094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:31.752106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:31.752108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:31.752114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:31.752117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:31.752126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:31.752128Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:31.752132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:31.752134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:31.752180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:31.752192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:31.752205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:31.752207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:31.752213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:31.752215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:31.752226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:31.752297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:31.752314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:31.752318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:31.754767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790417746942850:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:31.754785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790417746942850:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:31.754808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790417746942850:2289];tablet_id=7207518622 ... 
1-21T17:46:31.760017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:31.760024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:31.760032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:31.760035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:31.760043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:31.760051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:31.760055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:31.760063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:31.760084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:31.760093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:31.760105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:31.760108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:31.760116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:31.760120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:31.760133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:31.760140Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:31.760149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:31.760152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:31.760191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:31.760198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:31.760204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:31.760207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:31.760218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:31.760225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:31.760244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:31.760247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:31.760253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:31.760261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:31.760266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:31.760271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:31.760291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:31.760298Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:31.760310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:31.760317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:31.760326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:31.760333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:31.760345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:31.760352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:31.760359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:31.760361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:31.799179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2568;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2568;columns=5; 2024-11-21T17:46:31.868641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790417746943144:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:31.868663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790417746943170:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:31.868669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:31.869422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:31.871033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790417746943173:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:31.991255Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211192000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:32.028901Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211192019, txId: 18446744073709551615] shutting down 2024-11-21T17:46:32.063228Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211192061, txId: 18446744073709551615] shutting down 2024-11-21T17:46:32.102087Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211192103, txId: 18446744073709551615] shutting down 2024-11-21T17:46:32.138295Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211192138, txId: 18446744073709551615] shutting down 2024-11-21T17:46:32.180742Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211192173, txId: 18446744073709551615] shutting down 2024-11-21T17:46:32.222308Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211192215, txId: 18446744073709551615] shutting down >> KqpEffects::InsertRevert_Literal_Duplicates >> test.py::test[aggregate-group_by_ru_with_window_func--Results] >> KqpInplaceUpdate::SingleRowPgNotNull ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit3_1 Test command err: Trying to start YDB, gRPC: 64701, MsgBus: 7385 2024-11-21T17:46:25.789231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790390079477577:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:25.789257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000de7/r3tmp/tmpOxGITe/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64701, node 1 2024-11-21T17:46:25.842793Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:25.847039Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:25.847051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:25.847052Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:25.847078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7385 TClient is connected to server localhost:7385 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:25.889432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:25.889458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:25.890658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:25.895823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:25.905564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:25.911858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:46:25.915101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:46:25.915152Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2024-11-21T17:46:25.915796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.915843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.915903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.915925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.915945Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.915970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.915990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.916012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.916039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:25.916060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.916080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:25.916100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790390079478223:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:25.916683Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037889 2024-11-21T17:46:25.916706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:46:25.916710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:46:25.916728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:46:25.916752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:25.916765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:25.916768Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:46:25.916775Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:46:25.916782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:25.916791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:25.916794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:46:25.916807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:46:25.916821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:25.916829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:25.916831Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:46:25.916838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:46:25.916851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:25.916863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:25.916870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:46:25.916878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:25.916886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:25.916887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:46:25.916892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:25.916896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=Rest ... 
eason=no_changes; 2024-11-21T17:46:30.930938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T17:46:30.930951Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T17:46:30.930957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T17:46:30.930967Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:30.930971Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:30.930980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:30.930991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:30.930996Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:30.930999Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931001Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931005Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:30.931018Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:30.931034Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211190000 at tablet 72075186224037890 2024-11-21T17:46:30.931041Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:30.931045Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:30.931049Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:30.931051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:30.931052Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931055Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:30.931060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790390079478255:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:30.931446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2024-11-21T17:46:30.931460Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:46:30.931464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:30.931471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:30.931479Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:30.931489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:30.931493Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931498Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:30.931508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:30.931528Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211190000 at tablet 72075186224037891 2024-11-21T17:46:30.931537Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:30.931541Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:30.931544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:30.931548Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:30.931550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931554Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:30.931556Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:30.931561Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790390079478222:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:31.125020Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439790390079478221:2288];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:31.125020Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439790390079478223:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:31.125032Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439790390079478222:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:31.125037Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439790390079478255:2291];fline=actor.cpp:33;event=skip_flush_writing; 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1266B1AA 1. /-S/util/system/yassert.cpp:55: Panic @ 0x12662916 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x13680663 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x12459D20 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:240: Execute_ @ 0x1245B6F9 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7FBDDAC3FD8F 11. ??:0: ?? @ 0x7FBDDAC3FE3F 12. ??:0: ?? 
@ 0x1179D028 >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpInplaceUpdate::Negative_SingleRowWithValueCast [GOOD] |76.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |76.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> test.py::test[blocks-lazy_nonstrict_nested--Debug] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Plan] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] >> test.py::test[window-win_func_over_group_by_compl--Results] [GOOD] >> KqpEffects::UpdateOn_Params >> test.py::test[window-yql-18879-default.txt-Debug] |76.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |76.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> KqpInplaceUpdate::SingleRowPgNotNull [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast [GOOD] Test command err: Trying to start YDB, gRPC: 25602, MsgBus: 1271 2024-11-21T17:46:32.999546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790420912569663:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:32.999603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00187e/r3tmp/tmpzH3mQ9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25602, node 1 2024-11-21T17:46:33.055818Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:33.063012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:33.063040Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:33.063042Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:33.063083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1271 TClient is connected to server localhost:1271 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:33.099141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:33.099177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:33.100334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:33.127290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.138569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.154000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.172469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.183512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.296050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790425207538350:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.296086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.336048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.342483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.397697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.405797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.412876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.420370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.428491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790425207538865:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.428516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.428595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790425207538870:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.429231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:33.433459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790425207538872:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:33.606533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> DataShardVolatile::UpsertDependenciesShardsRestart [GOOD] >> KqpWrite::UpsertNullKey >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit |76.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |76.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull [GOOD] Test command err: Trying to start YDB, gRPC: 13241, MsgBus: 31053 2024-11-21T17:46:33.479384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790425897260509:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:33.479436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001870/r3tmp/tmp7ti8Ny/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13241, node 1 2024-11-21T17:46:33.533513Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:33.534150Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:33.534164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:33.534166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:33.534208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31053 TClient is connected to server localhost:31053 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:33.579165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:33.579197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:33.580336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:33.602636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.613304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.626926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.645174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.654309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.730584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790425897261838:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.730610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.763761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.770877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.784722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.799362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.813034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.827051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.842627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790425897262356:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.842653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.842696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790425897262361:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.843483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:33.846225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790425897262363:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:34.001043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::UpsertDependenciesShardsRestart [GOOD] Test command err: 2024-11-21T17:46:00.853395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:00.853886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:00.853911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001735/r3tmp/tmpNAfZt9/pdisk_1.dat 2024-11-21T17:46:00.961845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:00.983242Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:01.028556Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:46:01.028871Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:46:01.028917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:01.028934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:01.039564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:01.275862Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:46:01.275895Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:46:01.275935Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:610:2519] 2024-11-21T17:46:01.286233Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:46:01.286467Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:46:01.286483Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:46:01.286544Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:46:01.286619Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:46:01.286636Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T17:46:01.287139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:01.287292Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:46:01.287418Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:46:01.287428Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:46:01.301567Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:01.301787Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:01.301878Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:635:2540] 2024-11-21T17:46:01.301928Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:01.302917Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:01.310209Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:01.310252Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:01.310414Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:01.310438Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:01.310445Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:01.310494Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:01.314423Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:01.314501Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:01.314537Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:651:2549] 2024-11-21T17:46:01.314543Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:01.314551Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:01.314556Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:01.314696Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:635:2540], Recipient [1:635:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:01.314706Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:01.314766Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:01.314789Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:01.314891Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269877761, Sender [1:642:2544], Recipient [1:635:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:01.314897Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:01.314904Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:632:2538], serverId# [1:642:2544], sessionId# [0:0:0] 2024-11-21T17:46:01.314912Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:01.314919Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:01.314926Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:46:01.314932Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:46:01.314937Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:46:01.314942Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:01.314948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:01.314960Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:642:2544] 2024-11-21T17:46:01.314964Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:01.314987Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:01.315053Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:46:01.315063Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:01.315080Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:01.315089Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:46:01.315093Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:46:01.315099Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:46:01.315103Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:46:01.315151Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:01.315156Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:46:01.315160Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:46:01.315164Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:46:01.315177Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:46:01.315181Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:46:01.315185Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to 
execution unit WaitForPlan 2024-11-21T17:46:01.315189Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:46:01.315194Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:01.315442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:652:2550], Recipient [1:635:2540]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:46:01.315451Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:01.325771Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:01.325803Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:46:01.325809Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:46:01.325829Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224 ... count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:34.406930Z node 17 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2024-11-21T17:46:34.406939Z node 17 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [17:1091:2865] 2024-11-21T17:46:34.406943Z node 17 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:46:34.406948Z node 17 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2024-11-21T17:46:34.406953Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:46:34.407009Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [17:1002:2802], Recipient [17:976:2789]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037890 OperationCookie: 281474976715664 2024-11-21T17:46:34.407019Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037890 for split OpId 281474976715664 2024-11-21T17:46:34.407056Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [17:1002:2802], Recipient [17:1002:2802]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:34.407061Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:34.407118Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037890] received poison pill [17:1085:2859] 2024-11-21T17:46:34.407125Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037890] notify reset [17:1085:2859] 2024-11-21T17:46:34.407147Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [17:1085:2859], Recipient [17:976:2789]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:46:34.407150Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:46:34.407156Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:46:34.407163Z node 17 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:34.407170Z node 17 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T17:46:34.407175Z node 17 :TX_DATASHARD TRACE: 
TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T17:46:34.407178Z node 17 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T17:46:34.407183Z node 17 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2024-11-21T17:46:34.407189Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:46:34.407196Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [17:723:2071] 2024-11-21T17:46:34.407199Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [17:723:2071] 2024-11-21T17:46:34.407213Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send [17:726:2071] 2024-11-21T17:46:34.407216Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [17:726:2071] 2024-11-21T17:46:34.407280Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [17:24:2071], Recipient [17:1002:2802]: {TEvRegisterTabletResult TabletId# 72075186224037890 Entry# 3000} 2024-11-21T17:46:34.407284Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T17:46:34.407290Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 3000 2024-11-21T17:46:34.407294Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:46:34.407303Z node 17 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715664 2024-11-21T17:46:34.407308Z node 17 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2024-11-21T17:46:34.407319Z node 17 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:34.407327Z node 17 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2024-11-21T17:46:34.407334Z node 17 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [17:1092:2866] 2024-11-21T17:46:34.407338Z node 17 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2024-11-21T17:46:34.407342Z node 17 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2024-11-21T17:46:34.407345Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T17:46:34.407368Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [17:1007:2805], Recipient [17:976:2789]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037891 OperationCookie: 281474976715664 2024-11-21T17:46:34.407373Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715664 2024-11-21T17:46:34.407398Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [17:1087:2861], Recipient [17:1002:2802]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:34.407402Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:34.407407Z node 17 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [17:1085:2859], serverId# [17:1087:2861], sessionId# [0:0:0] 2024-11-21T17:46:34.407416Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [17:1007:2805], Recipient [17:1007:2805]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:34.407419Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:34.407447Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037891] received poison pill [17:1086:2860] 2024-11-21T17:46:34.407451Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037891] notify reset [17:1086:2860] 2024-11-21T17:46:34.407475Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [17:1086:2860], Recipient [17:976:2789]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:46:34.407478Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:46:34.407503Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2024-11-21T17:46:34.407507Z node 17 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:34.407510Z node 17 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2024-11-21T17:46:34.407513Z node 17 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2024-11-21T17:46:34.407516Z node 17 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2024-11-21T17:46:34.407519Z node 17 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T17:46:34.407523Z node 17 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T17:46:34.407528Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [17:723:2071] 2024-11-21T17:46:34.407531Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [17:723:2071] 2024-11-21T17:46:34.407548Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [17:24:2071], Recipient [17:1007:2805]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 3000} 2024-11-21T17:46:34.407551Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T17:46:34.407554Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2024-11-21T17:46:34.407558Z node 17 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T17:46:34.407578Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send [17:726:2071] 2024-11-21T17:46:34.407581Z node 17 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [17:726:2071] 2024-11-21T17:46:34.407600Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [17:1088:2862], Recipient [17:1007:2805]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:34.407604Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:34.407608Z node 17 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [17:1086:2860], serverId# [17:1088:2862], sessionId# [0:0:0] 2024-11-21T17:46:34.407655Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [17:24:2071], Recipient [17:1002:2802]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2024-11-21T17:46:34.407659Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T17:46:34.407665Z node 17 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 3000 2024-11-21T17:46:34.407671Z node 17 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037890: waitStep# 3000 readStep# 3000 observedStep# 3000 2024-11-21T17:46:34.407676Z node 17 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037890 promoting UnprotectedReadEdge to v3000/18446744073709551615 2024-11-21T17:46:34.407709Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [17:24:2071], Recipient [17:1007:2805]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2024-11-21T17:46:34.407713Z node 17 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T17:46:34.407716Z node 17 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 3000 2024-11-21T17:46:34.407719Z node 17 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2024-11-21T17:46:34.407723Z node 17 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v3000/18446744073709551615 2024-11-21T17:46:34.428411Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715664 2024-11-21T17:46:34.429143Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037889] send [17:1059:2839] 2024-11-21T17:46:34.429156Z node 17 :PIPE_CLIENT DEBUG: TClient[72075186224037889] push event to server [17:1059:2839] 2024-11-21T17:46:34.429198Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [17:391:2384], Recipient [17:1061:2840] 2024-11-21T17:46:34.429217Z node 17 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2024-11-21T17:46:34.429677Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715664 2024-11-21T17:46:34.429694Z node 17 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 2 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:46:34.429848Z node 17 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [17:964:2782], Recipient [17:976:2789]: NKikimr::TEvTablet::TEvFollowerGcApplied ... 
split finished >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] >> KqpOlapWrite::WriteDeleteCleanGC [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 26788, MsgBus: 24677 2024-11-21T17:46:33.023190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790427441989292:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:33.023534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00188e/r3tmp/tmpqDPRRX/pdisk_1.dat 2024-11-21T17:46:33.087151Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26788, node 1 2024-11-21T17:46:33.101285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:33.101299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:33.101302Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:33.101339Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24677 2024-11-21T17:46:33.123548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:33.123583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:33.124751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:33.163275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.179397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:33.241880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.258138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.268029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:33.295011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790427441990827:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.295047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.322495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.329247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.343795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.357054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.364210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.370976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:33.379646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790427441991318:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.379677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790427441991323:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.379678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.380285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:33.384555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790427441991325:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 62277, MsgBus: 29603 2024-11-21T17:46:33.952979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790426763887796:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:33.953259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00188e/r3tmp/tmphQLHt1/pdisk_1.dat 2024-11-21T17:46:33.963875Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62277, node 2 2024-11-21T17:46:33.973371Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:33.973393Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:33.973395Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:33.973437Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29603 TClient is connected to server localhost:29603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:34.053314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:34.053340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:34.054432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:34.056080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.059840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.072166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:34.089681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.099623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.202488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790431058856623:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.202511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.208334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.215888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.225379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.280454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.288383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.295266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.304206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790431058857139:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.304253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.304267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790431058857144:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.304943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:34.308621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790431058857146:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewBytesColumnActualization [GOOD] Test command err: Trying to start YDB, gRPC: 3017, MsgBus: 6039 2024-11-21T17:46:11.914377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790333018960508:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.914394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e64/r3tmp/tmpUDa411/pdisk_1.dat 2024-11-21T17:46:11.970254Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3017, node 1 2024-11-21T17:46:11.978888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.978903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.978905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.978941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6039 TClient is connected to server localhost:6039 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:12.014965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:12.014990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:46:12.016098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:12.022317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:12.035114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:12.044272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.044326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.044356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.044372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.044387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.044398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.044411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.044430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.044444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:12.044457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.044470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:12.044485Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790337313928454:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:12.044877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:12.044890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:12.044902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:12.044907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:12.044922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:12.044930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:12.044940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:12.044944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:12.044953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:12.044960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:12.044966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:12.044971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:12.045023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:12.045032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:12.045049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:12.045058Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.045070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:12.045078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:12.045095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:12.045103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:12.045114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:12.045122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:12.048001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790337313928456:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.048023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790337313928456:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.048051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790337313928456:2289];tablet_id=720751862240378 ... 
xUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:12.055891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:12.055895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:12.055906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:12.055909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:12.056003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:12.056008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:12.056016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:12.056020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:12.056032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:12.056036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:12.056043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:12.056046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:12.056053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:12.056056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:12.056073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:12.056077Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:12.056105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:12.056112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:12.056125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:12.056128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.056137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:12.056141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:12.056153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:12.056156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:12.056164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:12.056167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:12.093204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51200216;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T17:46:16.914684Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790333018960508:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.914724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51200216;columns=2; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; 2024-11-21T17:46:18.492038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=82cb9d4a-a83011ef-a16339a3-399f8d61;fline=with_appended.cpp:80;portions=3,4,5,;task_id=82cb9d4a-a83011ef-a16339a3-399f8d61; 2024-11-21T17:46:18.499985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=82cc15e0-a83011ef-a0e20962-ff563940;fline=with_appended.cpp:80;portions=3,4,5,;task_id=82cc15e0-a83011ef-a0e20962-ff563940; WAIT_COMPACTION: 3 2024-11-21T17:46:18.935235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=83347edc-a83011ef-b0650276-53bc1b4b;fline=with_appended.cpp:80;portions=3,4,5,;task_id=83347edc-a83011ef-b0650276-53bc1b4b; WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 2024-11-21T17:46:23.514431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790384558569260:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.514430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790384558569268:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.514486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:23.515856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:23.518726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790384558569274:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T17:46:23.673501Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211183652, txId: 281474976710662] shutting down Wait changes: 0/0 2024-11-21T17:46:26.965243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:46:26.965260Z node 1 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64') RESULT: 2024-11-21T17:46:28.713777Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211188698, txId: 281474976710664] shutting down 0/0 2024-11-21T17:46:28.718065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:28.726499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting actualization: 9/0.000007s 2024-11-21T17:46:29.474779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=89becfa0-a83011ef-87459a75-f37ccfbf;fline=with_appended.cpp:80;portions=6,7,8,;task_id=89becfa0-a83011ef-87459a75-f37ccfbf; 2024-11-21T17:46:29.505953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=89c3f782-a83011ef-93665032-233257dc;fline=with_appended.cpp:80;portions=6,7,8,;task_id=89c3f782-a83011ef-93665032-233257dc; 2024-11-21T17:46:29.507486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=89c3f764-a83011ef-ab12b134-fd4bbf8a;fline=with_appended.cpp:80;portions=6,7,8,;task_id=89c3f764-a83011ef-ab12b134-fd4bbf8a; ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: 2024-11-21T17:46:29.774067Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211189759, txId: 281474976710668] shutting down Wait changes: 0/0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 AND EntityName IN ('new_column_ui64','_yql_delete_flag') RESULT: 2024-11-21T17:46:34.822128Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194805, txId: 281474976710670] shutting down 0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::WriteDeleteCleanGC [GOOD] Test command err: Trying to start YDB, gRPC: 17453, MsgBus: 10133 2024-11-21T17:46:30.486968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790413485930426:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:30.487102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000da0/r3tmp/tmpmz2kxt/pdisk_1.dat 2024-11-21T17:46:30.539533Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17453, node 1 
2024-11-21T17:46:30.556325Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:30.556340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:30.556342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:30.556377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10133 2024-11-21T17:46:30.588657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:30.588685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:30.589885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:30.617256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:30.626201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:30.635935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.635998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.636046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.636068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.636089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.636113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.636130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.636153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:30.636179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:30.636194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.636210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:30.636250Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:30.636734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:30.636742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:30.636754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:30.636761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:30.636777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:30.636781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:30.636793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:30.636802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:30.636810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:30.636814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:30.636819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:30.636823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:30.636881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:30.636885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:30.636901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:30.636904Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.636926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:30.636930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:30.636945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:30.636949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:30.636959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:30.636962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:30.639684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.639711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.639755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];tablet_id=7207518622 ... 
28Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:35.069732Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:35.069732Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:35.069736Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:35.069737Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:35.069741Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:35.069742Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.069745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.069746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.069747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.069749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:35.069749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:35.069754Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:35.069757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:35.069757Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037889;self_id=[1:7439790413485931079:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;storage_id=__DEFAULT;tablet_id=72075186224037889;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=4;reason=empty; 2024-11-21T17:46:35.069761Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037888;self_id=[1:7439790413485931078:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;storage_id=__DEFAULT;tablet_id=72075186224037888;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=0;reason=empty; 2024-11-21T17:46:35.069766Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790413485931080:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891; 2024-11-21T17:46:35.069775Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:35.069783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790413485931080:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:35.069789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439790413485931080:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2024-11-21T17:46:35.069795Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211195000 at tablet 72075186224037891 2024-11-21T17:46:35.069803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439790413485931080:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:35.069805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037891;self_id=[1:7439790413485931080:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2024-11-21T17:46:35.069992Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890; 2024-11-21T17:46:35.070007Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:46:35.070010Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:35.070015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:35.070022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:35.070025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:35.070027Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.070029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.070032Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:35.070039Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:35.070046Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;storage_id=__DEFAULT;tablet_id=72075186224037890;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=2;reason=empty; 2024-11-21T17:46:35.070050Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211195000 at tablet 72075186224037890 2024-11-21T17:46:35.070053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:35.070055Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:46:35.070057Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:35.070059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:35.070061Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.070063Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:35.070064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:35.070068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:35.070070Z node 1 :TX_COLUMNSHARD_BLOBS DEBUG: tablet_id=72075186224037890;self_id=[1:7439790413485931090:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;storage_id=__DEFAULT;tablet_id=72075186224037890;fline=blob_manager.cpp:308;event=TBlobManager::BuildGCTask skip;current_gen=1;current_step=2;reason=empty; >> KqpWrite::UpsertNullKey [GOOD] >> test.py::test[window-yql-18879-default.txt-Debug] [GOOD] >> test.py::test[window-yql-18879-default.txt-Plan] [GOOD] >> test.py::test[window-yql-18879-default.txt-Results] >> KqpImmediateEffects::Delete >> KqpWrite::InsertRevert >> KqpOlap::PredicatePushdown [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange >> KqpEffects::UpdateOn_Select [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 20316, MsgBus: 3885 2024-11-21T17:46:34.617989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790431035769049:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:34.618181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00185f/r3tmp/tmpwddtRW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20316, node 1 2024-11-21T17:46:34.671007Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:34.679800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:34.679815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:34.679818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T17:46:34.679852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3885 TClient is connected to server localhost:3885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:34.718183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:34.718211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:34.719273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:34.746415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.764361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.827190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.843941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.900094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.964171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790431035770607:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.964197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.993702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.000057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.054509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.065410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.071921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.079308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.087722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790435330738410:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.087752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790435330738415:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.087751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.088371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:35.092338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790435330738417:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |76.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |76.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown [GOOD] Test command err: Trying to start YDB, gRPC: 4838, MsgBus: 7163 2024-11-21T17:46:26.259610Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790397372396500:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:26.259739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000de3/r3tmp/tmp9jjko4/pdisk_1.dat 2024-11-21T17:46:26.317184Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4838, node 1 2024-11-21T17:46:26.326427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:26.326445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:26.326447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:26.326489Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7163 TClient is connected to server localhost:7163 2024-11-21T17:46:26.360835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:26.360864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:26.361878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:26.390848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:26.396007Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:26.403084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:26.412960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.413021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.413074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.413104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.413121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.413147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.413171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.413199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.413221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.413238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.413270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.413294Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790397372397152:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.413807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:26.413822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:26.413831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:26.413840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:26.413854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:26.413863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:26.413871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:26.413883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:26.413894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:26.413898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:26.413904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:26.413913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:26.413969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:26.413981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:26.413994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:26.414002Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.414012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:26.414021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:26.414036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:26.414046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:26.414061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:26.414069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:26.416888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790397372397151:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.416909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790397372397151:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NA ... T17:46:33.967438Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` + 2. < 5.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` + 2. < 5.f ORDER BY `timestamp` 2024-11-21T17:46:34.032409Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194007, txId: 18446744073709551615] shutting down 2024-11-21T17:46:34.069638Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194070, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` - 2.f >= 1. ORDER BY `timestamp` --- Run pushed down query --- 2024-11-21T17:46:34.119409Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194105, txId: 18446744073709551615] shutting down --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` - 2.f >= 1. 
ORDER BY `timestamp` 2024-11-21T17:46:34.153690Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194161, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` * 3. > 4.f ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` * 3. > 4.f ORDER BY `timestamp` 2024-11-21T17:46:34.204570Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194189, txId: 18446744073709551615] shutting down 2024-11-21T17:46:34.239119Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194238, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` / 2.f <= 1. ORDER BY `timestamp` 2024-11-21T17:46:34.291325Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194273, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` / 2.f <= 1. ORDER BY `timestamp` 2024-11-21T17:46:34.328940Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194329, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` % 3. != 1.f ORDER BY `timestamp` 2024-11-21T17:46:34.394108Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194364, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `level` % 3. 
!= 1.f ORDER BY `timestamp` 2024-11-21T17:46:34.461061Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194434, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `timestamp` >= Timestamp("1970-01-01T00:00:03.000001Z") AND `level` < 4 ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `timestamp` >= Timestamp("1970-01-01T00:00:03.000001Z") AND `level` < 4 ORDER BY `timestamp` 2024-11-21T17:46:34.524698Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194490, txId: 18446744073709551615] shutting down 2024-11-21T17:46:34.557507Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194560, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp`, `level`) >= (Timestamp("1970-01-01T00:00:03.000001Z"), 3) ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE (`timestamp`, `level`) >= (Timestamp("1970-01-01T00:00:03.000001Z"), 3) ORDER BY `timestamp` 2024-11-21T17:46:34.625201Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194595, txId: 18446744073709551615] shutting down 2024-11-21T17:46:34.677043Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194658, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` != "10001" XOR "XXX" == "YYY" ORDER BY `timestamp` 2024-11-21T17:46:34.757155Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194707, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `resource_id` != "10001" XOR "XXX" == "YYY" ORDER BY `timestamp` 2024-11-21T17:46:34.796406Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194791, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE IF(`level` > 3, -`level`, +`level`) < 2 ORDER BY `timestamp` 2024-11-21T17:46:34.856956Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194833, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE IF(`level` > 3, -`level`, +`level`) < 2 ORDER BY `timestamp` 2024-11-21T17:46:34.885956Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194896, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` 
FROM `/Root/olapStore/olapTable` WHERE StartsWith(`message` ?? `resource_id`, "10000") ORDER BY `timestamp` 2024-11-21T17:46:34.942120Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211194924, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE StartsWith(`message` ?? `resource_id`, "10000") ORDER BY `timestamp` 2024-11-21T17:46:34.972656Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195000, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT EndsWith(`message` ?? `resource_id`, "xxx") ORDER BY `timestamp` 2024-11-21T17:46:35.020126Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195008, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE NOT EndsWith(`message` ?? `resource_id`, "xxx") ORDER BY `timestamp` 2024-11-21T17:46:35.050528Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195057, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) == <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` 2024-11-21T17:46:35.109532Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195085, txId: 18446744073709551615] shutting down --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) == <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` 2024-11-21T17:46:35.145690Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195148, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) != <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE ChooseMembers(TableRow(), ['level', 'uid', 'resource_id']) != <|level:1, uid:"uid_3000001", resource_id:"10001"|> ORDER BY `timestamp` 2024-11-21T17:46:35.203939Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195183, txId: 18446744073709551615] shutting down 2024-11-21T17:46:35.236430Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195239, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` LIKE "_id%000_" ORDER BY `timestamp` --- Run pushed down query --- 
--!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` LIKE "_id%000_" ORDER BY `timestamp` 2024-11-21T17:46:35.333474Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195274, txId: 18446744073709551615] shutting down 2024-11-21T17:46:35.381813Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195337, txId: 18446744073709551615] shutting down --- Run normal query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'false'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` ILIKE "UID%002" ORDER BY `timestamp` --- Run pushed down query --- --!syntax_v1 PRAGMA Kikimr.OptEnableOlapPushdown = 'true'; SELECT `timestamp` FROM `/Root/olapStore/olapTable` WHERE `uid` ILIKE "UID%002" ORDER BY `timestamp` 2024-11-21T17:46:35.477312Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195414, txId: 18446744073709551615] shutting down 2024-11-21T17:46:35.532447Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211195505, txId: 18446744073709551615] shutting down >> KqpOlapAggregations::Aggregation_MinL [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD] Test command err: Trying to start YDB, gRPC: 20602, MsgBus: 8789 2024-11-21T17:46:34.174150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790431083122695:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:34.174461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001862/r3tmp/tmpgSY2uT/pdisk_1.dat 2024-11-21T17:46:34.219763Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20602, node 1 2024-11-21T17:46:34.237694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:34.237708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:34.237710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:34.237746Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8789 TClient is connected to server localhost:8789 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:34.274092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:34.274119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:34.275204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:34.284340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.294069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.309052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.327138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.344134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:34.462479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790431083124229:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.462511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.503386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.509990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.519075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.526366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.582203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.589603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:34.599989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790431083124745:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.600012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790431083124750:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.600016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:34.600734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:34.609697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790431083124752:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 6514, MsgBus: 65240 2024-11-21T17:46:35.058101Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790434003921086:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:35.058499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001862/r3tmp/tmpUK6gaH/pdisk_1.dat 2024-11-21T17:46:35.068984Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6514, node 2 2024-11-21T17:46:35.081076Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:35.081094Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:35.081095Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:35.081126Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65240 TClient is connected to server localhost:65240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:35.158404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:35.158434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:35.159503Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:35.160702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.169836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.177836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:35.194429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.206129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.329793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790434003922628:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.329818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.334365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.340990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.351955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.359516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.415156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.424056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.437826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790434003923145:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.437860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.437938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790434003923150:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.438497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:35.442473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790434003923152:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestExecutorMemUsage >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert |76.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest |76.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |76.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut >> KqpWrite::InsertRevert [GOOD] |76.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MinL [GOOD] Test command err: Trying to start YDB, gRPC: 29191, MsgBus: 6922 2024-11-21T17:46:22.756124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790377414323172:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:22.756159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dfb/r3tmp/tmpG8WDQS/pdisk_1.dat 2024-11-21T17:46:22.801605Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29191, node 1 2024-11-21T17:46:22.815639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:22.815653Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:22.815655Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:22.815689Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6922 TClient is connected to server localhost:6922 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:22.857179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:22.857208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:22.858324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:22.862075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:22.874254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:22.883861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.883906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.883935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.883951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.883965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.883982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.883998Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.884017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.884037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.884052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.884066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.884081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377414323821:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.884450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:22.884462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:22.884471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:22.884478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:22.884492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:22.884498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:22.884505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:22.884512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.884521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.884524Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.884534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:22.884545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:22.884598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:22.884605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:22.884616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:22.884622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.884628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:22.884634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:22.884644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:22.884650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:22.884657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:22.884663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:22.887250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790377414323829:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.887269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790377414323829:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.887298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790377414323829:2289];tablet_id=7207518622403 ... 
DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.190876Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:34.378341Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.378370Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:34.467334Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.467360Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:34.549224Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.549247Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:34.631231Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.631251Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:34.713697Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:34.734543Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:34.785784Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.785804Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:34.867645Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.867662Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:34.949686Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:34.949709Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.031687Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:35.031710Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.113676Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. 
SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:35.113691Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.155631Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:35.261963Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:35.261987Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.375468Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:35.375493Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.457570Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:35.457595Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.539625Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:35.539661Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . 
DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.621625Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:35.621649Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xa6n83dt70vnrcn51bcgb. SessionId : ydb://session/3?node_id=2&id=YTExZDQwOWQtMTczYjQwZTctM2U4MGM1YTYtYTcwZjgyMjI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:35.703677Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:35.724357Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 28629, MsgBus: 61767 2024-11-21T17:46:35.778330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790435399352366:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:35.778562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001849/r3tmp/tmpRKzvu2/pdisk_1.dat 2024-11-21T17:46:35.821208Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28629, node 1 2024-11-21T17:46:35.846413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:35.846427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:35.846429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:35.846466Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61767 2024-11-21T17:46:35.878607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:35.878639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:35.879748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:35.902544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.904377Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:35.917661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.979720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.994429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.004097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.055506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790439694321194:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.055534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.084629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.091114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.101452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.108047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.115758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.122166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.131052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790439694321686:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.131078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790439694321691:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.131082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.131715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:36.135401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790439694321693:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:36.349383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange [GOOD] Test command err: Trying to start YDB, gRPC: 30584, MsgBus: 8718 2024-11-21T17:46:35.961775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790433716915821:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:35.961956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001843/r3tmp/tmpzug0X9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30584, node 1 2024-11-21T17:46:36.015838Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:36.023340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:36.023357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:36.023359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:36.023419Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8718 TClient is connected to server localhost:8718 2024-11-21T17:46:36.061942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:36.061973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:36.063047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:36.086819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:36.094852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:46:36.160187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.181413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.191740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.255076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790438011884674:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.255099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.290413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.297089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.303870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.358675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.367347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.374194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.382788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790438011885181:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.382813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790438011885186:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.382813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.383382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:36.387231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790438011885188:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:36.589234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> KqpImmediateEffects::DeleteAfterInsert [GOOD] |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_huge/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] Test command err: 2024-11-21T17:45:59.796888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:59.797512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:59.797552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173e/r3tmp/tmp8PnNQ2/pdisk_1.dat 2024-11-21T17:45:59.907905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:59.927962Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:59.970904Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:45:59.971250Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:45:59.971298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:59.971316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:59.984628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:00.225515Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:46:00.225548Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:46:00.225587Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:610:2519] 2024-11-21T17:46:00.234881Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:46:00.235140Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:46:00.235157Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:46:00.235212Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:46:00.235259Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:46:00.235273Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:46:00.235783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:00.235952Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:46:00.236081Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:46:00.236091Z node 1 :TX_PROXY DEBUG: Actor# [1:610:2519] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:46:00.250899Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:00.251165Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:00.251271Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:635:2540] 2024-11-21T17:46:00.251348Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:00.252492Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:626:2534], Recipient [1:635:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:00.260075Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:00.260131Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:00.260319Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:46:00.260343Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:46:00.260354Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:46:00.260408Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:00.264603Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:46:00.264718Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:00.264762Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:651:2549] 2024-11-21T17:46:00.264769Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:46:00.264774Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:46:00.264780Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:00.264962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:635:2540], Recipient [1:635:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:00.264970Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:00.265037Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:46:00.265062Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:46:00.265170Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:642:2544], Recipient [1:635:2540]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:00.265176Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:00.265184Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:632:2538], serverId# [1:642:2544], sessionId# [0:0:0] 2024-11-21T17:46:00.265194Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:46:00.265202Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:00.265210Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:46:00.265215Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:46:00.265235Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:46:00.265240Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:46:00.265248Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:46:00.265263Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:642:2544] 2024-11-21T17:46:00.265267Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:00.265297Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:46:00.265359Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:46:00.265371Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:46:00.265393Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:46:00.265402Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:46:00.265406Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:46:00.265412Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:46:00.265416Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:46:00.265471Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:00.265476Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:46:00.265479Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:46:00.265483Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:46:00.265495Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:46:00.265499Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:46:00.265503Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:46:00.265506Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:46:00.265513Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:00.265816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:652:2550], Recipient [1:635:2540]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:46:00.265828Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:46:00.276203Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:46:00.276256Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:46:00.276266Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:46:00.276282Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status ... ipient [16:990:2803]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:37.146550Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:37.146564Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:985:2800], serverId# [16:1009:2814], sessionId# [0:0:0] 2024-11-21T17:46:37.146596Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [16:740:2609], Recipient [16:990:2803]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:46:37.146601Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:46:37.146607Z node 16 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T17:46:37.146631Z node 16 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:46:37.146770Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:46:37.157143Z node 16 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T17:46:37.157203Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:990:2803], Recipient [16:740:2609]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T17:46:37.157212Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:46:37.157218Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 ... 
reading final result 2024-11-21T17:46:37.204281Z node 16 :TX_PROXY DEBUG: actor# [16:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T17:46:37.204305Z node 16 :TX_PROXY DEBUG: actor# [16:52:2099] TxId# 281474976715664 ProcessProposeKqpTransaction 2024-11-21T17:46:37.204549Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:1031:2833], Recipient [16:990:2803]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:37.204558Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:37.204567Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:1030:2832], serverId# [16:1031:2833], sessionId# [0:0:0] 2024-11-21T17:46:37.215014Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xah7t57fgtyxk49d3kjqp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=M2RlZmRiOTYtYmUxYjhlNDMtMTUwZDlmY2YtYTUzMTBkNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:37.216435Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:1042:2836], Recipient [16:990:2803]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:46:37.216480Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:46:37.216500Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit CheckRead 2024-11-21T17:46:37.216521Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T17:46:37.216527Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:46:37.216533Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:37.216537Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:46:37.216553Z node 16 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037888 2024-11-21T17:46:37.216559Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T17:46:37.216563Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:37.216566Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:46:37.216570Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:46:37.216587Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T17:46:37.216662Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v6000/18446744073709551615 2024-11-21T17:46:37.216670Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[16:1042:2836], 0} after executionsCount# 1 
2024-11-21T17:46:37.216678Z node 16 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[16:1042:2836], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:46:37.216696Z node 16 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[16:1042:2836], 0} finished in read 2024-11-21T17:46:37.216706Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T17:46:37.216710Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:46:37.216716Z node 16 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:46:37.216719Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:46:37.216732Z node 16 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037888 is Executed 2024-11-21T17:46:37.216735Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:46:37.216739Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037888 has finished 2024-11-21T17:46:37.216744Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:46:37.216769Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:46:37.216833Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:1044:2837], Recipient [16:740:2609]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:46:37.216846Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T17:46:37.216854Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2024-11-21T17:46:37.216862Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T17:46:37.216866Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2024-11-21T17:46:37.216869Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:37.216872Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T17:46:37.216879Z node 16 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2024-11-21T17:46:37.216883Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T17:46:37.216886Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:37.216889Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T17:46:37.216892Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2024-11-21T17:46:37.216903Z node 16 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 
Snapshot { Step: 6000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T17:46:37.216921Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v6000/18446744073709551615 2024-11-21T17:46:37.216926Z node 16 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[16:1044:2837], 0} after executionsCount# 1 2024-11-21T17:46:37.216931Z node 16 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[16:1044:2837], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:46:37.216939Z node 16 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[16:1044:2837], 0} finished in read 2024-11-21T17:46:37.216944Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T17:46:37.216948Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T17:46:37.216953Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:46:37.216956Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:46:37.216962Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T17:46:37.216965Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:46:37.216968Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2024-11-21T17:46:37.216971Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T17:46:37.216981Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T17:46:37.217234Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [16:1042:2836], Recipient [16:990:2803]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:46:37.217244Z node 16 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T17:46:37.217506Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [16:1044:2837], Recipient [16:740:2609]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:46:37.217516Z node 16 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 28867, MsgBus: 26571 2024-11-21T17:46:35.587459Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790436316410646:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:35.587788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001851/r3tmp/tmpiwQ0Dz/pdisk_1.dat 2024-11-21T17:46:35.638055Z node 1 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28867, node 1 2024-11-21T17:46:35.653217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:35.653229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:35.653231Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:35.653267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26571 2024-11-21T17:46:35.687800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:35.687828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:35.688913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:35.699019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.711314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.773702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.790940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.801283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:35.857945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790436316412181:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.857977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:35.898015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.907654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.920213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.935686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:35.992186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.003649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.013563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790440611379993:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.013581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.013634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790440611379998:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.014250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:36.016186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790440611380000:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:46:36.189626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19974, MsgBus: 28472 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001851/r3tmp/tmprBOakE/pdisk_1.dat 2024-11-21T17:46:36.467726Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19974, node 2 2024-11-21T17:46:36.474727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:36.474745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:36.474747Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:36.474783Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28472 TClient is connected to server localhost:28472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:36.559726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:36.559761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:36.560909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:36.562053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.575222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.584292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.599920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
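The repeated KQP_WORKLOAD_SERVICE warnings in this test block (node 1 above, node 2 below) all come from the same first-use path: the "default" resource pool does not exist yet, the service creates it (the ESchemeOpCreateResourcePool operation above, tx 281474976710668), and the TPoolCreatorActor then doublechecks that the pool is visible once the create transaction has completed, scheduling retries until it is. A minimal stand-alone sketch of that create-then-doublecheck shape; FetchPool/CreatePool are placeholder callbacks for illustration only, not actual NKikimr/YDB APIs:

    // Schematic sketch of the create-then-doublecheck retry visible in the log.
    // FetchPool/CreatePool are placeholders, not real YDB interfaces.
    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    bool EnsureDefaultPool(const std::function<bool()>& FetchPool,
                           const std::function<bool()>& CreatePool,
                           int maxAttempts = 5) {
        if (FetchPool()) {
            return true;                        // pool already exists
        }
        if (!CreatePool()) {
            return false;                       // create transaction failed outright
        }
        auto delay = std::chrono::milliseconds(50);
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (FetchPool()) {
                return true;                    // doublecheck succeeded
            }
            // Matches "Scheduled retry for error: ... completed, doublechecking":
            // the transaction finished but the pool is not visible yet.
            std::this_thread::sleep_for(delay);
            delay *= 2;                         // simple exponential backoff
        }
        return false;
    }

    int main() {
        bool created = false;
        auto fetch  = [&] { return created; };
        auto create = [&] { created = true; return true; };
        std::cout << (EnsureDefaultPool(fetch, create) ? "pool ready" : "gave up")
                  << std::endl;
        return 0;
    }
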
2024-11-21T17:46:36.611144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:36.756368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790439875075598:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.756390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.763415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.771131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.780342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.787237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.794612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.810366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:36.878786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790439875076114:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.878812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.878966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790439875076119:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:36.879811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:36.883360Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:46:36.883456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790439875076121:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:37.045216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts >> test.py::test[window-yql-18879-default.txt-Results] [GOOD] |76.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |76.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |76.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> KqpSysColV1::StreamInnerJoinTables >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL [GOOD] >> Donor::MultipleEvicts [GOOD] >> KqpSystemView::ReadSuccess |76.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |76.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |76.1%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |76.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 14832393288792830933 0 donors: 2024-11-21T17:46:38.593504Z 26 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.593541Z 26 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.594240Z 26 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2024-11-21T17:46:38.600402Z 24 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.600448Z 24 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.600967Z 24 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2024-11-21T17:46:38.606813Z 26 00h00m20.013072s 
:BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.606853Z 26 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.607280Z 26 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2024-11-21T17:46:38.613149Z 24 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.613187Z 24 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.613678Z 24 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2024-11-21T17:46:38.619536Z 26 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.619566Z 26 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.620017Z 26 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2024-11-21T17:46:38.627162Z 24 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.627210Z 24 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.627858Z 24 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2024-11-21T17:46:38.636879Z 26 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.636928Z 26 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.637630Z 26 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2024-11-21T17:46:38.646623Z 24 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.646673Z 24 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.647398Z 24 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2024-11-21T17:46:38.654961Z 26 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:38.655007Z 26 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1214885809770830844] 2024-11-21T17:46:38.655755Z 26 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:2:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 
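Each DECISION/FINISH/THullOsirisActor cycle above appears to be one evict round in Donor::MultipleEvicts: the freshly assigned VDisk decides LostData, resyncs with nothing to resurrect (BlobsResurrected# 0 PartsResurrected# 0), and the previous replica is reported as the single donor, alternating between 24:1000 and 26:1000. A throwaway helper for pulling those donor reports out of a saved copy of this log; it assumes one log record per line and the exact "N donors: <id>" wording shown here, and is not part of the test itself:

    // Log-scraping sketch only: count evict cycles and check that the reported
    // donor id alternates between consecutive cycles.
    #include <fstream>
    #include <iostream>
    #include <string>
    #include <vector>

    int main(int argc, char** argv) {
        if (argc < 2) {
            std::cerr << "usage: " << argv[0] << " <log file>\n";
            return 1;
        }
        std::ifstream in(argv[1]);
        const std::string marker = " donors: ";
        std::vector<std::string> donors;
        std::string line;
        while (std::getline(in, line)) {
            auto pos = line.find(marker);
            if (pos == std::string::npos) {
                continue;                        // not a donor report
            }
            std::string rest = line.substr(pos + marker.size());
            if (!rest.empty()) {
                donors.push_back(rest);          // e.g. "24:1000"
            }
        }
        std::cout << "evict cycles reporting a donor: " << donors.size() << "\n";
        for (size_t i = 1; i < donors.size(); ++i) {
            if (donors[i] == donors[i - 1]) {
                std::cout << "donor repeated at cycle " << i << ": " << donors[i] << "\n";
            }
        }
        return 0;
    }
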
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> BasicUsage::WriteSessionCloseWaitsForWrites ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultCountAll_FilterL [GOOD] Test command err: Trying to start YDB, gRPC: 12019, MsgBus: 5797 2024-11-21T17:46:26.222200Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790397457590857:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:26.222424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000de4/r3tmp/tmpi7bdUL/pdisk_1.dat 2024-11-21T17:46:26.289503Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12019, node 1 2024-11-21T17:46:26.295528Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:26.295540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:26.295542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:26.295574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5797 2024-11-21T17:46:26.323551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:26.323593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:26.324634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:26.354527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:26.363731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:26.373289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.373335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.373397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.373413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.373428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.373448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.373466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.373481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.373496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.373515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.373529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.373547Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790397457591511:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.374123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:26.374141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:26.374152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:26.374156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:26.374171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:26.374174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:26.374182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:26.374192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:26.374201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:26.374205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:26.374213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:26.374218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:26.374277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:26.374282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:26.374297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:26.374301Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.374311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:26.374315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:26.374330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:26.374337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:26.374347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:26.374350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:26.376585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790397457591512:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.376603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790397457591512:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.376623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790397457591512:2290];tablet_id=7207518622403 ... DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:36.867433Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:36.989527Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:36.989551Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T17:46:37.051101Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.051148Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.113380Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.113422Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.185085Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.185107Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.238713Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:37.259642Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:37.311906Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.311925Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.373205Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. 
SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.373233Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.434560Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.434601Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.496096Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.496129Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.568670Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.568706Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.578984Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:37.704901Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.704928Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . 
DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.797828Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.797851Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.879877Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.879904Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.961925Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.961957Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.054345Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.054376Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1890:3007], TxId: 281474976715663, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xaa0g81gm8ce0qq37bwkt. SessionId : ydb://session/3?node_id=2&id=YzNjZWRjYmQtNGE5YWU5NDEtMjM5MDMwYmItY2NhOGRlNzI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T17:46:38.095392Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:38.126440Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> BasicUsage::FallbackToSingleDb |76.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |76.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> BasicUsage::WriteSessionNoAvailableDatabase >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> KqpOlap::CompactionPlanner [GOOD] >> KqpSysColV1::StreamInnerJoinTables [GOOD] |76.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] >> BasicUsage::WriteSessionWriteInHandlers >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 [GOOD] >> KqpSystemView::ReadSuccess [GOOD] |76.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |76.2%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> BasicUsage::BasicWriteSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 1569, MsgBus: 6283 2024-11-21T17:46:38.709351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790445436187677:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:38.709538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0014b2/r3tmp/tmp7pEBVI/pdisk_1.dat 2024-11-21T17:46:38.767461Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1569, node 1 2024-11-21T17:46:38.782144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:38.782162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:38.782164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:38.782200Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6283 2024-11-21T17:46:38.811123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:38.811170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:38.812245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6283 WaitRootIsUp 'Root'... 
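The long KQP_COMPUTE trace above (TxId 281474976715663, task 1) appears to be a single compute actor repeatedly hitting CA StateFunc 65538 and re-arming its periodic wakeup (tag 2) while the column shard scan proceeds; the timestamps put consecutive wakeups mostly 60-140 ms apart. A quick sketch for measuring that cadence from a saved copy of the log, assuming one record per line and the timestamp format shown; it is only a reading aid, not a YDB tool:

    // Cadence check: print the gap between consecutive "wakeup with tag" records.
    #include <cstdio>
    #include <fstream>
    #include <iostream>
    #include <string>

    int main(int argc, char** argv) {
        if (argc < 2) {
            std::cerr << "usage: " << argv[0] << " <log file>\n";
            return 1;
        }
        std::ifstream in(argv[1]);
        std::string line;
        double prevMs = -1.0;
        while (std::getline(in, line)) {
            if (line.find("wakeup with tag") == std::string::npos) {
                continue;
            }
            int h = 0, m = 0;
            double s = 0.0;
            // Leading timestamp looks like "2024-11-21T17:46:38.095392Z ...".
            if (std::sscanf(line.c_str(), "%*4d-%*2d-%*2dT%d:%d:%lf", &h, &m, &s) != 3) {
                continue;
            }
            double ms = ((h * 60.0 + m) * 60.0 + s) * 1000.0;
            if (prevMs >= 0.0) {
                std::cout << "gap: " << (ms - prevMs) << " ms\n";
            }
            prevMs = ms;
        }
        return 0;
    }
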
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:38.846408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:38.852549Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:38.857421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:38.874383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:38.889850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:38.902872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:39.012877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790449731156520:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.012925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.050283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.060068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.071453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.084219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.098111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.111433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.124972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790449731157033:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.125006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.125057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790449731157038:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.126023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:39.132705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790449731157040:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:39.357065Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211199397, txId: 281474976715671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] |76.2%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::CompactionPlanner [GOOD] Test command err: Trying to start YDB, gRPC: 7947, MsgBus: 5897 2024-11-21T17:46:22.507794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790377769104820:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:22.507810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e05/r3tmp/tmp1AkIm2/pdisk_1.dat 2024-11-21T17:46:22.559342Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7947, node 1 2024-11-21T17:46:22.577744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:22.577754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:22.577755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:22.577780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5897 TClient is connected to server localhost:5897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:22.641092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:22.641118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:22.641447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:22.642176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:46:22.661973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:22.672308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.672366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.672409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.672426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.672442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.672457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.672471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.672490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.672510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.672532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.672554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.672576Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790377769105484:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.675142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.675168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.675197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.675214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.675229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.675239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.675253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.675269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.675284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.675300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.675319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.675341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790377769105482:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.675824Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:22.675839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:22.675850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:22.675859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:22.675874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:22.675882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:22.675890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:22.675895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.675902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.675904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.675909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;pr ... 
xecute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:22.681741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:22.681743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:22.681746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:22.681749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:22.681762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:22.681768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:22.681776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:22.681782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.681788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:22.681790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:22.681802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:22.681810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:22.681817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:22.681824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:22.720677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 
PathId: 3 2024-11-21T17:46:22.775911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790377769105775:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:22.775986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:22.777901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715660:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2024-11-21T17:46:27.508012Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790377769104820:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:27.508058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:27.908024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790399243942457:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.908047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:27.910135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T17:46:28.038554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=88dbac8e-a83011ef-99e51333-dffecc8b;fline=with_appended.cpp:80;portions=4,;task_id=88dbac8e-a83011ef-99e51333-dffecc8b; 2024-11-21T17:46:28.040062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=88dba16c-a83011ef-92d85ee0-2319ef17;fline=with_appended.cpp:80;portions=4,;task_id=88dba16c-a83011ef-92d85ee0-2319ef17; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T17:46:28.084585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=88e4ce0e-a83011ef-b5fb3a25-5748c982;fline=with_appended.cpp:80;portions=4,;task_id=88e4ce0e-a83011ef-b5fb3a25-5748c982; WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 WAIT_COMPACTION: 3 2024-11-21T17:46:33.101356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790425013746419:2530], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.101392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:33.102830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715662:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=11795408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=11795408;columns=5; 2024-11-21T17:46:33.222014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=8bf1a6d0-a83011ef-b509d618-afa85550;fline=with_appended.cpp:80;portions=7,;task_id=8bf1a6d0-a83011ef-b509d618-afa85550; 2024-11-21T17:46:33.223028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=8bf1a8ba-a83011ef-8899a722-21b58c28;fline=with_appended.cpp:80;portions=7,;task_id=8bf1a8ba-a83011ef-8899a722-21b58c28; 2024-11-21T17:46:33.269660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=8bfbcc50-a83011ef-b206e9c4-a1072c7b;fline=with_appended.cpp:80;portions=7,;task_id=8bfbcc50-a83011ef-b206e9c4-a1072c7b; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=82565408;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=82565408;columns=5; 2024-11-21T17:46:33.960981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=8c5715ce-a83011ef-986f1448-9ff07bbb;fline=with_appended.cpp:80;portions=9,;task_id=8c5715ce-a83011ef-986f1448-9ff07bbb; 2024-11-21T17:46:33.962367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=8c5819ce-a83011ef-b0b6c185-a7f00556;fline=with_appended.cpp:80;portions=9,;task_id=8c5819ce-a83011ef-b0b6c185-a7f00556; WAIT_COMPACTION: 8 WAIT_COMPACTION: 8 WAIT_COMPACTION: 8 WAIT_COMPACTION: 8 2024-11-21T17:46:37.559047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:46:37.559062Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_COMPACTION: 8 2024-11-21T17:46:38.994033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790446488583113:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:38.994061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.000106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790446488583122:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.000185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.000356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790446488583127:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.002427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T17:46:39.007231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790450783550425:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T17:46:39.214551Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211199061, txId: 18446744073709551615] shutting down |76.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 17738, MsgBus: 14087 2024-11-21T17:46:38.848360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790446361857581:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:38.848383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001492/r3tmp/tmpIZmxsi/pdisk_1.dat 2024-11-21T17:46:38.908882Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17738, node 1 2024-11-21T17:46:38.924480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:38.924494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:38.924496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:38.924540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14087 2024-11-21T17:46:38.948841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:38.948873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:38.950272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:38.977373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:38.979902Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:38.990875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:39.052851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:46:39.078114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.090155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:39.157589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790450656826425:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.157621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.199789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.254834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.265523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.279167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.286047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.292772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.309190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790450656826934:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.309211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.309261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790450656826939:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.309903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:39.313029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790450656826941:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:46:39.507654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.521594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jd7xakgq1w4775q7hzt4sge7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzdmZDAzZjQtM2E0ODllOWQtZDQ5OWEwNzQtMjc5OGFkY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:46:39.522991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211199520, txId: 281474976710672] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultTL_FilterL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 27859, MsgBus: 27823 2024-11-21T17:46:26.409931Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790393984526396:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:26.409947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dda/r3tmp/tmpgFWEAC/pdisk_1.dat 2024-11-21T17:46:26.455832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27859, node 1 2024-11-21T17:46:26.471692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:26.471703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:26.471705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:26.471738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27823 TClient is connected to server localhost:27823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:26.510753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:26.510781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:26.511904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:26.517749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:26.525930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:26.537333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.537399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.537428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.537446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.537457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.537476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.537496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.537521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.537544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.537567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.537584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.537604Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393984527041:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.539880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.539897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.539930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.539948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.539964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.539980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.539994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.540009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.540030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.540052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.540080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.540100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393984527042:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.542781Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.542802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.542836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.542853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.542867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.542881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.542895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.542910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790393984527043:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.542925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74397903939845270 ... TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.740533Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:37.898726Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.898759Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T17:46:37.987717Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:37.987749Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.073878Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.073910Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.156296Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.156328Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.261220Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:38.271648Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:38.322847Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.322880Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.407143Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.407165Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.489086Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.489111Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.571212Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.571244Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.653372Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.653421Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.698660Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:38.828462Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.828492Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. 
TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:38.934968Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:38.935000Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:39.016854Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:39.016871Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:39.098733Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:39.098758Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. wakeup with tag 2 2024-11-21T17:46:39.181244Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 65538 2024-11-21T17:46:39.181272Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1745:3002], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWNlOTk0MWUtYzg5MjMzYTQtMTcxMzlkMTAtNDI1NjZkNzc=. TraceId : 01jd7xa9xp58mz1nvms1bnqqex. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
wakeup with tag 2 2024-11-21T17:46:39.255017Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1219:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:39.275764Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1219:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> BasicUsage::PropagateSessionClosed >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> BasicUsage::WaitEventBlocksBeforeDiscovery >> BasicUsage::GetAllStartPartitionSessions >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] >> KqpSystemView::NodesRange2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2024-11-21T17:45:22.461599Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T17:45:22.461633Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T17:45:22.461659Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T17:45:22.461664Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T17:45:22.461670Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:33:2065] 2024-11-21T17:45:22.461673Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2024-11-21T17:45:22.461705Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:33:2065] 2024-11-21T17:45:22.461709Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2024-11-21T17:45:22.461742Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:22.461800Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:39:2067] 2024-11-21T17:45:22.461806Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2024-11-21T17:45:22.461836Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:39:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:22.461853Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:40:2067] 2024-11-21T17:45:22.461856Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2024-11-21T17:45:22.461860Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:40:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:22.461876Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle 
NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2067] 2024-11-21T17:45:22.461879Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2024-11-21T17:45:22.461883Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:41:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:22.461894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2024-11-21T17:45:22.461902Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:39:2067] 2024-11-21T17:45:22.461908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2024-11-21T17:45:22.461913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:40:2067] 2024-11-21T17:45:22.461917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2024-11-21T17:45:22.461921Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2067] 2024-11-21T17:45:22.461930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:36:2067] 2024-11-21T17:45:22.461956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:37:2067] 2024-11-21T17:45:22.461965Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/tenant] Set up state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:22.461972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2067] 2024-11-21T17:45:22.461980Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2067][/root/tenant] Ignore empty state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2024-11-21T17:45:22.462026Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:32:2064], cookie# 0, event size# 103 2024-11-21T17:45:22.462033Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T17:45:22.462046Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T17:45:22.462080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: 
sender# [1:3:2050] 2024-11-21T17:45:22.462087Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:39:2067] 2024-11-21T17:45:22.462092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:36:2067] 2024-11-21T17:45:22.462099Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/tenant] Update to strong state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2024-11-21T17:45:22.876904Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:32:2064] 2024-11-21T17:45:22.876931Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T17:45:22.876947Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:33:2065] 2024-11-21T17:45:22.876951Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2024-11-21T17:45:22.876959Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:32:2064] 2024-11-21T17:45:22.876964Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T17:45:22.876989Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:33:2065] 2024-11-21T17:45:22.876992Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2024-11-21T17:45:22.877007Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:45:22.877050Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:39:2067] 2024-11-21T17:45:22.877054Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2024-11-21T17:45:22.877071Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:39:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:22.877100Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:40:2067] 2024-11-21T17:45:22.877102Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2024-11-21T17:45:22.877106Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:40:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:22.877118Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] 
Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2067] 2024-11-21T17:45:22.877120Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2024-11-21T17:45:22.877123Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Subscribe: subscriber# [3:41:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:45:22.877131Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2024-11-21T17:45:22.877143Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:39:2067] 2024-11-21T17:45:22.877148Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2024-11-21T17:45:22.877151Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:40:2067] 2024-11-21T17:45:22.877154Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2024-11-21T17:45:22.877157Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2067] 2024-11-21T17:45:22.877164Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:36:2067] 2024-11-21T17:45:22.877178Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:37:2067] 2024-11-21T17:45:22.877186Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:35:2067][/root/tenant] Set up state: owner# [3:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:45:22.877193Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/ ... 
omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2024-11-21T17:46:39.658942Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:32:2064] 2024-11-21T17:46:39.658964Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:39.658985Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:32:2064] 2024-11-21T17:46:39.658990Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:39.658996Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:33:2065] 2024-11-21T17:46:39.659000Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:39.659025Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:33:2065] 2024-11-21T17:46:39.659029Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:39.659041Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:46:39.659094Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:39:2067] 2024-11-21T17:46:39.659101Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:39.659122Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:39:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:46:39.659140Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:40:2067] 2024-11-21T17:46:39.659144Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:39.659148Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:40:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:46:39.659163Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2067] 2024-11-21T17:46:39.659166Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:39.659170Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:41:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: 
true 2024-11-21T17:46:39.659181Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:39:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2024-11-21T17:46:39.659190Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:39:2067] 2024-11-21T17:46:39.659196Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:40:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2024-11-21T17:46:39.659201Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:40:2067] 2024-11-21T17:46:39.659206Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2024-11-21T17:46:39.659211Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2067] 2024-11-21T17:46:39.659221Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:36:2067] 2024-11-21T17:46:39.659237Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:37:2067] 2024-11-21T17:46:39.659246Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:35:2067][/Root/Tenant/table_inside] Set up state: owner# [397:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:46:39.659253Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2067] 2024-11-21T17:46:39.659260Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:35:2067][/Root/Tenant/table_inside] Ignore empty state: owner# [397:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2024-11-21T17:46:40.069610Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:32:2064] 2024-11-21T17:46:40.069628Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:40.069642Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:32:2064] 2024-11-21T17:46:40.069645Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:40.069649Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] 
Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:33:2065] 2024-11-21T17:46:40.069652Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:40.069669Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:33:2065] 2024-11-21T17:46:40.069672Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:40.069680Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:46:40.069719Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:39:2067] 2024-11-21T17:46:40.069723Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:40.069740Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:39:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:46:40.069758Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:40:2067] 2024-11-21T17:46:40.069762Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:40.069767Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:40:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:46:40.069780Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2067] 2024-11-21T17:46:40.069783Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:40.069788Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:41:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T17:46:40.069799Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:39:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2024-11-21T17:46:40.069807Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:39:2067] 2024-11-21T17:46:40.069813Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:40:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2024-11-21T17:46:40.069817Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:40:2067] 2024-11-21T17:46:40.069825Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2024-11-21T17:46:40.069830Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2067] 2024-11-21T17:46:40.069839Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:36:2067] 2024-11-21T17:46:40.069852Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:37:2067] 2024-11-21T17:46:40.069860Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:35:2067][/Root/Tenant/table_inside] Set up state: owner# [399:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:46:40.069867Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2067] 2024-11-21T17:46:40.069873Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:35:2067][/Root/Tenant/table_inside] Ignore empty state: owner# [399:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() |76.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> KqpSysColV0::SelectRowById |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> BasicUsage::RetryDiscoveryWithCancel >> TRtmrTestReboots::CreateRtmrVolumeWithReboots [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 16513996345476435697 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2024-11-21T17:46:22.178313Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2024-11-21T17:46:22.204322Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] 
Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2024-11-21T17:46:22.321919Z 1 00h01m30.111024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# 
[1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 48 SEND 
TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999866} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999866} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999902} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999902} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Stop node 3 2024-11-21T17:46:22.488776Z 1 00h02m00.161536s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2024-11-21T17:46:22.505118Z 1 00h02m10.211024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:22.505427Z 1 00h02m10.211024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15838155216333164654] Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] 2024-11-21T17:46:22.649282Z 1 00h03m50.211024s :BS_PROXY ERROR: Group# 2181038080 StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult: TEvPutResult {Id# [1:1:62:0:0:354994:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpaceShare# 0} Step = 63 SEND TEvPut with key [1:1:63:0:0 ... 
d# [1:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 3 2024-11-21T17:46:34.738846Z 1 00h28m01.362560s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 1 2024-11-21T17:46:34.775194Z 1 00h28m11.363072s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 964 SEND TEvPut 
with key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 0 2024-11-21T17:46:34.933560Z 9 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:127446:353] ServerId# [1:128483:171] TabletId# 72057594037932033 PipeClientId# [9:127446:353] 2024-11-21T17:46:34.933636Z 8 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:158066:16] ServerId# [1:158076:4101] TabletId# 72057594037932033 PipeClientId# [8:158066:16] 2024-11-21T17:46:34.933657Z 7 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:157029:16] ServerId# [1:157039:3975] TabletId# 72057594037932033 PipeClientId# [7:157029:16] 2024-11-21T17:46:34.933684Z 6 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:134095:16] ServerId# [1:134104:1009] TabletId# 72057594037932033 PipeClientId# [6:134095:16] 2024-11-21T17:46:34.933706Z 5 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:154119:16] ServerId# [1:154127:3594] TabletId# 72057594037932033 PipeClientId# [5:154119:16] 2024-11-21T17:46:34.933731Z 4 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:163031:16] ServerId# [1:163041:4701] TabletId# 72057594037932033 PipeClientId# [4:163031:16] 2024-11-21T17:46:34.933774Z 3 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:153033:16] ServerId# [1:153043:3471] TabletId# 72057594037932033 PipeClientId# [3:153033:16] 2024-11-21T17:46:34.933801Z 2 00h28m41.364608s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:162062:16] ServerId# [1:162069:4591] TabletId# 72057594037932033 PipeClientId# [2:162062:16] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Stop node 7 2024-11-21T17:46:35.146809Z 1 00h29m21.403072s :PIPE_SERVER ERROR: 
[72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Starting nodes Start compaction 1 Start checking >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] |76.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |76.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |76.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |76.2%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSysColV0::SelectRowById [GOOD] |76.2%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T17:45:42.987623Z :TestReorderedExecutor INFO: Random seed for debugging is 1732211142987617 2024-11-21T17:45:43.155575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790210585375175:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:43.155710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:43.157892Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790210136154198:2117];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:43.188415Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:45:43.189574Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001eac/r3tmp/tmpy6CXF3/pdisk_1.dat 2024-11-21T17:45:43.190677Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:45:43.232357Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8813, node 1 2024-11-21T17:45:43.254932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:43.254954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:43.255761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:43.262534Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001eac/r3tmp/yandexLlJMZY.tmp 2024-11-21T17:45:43.262548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001eac/r3tmp/yandexLlJMZY.tmp 2024-11-21T17:45:43.262721Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001eac/r3tmp/yandexLlJMZY.tmp 2024-11-21T17:45:43.262771Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:43.273752Z INFO: TTestServer started on Port 23612 GrpcPort 8813 TClient is connected to server localhost:23612 PQClient connected to localhost:8813 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:45:43.306600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:43.306624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:43.309233Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:45:43.309668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:43.313062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:43.327480Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:43.332917Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:45:43.403637Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720660, at schemeshard: 72057594046644480 2024-11-21T17:45:43.521063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790210136154425:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:43.521093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790210136154400:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:43.521135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:43.522563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:45:43.527546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790210136154429:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:45:43.569509Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790210585375984:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:43.570069Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Q0MjI5ODktMzU5YTg1NTUtNjUzMzY1OTgtYzFmNzQ3MDU=, ActorId: [1:7439790210585375934:2299], ActorState: ExecuteState, TraceId: 01jd7x8wv56te0566b8c7he5tj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:43.570850Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:43.571853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:43.612785Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790210136154509:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:43.612882Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDBlODRmZDItODk0NDk5MWYtZGYyNTFmMjctODZiYTE3YjA=, ActorId: [2:7439790210136154398:2277], ActorState: ExecuteState, TraceId: 01jd7x8wtz9jb6jsgkf74eq7bj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:43.613036Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:43.640461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:43.737363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:8813", true, true, 1000); 2024-11-21T17:45:43.870041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd7x8x4yckhq7zez4k944gfx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI2ZDE5MTYtM2RkZGI4NzUtYjE5NjBlN2EtZjc0MDcyYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790210585376364:2909] 2024-11-21T17:45:48.156703Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790210585375175:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.156736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:48.161461Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790210136154198:2117];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:48.161491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:45:49.941815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720679:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:8813 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:45:49.975967Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:8813 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { Parti ... NFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:57044 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:46:41.256881Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:46:41.257301Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:46:41.257360Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:46:41.257367Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:46:41.257369Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:46:41.257375Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:46:41.257915Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:46:41.278249Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:46:41.278394Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439790461870195544:2470] connected; active server actors: 1 2024-11-21T17:46:41.278428Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:46:41.278434Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:46:41.278505Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439790461870195544:2470] disconnected; active server actors: 1 2024-11-21T17:46:41.278513Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439790461870195544:2470] disconnected no session 2024-11-21T17:46:41.298231Z node 13 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:46:41.298249Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:46:41.298253Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790461870195514:2470] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:46:41.298262Z node 13 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:46:41.298614Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:41.298636Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [13:7439790461870195561:2470], now have 1 active actors on pipe 2024-11-21T17:46:41.298647Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 14, Generation: 1 2024-11-21T17:46:41.298752Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:41.298769Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:41.298807Z node 14 :PERSQUEUE INFO: new Cookie src|a19c898-251aef3e-76ee5985-499f57d0_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:46:41.298860Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:46:41.298887Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:41.300123Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:41.300135Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:41.300165Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:41.301063Z node 13 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|a19c898-251aef3e-76ee5985-499f57d0_0 2024-11-21T17:46:41.301600Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211201301 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:41.301646Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|a19c898-251aef3e-76ee5985-499f57d0_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:46:41.302486Z :INFO: [] MessageGroupId [src] SessionId [src|a19c898-251aef3e-76ee5985-499f57d0_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:46:41.302494Z :INFO: [] MessageGroupId [src] SessionId [src|a19c898-251aef3e-76ee5985-499f57d0_0] Write session will now close 2024-11-21T17:46:41.302500Z :DEBUG: [] MessageGroupId [src] SessionId [src|a19c898-251aef3e-76ee5985-499f57d0_0] Write session: aborting 2024-11-21T17:46:41.302787Z :INFO: [] MessageGroupId [src] SessionId [src|a19c898-251aef3e-76ee5985-499f57d0_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:46:41.302790Z :DEBUG: [] MessageGroupId [src] SessionId [src|a19c898-251aef3e-76ee5985-499f57d0_0] Write session is aborting and will not restart 2024-11-21T17:46:41.302814Z :DEBUG: [] MessageGroupId [src] SessionId [src|a19c898-251aef3e-76ee5985-499f57d0_0] Write session: destroy 2024-11-21T17:46:41.302844Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|a19c898-251aef3e-76ee5985-499f57d0_0 grpc read done: success: 0 data: 2024-11-21T17:46:41.302854Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a19c898-251aef3e-76ee5985-499f57d0_0 grpc read failed 2024-11-21T17:46:41.302860Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a19c898-251aef3e-76ee5985-499f57d0_0 grpc closed 2024-11-21T17:46:41.302866Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a19c898-251aef3e-76ee5985-499f57d0_0 is DEAD 2024-11-21T17:46:41.303106Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:46:41.303231Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:41.303256Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7439790461870195561:2470] destroyed 2024-11-21T17:46:41.303268Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:46:41.313616Z :INFO: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Starting read session 2024-11-21T17:46:41.313633Z :DEBUG: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Starting cluster discovery 2024-11-21T17:46:41.313681Z :INFO: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2022: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2022
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:2022. " 2024-11-21T17:46:41.313687Z :DEBUG: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Restart cluster discovery in 0.009628s 2024-11-21T17:46:41.324590Z :DEBUG: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Starting cluster discovery 2024-11-21T17:46:41.324701Z :INFO: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2022: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2022
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:2022. " 2024-11-21T17:46:41.324709Z :DEBUG: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Restart cluster discovery in 0.016629s 2024-11-21T17:46:41.342415Z :DEBUG: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Starting cluster discovery 2024-11-21T17:46:41.342532Z :INFO: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2022: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2022
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:2022. " 2024-11-21T17:46:41.342539Z :DEBUG: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Restart cluster discovery in 0.035404s 2024-11-21T17:46:41.378381Z :DEBUG: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Starting cluster discovery 2024-11-21T17:46:41.378491Z :NOTICE: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2022: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2022
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:2022. " } 2024-11-21T17:46:41.378574Z :NOTICE: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2022: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2022
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:2022. " } 2024-11-21T17:46:41.378604Z :INFO: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:41.378614Z :NOTICE: [/Root] [/Root] [62fd636-a5a13b03-2d80efee-1219d28e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } |76.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |76.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> KqpOlapWrite::TierDraftsGCWithRestart [GOOD] |76.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |76.2%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 14409, MsgBus: 15677 2024-11-21T17:46:41.195091Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790460233398368:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:41.195141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001463/r3tmp/tmpy0Q5zy/pdisk_1.dat 2024-11-21T17:46:41.257194Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14409, node 1 2024-11-21T17:46:41.268806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:41.268818Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:41.268820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:41.268852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15677 2024-11-21T17:46:41.296498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.296536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.297618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:41.330158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.337581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.358364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.380701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.401415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.563907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790460233399771:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.563955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.594710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.602166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.614631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.624629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.631316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.638049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.646673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790460233400276:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.646700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790460233400281:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.646703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.647236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:41.651637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790460233400283:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultDistinctCountRI_GroupByL [GOOD] Test command err: Trying to start YDB, gRPC: 21368, MsgBus: 25733 2024-11-21T17:46:13.611075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790341770557184:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:13.611093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e55/r3tmp/tmpFRHvn7/pdisk_1.dat 2024-11-21T17:46:13.659346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21368, node 1 2024-11-21T17:46:13.666130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:13.666142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:13.666144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:13.666174Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25733 TClient is connected to server localhost:25733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:13.711130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:13.711159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:13.711506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:13.712188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:46:13.724470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:13.734544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.734592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.734620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:13.734639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:13.734662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:13.734689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:13.734714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:13.734738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:13.734761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:13.734784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.734806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:13.734830Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790341770557823:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:13.735274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:13.735291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:13.735303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:13.735307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:13.735330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:13.735341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:13.735351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:13.735366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:13.735375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:13.735385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:13.735393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:13.735404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:13.735460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:13.735475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:13.735492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:13.735496Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:13.735509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:13.735517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:13.735534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:13.735543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:13.735558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:13.735566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:13.737792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341770557824:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:13.737809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341770557824:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:13.737829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790341770557824:2289];tablet_id=7207518622 ... 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:39.670941Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:39.843780Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:39.843810Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T17:46:39.927835Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:39.927869Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.011314Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.011349Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.094039Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.094070Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.166350Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:40.187198Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:40.249243Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.249276Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.331810Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. 
CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.331837Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.414094Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.414126Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.496434Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.496459Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.578844Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.578876Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.616473Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:40.741858Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.741888Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.867863Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.867890Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:40.960356Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:40.960381Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:41.042683Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:41.042715Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:41.125452Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:41.125476Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1843:3100], TxId: 281474976715662, task: 161. Ctx: { SessionId : ydb://session/3?node_id=2&id=NDg1ZTU5YjEtZTU1YzU0Zi1kOTNjNDc1YS1jOTYzNTU1Yw==. CustomerSuppliedId : . TraceId : 01jd7xa0462697zkzwsjrdhe4v. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T17:46:41.187906Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:41.209329Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpYql::TestUuidDefaultColumn >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapWrite::TierDraftsGCWithRestart [GOOD] Test command err: Trying to start YDB, gRPC: 23525, MsgBus: 32480 2024-11-21T17:46:22.756745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790378275798292:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:22.756889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dfd/r3tmp/tmpIxC0kU/pdisk_1.dat 2024-11-21T17:46:22.800349Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23525, node 1 2024-11-21T17:46:22.810575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:22.810599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:22.810601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:22.810636Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32480 TClient is connected to server localhost:32480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:22.858109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:22.858138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:22.859200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:22.881078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:22.885842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:22.895182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.895234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.895279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.895299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.895318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.895343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.895362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.895391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.895409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.895424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.895439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.895458Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790378275798946:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.897672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.897691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.897714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.897738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.897752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.897765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.897787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.897806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.897821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:22.897834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:22.897848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:22.897862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790378275798947:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:22.900764Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:22.900786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:22.900820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:22.900840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:22.900881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:22.900903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:22.900923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:22.900944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790378275798954:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:22.900965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:74397903782757989 ... 
ARD DEBUG: Index: tables 1 inserted portions=2;blobs=2;rows=7364;bytes=269696;raw_bytes=8667664; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 72075186224037889 2024-11-21T17:46:41.642221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:46:41.642229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=3; 2024-11-21T17:46:41.642240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:46:41.642255Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=72075186224037889; 2024-11-21T17:46:41.642271Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:46:41.642282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:41.642288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:46:41.642289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:46:41.642292Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:41.642298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:41.642301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:41.642302Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:41.642306Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:41.642312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:46:41.642316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:41.642367Z node 1 :TX_COLUMNSHARD DEBUG: fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:46:41.642392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:46:41.642403Z node 1 :TX_COLUMNSHARD DEBUG: Registered with mediator time cast at tablet 72075186224037889 2024-11-21T17:46:41.642435Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T17:46:41.642443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889; 2024-11-21T17:46:41.642445Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:46:41.642446Z node 1 :TX_COLUMNSHARD DEBUG: Create periodic stats pipe to 72057594046644480 at tablet 72075186224037889 2024-11-21T17:46:41.642460Z node 1 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2024-11-21T17:46:41.642472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:46:41.642474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:46:41.642476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:46:41.642478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:41.642480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:41.642482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:41.642484Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:41.642488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:41.642491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:46:41.642493Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:41.642502Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:46:41.642510Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:46:41.642518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=88; 2024-11-21T17:46:41.642527Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=2;path_id=3; 2024-11-21T17:46:41.642541Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=2;path_id=3; 2024-11-21T17:46:41.642551Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=3; 2024-11-21T17:46:41.642552Z node 1 :TX_COLUMNSHARD INFO: fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; 2024-11-21T17:46:41.642559Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732211201000 at tablet 72075186224037889 2024-11-21T17:46:41.642561Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:46:41.642563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:46:41.642564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:46:41.642566Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:46:41.642569Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:46:41.642571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:41.642572Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:46:41.642579Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:46:41.642584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439790459880181478:3695];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:46:41.642590Z node 1 :TX_COLUMNSHARD DEBUG: Connected to 72057594046644480 at tablet 72075186224037889 2024-11-21T17:46:41.642595Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:7439790459880181525:3714];tablet_id=72075186224037889;parent=[1:7439790459880181478:3695];fline=manager.h:99;event=ask_data;request=request_id=927;3={portions_count=2};; 2024-11-21T17:46:42.136818Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439790459880181436:3692];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:42.137322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439790459880181438:3693];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:42.137335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439790459880181461:3694];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:42.141597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439790459880181478:3695];fline=actor.cpp:33;event=skip_flush_writing; >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> KqpOlapSysView::StatsSysView [GOOD] >> KqpYql::TestUuidDefaultColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TRtmrTestReboots::CreateRtmrVolumeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:45:43.976313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:43.976343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:43.976347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:43.976352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:43.976358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:43.976362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:43.976370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:43.976513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:43.996387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:43.996420Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:45:43.999165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:43.999275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:43.999307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:44.001762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:44.001834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:44.001965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:44.002141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:44.002684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:44.002953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:44.002963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:44.002974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:44.002980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:44.002986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:44.003031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:45:44.004028Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:45:44.017434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:44.017546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:44.017624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:44.017672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:44.017681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:44.018864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:44.018893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:44.018958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:44.018968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:44.018972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:44.018977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:44.019400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:44.019411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:44.019415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:44.020912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:44.020929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:44.020935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:44.020942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:44.021541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:44.021995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:44.022050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for 
TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:44.022241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:44.022267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:44.022274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:44.022329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:44.022335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:44.022364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:44.022377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:44.022717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:44.022728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:44.022777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:44.022782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:44.022862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:44.022868Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:44.022880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:44.022885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:44.022891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:44.022896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:44.022901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:44.022906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:44.022917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:44.022923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:44.022928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
678944, LocalPathId: 3] 2024-11-21T17:46:40.984407Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:46:40.984435Z node 220 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:46:40.984439Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [220:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:46:40.984443Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [220:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 4 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:46:40.984548Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:46:40.984556Z node 220 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:46:40.984569Z node 220 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:46:40.984573Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:46:40.984579Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:46:40.984585Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:46:40.984590Z node 220 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:46:40.984594Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:46:40.984690Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 45 2024-11-21T17:46:40.984699Z node 220 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T17:46:40.984702Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T17:46:40.984704Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2024-11-21T17:46:40.984796Z node 220 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:46:40.984804Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:46:40.984807Z node 220 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:46:40.984811Z node 220 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:46:40.984814Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:46:40.984910Z node 220 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 
72057594046678944, cookie: 1003 2024-11-21T17:46:40.984917Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:46:40.984919Z node 220 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:46:40.984921Z node 220 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T17:46:40.984924Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 44 2024-11-21T17:46:40.984930Z node 220 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:46:40.984933Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [220:300:2292] 2024-11-21T17:46:40.985505Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:46:40.985800Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:46:40.985856Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:46:40.985874Z node 220 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [220:301:2293] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T17:46:40.986010Z node 220 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirRtmr/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:46:40.986078Z node 220 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirRtmr/rtmr1" took 76us result status StatusSuccess 2024-11-21T17:46:40.986245Z node 220 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirRtmr/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 4 PartitionsCount: 42 Partitions { 
PartitionId: "\335\367x\002n?\304\026\023\326\005\327\360}t\323" BusKey: 0 TabletId: 72075186233409546 } Partitions { PartitionId: "\243\233\206\323/\267$\007B\332g]?\\z\306" BusKey: 1 TabletId: 72075186233409547 } Partitions { PartitionId: "\274\217\023\336N\237\233\205{\300\007\305R \271\016" BusKey: 2 TabletId: 72075186233409548 } Partitions { PartitionId: "[wu\206\306q\301\tUYH,\223u2*" BusKey: 3 TabletId: 72075186233409549 } Partitions { PartitionId: "\'\375\345aEdO\255\254\353Gtx\202\323d" BusKey: 4 TabletId: 72075186233409550 } Partitions { PartitionId: "*\276\247\246\343\035\3115\374F_5\206\355\276\320" BusKey: 5 TabletId: 72075186233409551 } Partitions { PartitionId: "\232\341,\255U\246&\243Z\231\265|\252LK\307" BusKey: 6 TabletId: 72075186233409552 } Partitions { PartitionId: "#\262\0074\314\333\275\261\232$\032q\375[\001\025" BusKey: 7 TabletId: 72075186233409553 } Partitions { PartitionId: "\254\234\232\027{jpU\227\237\206\355\202\3120O" BusKey: 8 TabletId: 72075186233409554 } Partitions { PartitionId: "r\207\331\200\316VsV\342\354R\231:\324p]" BusKey: 9 TabletId: 72075186233409555 } Partitions { PartitionId: "`^\306E\323QPz;\231\221\233\347\320\204=" BusKey: 10 TabletId: 72075186233409556 } Partitions { PartitionId: "\3509\027\272\255W+\334\373\267\261\366\232\371\016\251" BusKey: 11 TabletId: 72075186233409557 } Partitions { PartitionId: "\201#\021\244c;d})q|?\365%\256\241" BusKey: 12 TabletId: 72075186233409558 } Partitions { PartitionId: "\300k\347{\317\310s\352\255\330\337\3049bz\255" BusKey: 13 TabletId: 72075186233409559 } Partitions { PartitionId: "\031h\225\317\342\273\256E\345\351G\"\321\334\2056" BusKey: 14 TabletId: 72075186233409560 } Partitions { PartitionId: "h\307\026\006\265\257s\026\002\013\225\265\374>\226\223" BusKey: 15 TabletId: 72075186233409561 } Partitions { PartitionId: "Za2\310\376u\030m\'\021q\306\243\230\2546" BusKey: 16 TabletId: 72075186233409562 } Partitions { PartitionId: "\251*\007\200\352\260\023\312SW\253%\276\337\031\330" BusKey: 17 TabletId: 72075186233409563 } Partitions { PartitionId: "\211v7\007\331\243\277\311k\304\005\301\222\200\"I" BusKey: 18 TabletId: 72075186233409564 } Partitions { PartitionId: "R\234\263\330\272_\246\t2P\024\345\347\331\234c" BusKey: 19 TabletId: 72075186233409565 } Partitions { PartitionId: "\212\3469\244l\255\354\\\267j\036w\321\267\257\037" BusKey: 20 TabletId: 72075186233409566 } Partitions { PartitionId: "Y(" BusKey: 26 TabletId: 72075186233409572 } Partitions { PartitionId: "L\020\252\237\356\232\327GK\362\362\\Mr\301\203" BusKey: 27 TabletId: 72075186233409573 } Partitions { PartitionId: "\264\007\207[\241\317\303]?\254\217\251qJ\214\004" BusKey: 28 TabletId: 72075186233409574 } Partitions { PartitionId: "\2408NM\354\003\305KU\345\213d\200 \346T" BusKey: 29 TabletId: 72075186233409575 } Partitions { PartitionId: "\001\234_\251\312\277H/\360\235>\211\223\206V," BusKey: 30 TabletId: 72075186233409576 } Partitions { PartitionId: "L*\263\225\234\026\376\365\345\206W\3602\224\003\260" BusKey: 31 TabletId: 72075186233409577 } Partitions { PartitionId: "\034\010\361}\002\017\354G\340\275\'\251\332%`\017" BusKey: 32 TabletId: 72075186233409578 } Partitions { PartitionId: "\251\311s\220\230\317a\te\1770\222\206\177\226Q" BusKey: 33 TabletId: 72075186233409579 } Partitions { PartitionId: "\253\362w\360M\371\354T\252QU\006\0031\376\347" BusKey: 34 TabletId: 72075186233409580 } Partitions { PartitionId: "\303MS\']5xv0\254\306\376JJ\202\244" BusKey: 35 TabletId: 72075186233409581 } Partitions { PartitionId: 
"+F\377\005\376\007\030\347VA\014\n_]\212\225" BusKey: 36 TabletId: 72075186233409582 } Partitions { PartitionId: "\220I\213\326\032\2678e\267\311r\235\023}\261\205" BusKey: 37 TabletId: 72075186233409583 } Partitions { PartitionId: "\020=\323\252+^\354\001\032\260d>}\321\207\356" BusKey: 38 TabletId: 72075186233409584 } Partitions { PartitionId: "\245zDMx\354\325\202\366B\214&R\211mX" BusKey: 39 TabletId: 72075186233409585 } Partitions { PartitionId: "{M\337\022\3214\037\241\247T2\242\204\232B\206" BusKey: 40 TabletId: 72075186233409586 } Partitions { PartitionId: "\241\353v\364t\033\351\341\251\202z_\354\2436\366" BusKey: 41 TabletId: 72075186233409587 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 21244, MsgBus: 4981 2024-11-21T17:46:42.740816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790464964717241:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001786/r3tmp/tmpcOcZRj/pdisk_1.dat 2024-11-21T17:46:42.777185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:42.811410Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21244, node 1 2024-11-21T17:46:42.854677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:42.854690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:42.854692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:42.854738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4981 2024-11-21T17:46:42.877256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:42.877293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:42.878172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4981 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:42.925028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:42.928006Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:43.208565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790469259684964:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.208596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.260577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.292064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790469259685064:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.292091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790469259685069:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.292103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.292743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:43.293993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790469259685071:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk Test command err: Trying to start YDB, gRPC: 3796, MsgBus: 22120 2024-11-21T17:46:41.374211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790461553522012:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:41.374425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00145a/r3tmp/tmpsOwfp1/pdisk_1.dat 2024-11-21T17:46:41.435038Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3796, node 1 2024-11-21T17:46:41.449485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:41.449499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:41.449500Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:41.449533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22120 2024-11-21T17:46:41.474873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.474899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.475883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:41.501893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.508362Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:41.517989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:46:41.542464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.605979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.622061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.686152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790461553523551:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.686187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.710405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.716089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.722334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.729628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.743570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.750408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.758970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790461553524045:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.758993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.758997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790461553524050:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.759631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:41.763416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790461553524052:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysView [GOOD] Test command err: Trying to start YDB, gRPC: 20653, MsgBus: 62020 2024-11-21T17:46:30.193949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790415035868299:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:30.194271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000db3/r3tmp/tmp6GeVkQ/pdisk_1.dat 2024-11-21T17:46:30.242394Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20653, node 1 2024-11-21T17:46:30.254621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:30.254632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:30.254637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:30.254664Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62020 TClient is connected to server localhost:62020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:30.294737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:30.294764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:30.295871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:30.321333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:30.331355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:30.342038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.342111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.342166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.342187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.342209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.342228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.342248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.342270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:30.342290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:30.342305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.342320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:30.342336Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790415035868952:2288];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:30.342781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:30.342795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:30.342805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:30.342809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:30.342826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:30.342834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:30.342842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:30.342854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:30.342864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:30.342871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:30.342876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:30.342881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:30.342930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:30.342943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:30.342958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:30.342962Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.342971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:30.342974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:30.342988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:30.342991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:30.343004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:30.343007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:30.345987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790415035868953:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.346011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790415035868953:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.346044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790415035868953:2289];tablet_id=7207518622 ... 
LBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1173808;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1173808;columns=5; 
WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0
2024-11-21T17:46:35.194477Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790415035868299:2053];send_to=[0:7307199536658146131:7762515];
2024-11-21T17:46:35.194515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0
====================================
QUERY: SELECT PathId, Kind, TabletId, Sum(Rows) as Rows FROM `/Root/olapStore/.sys/store_primary_index_portion_stats` WHERE Activity == 1 GROUP BY PathId, Kind, TabletId ORDER BY TabletId, Kind, PathId
RESULT:
2024-11-21T17:46:41.689241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790462280511609:3752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.689241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790462280511617:3755], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.689291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.690442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:46:41.692968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790462280511623:3756], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:43.157908Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211201866, txId: 281474976715662] shutting down TabletId: 72075186224037888 Rows: 33295 Kind: INSERTED PathId: 3 TabletId: 72075186224037889 Rows: 33320 Kind: INSERTED PathId: 3 TabletId: 72075186224037890 Rows: 33385 Kind: INSERTED PathId: 3 |76.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/pytest >> test.py::test[window-yql-18879-default.txt-Results] [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |76.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |76.3%| [TA] $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/test-results/pytest/{meta.json ... results_accumulator.log} |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet [GOOD] >> TLocalTests::TestAddTenant >> KqpOlapBlobsSharing::TableReshardingModuloN [FAIL] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> TMonitoringTests::ValidActorId >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL [GOOD] |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> TLocalTests::TestAddTenant [GOOD] >> LocalPartitionReader::Simple [GOOD] >> TMonitoringTests::ValidActorId [GOOD] >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |76.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> LocalPartitionReader::FeedSlowly [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 15441, MsgBus: 15095 2024-11-21T17:46:42.656338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790465175946556:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:42.656391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001783/r3tmp/tmp45jdBA/pdisk_1.dat 2024-11-21T17:46:42.721626Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15441, node 1 2024-11-21T17:46:42.756538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:42.756581Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:42.764908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:42.776469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:42.776480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:42.776482Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:42.776525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15095 TClient is connected to server localhost:15095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:42.938968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:42.945093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:42.957131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:42.993417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.025113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.061858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.142508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790469470915238:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.142535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.204690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.213770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.229667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.241101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.295672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.304260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.313521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790469470915755:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.313543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.313551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790469470915760:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.314217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:43.316959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790469470915763:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:43.562246Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211203604, txId: 281474976715671] shutting down 2024-11-21T17:46:43.620606Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211203653, txId: 281474976715673] shutting down 2024-11-21T17:46:43.746258Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211203772, txId: 281474976715677] shutting down Trying to start YDB, gRPC: 7969, MsgBus: 13458 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001783/r3tmp/tmp0iRcoQ/pdisk_1.dat 2024-11-21T17:46:44.160325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:44.166427Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7969, node 2 2024-11-21T17:46:44.175483Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:44.175502Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:44.175504Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:44.175542Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13458 2024-11-21T17:46:44.261098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:44.261133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:44.267843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:46:44.286942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.288580Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:44.295958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.311196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:44.332779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:44.346146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:44.539192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790474973480441:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.539217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.546292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.558243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.568961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.587232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.598253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.610782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.691446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790474973480951:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.691484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.691696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790474973480956:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.692742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:44.695923Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:46:44.696078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790474973480958:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:44.938504Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211204983, txId: 281474976715671] shutting down 2024-11-21T17:46:44.985079Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205025, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShardsFilterPushdownEmptySet [GOOD] Test command err: 2024-11-21T17:46:27.025926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:27.028930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:27.029066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:27.029109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:27.029521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:27.029541Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dd8/r3tmp/tmp2knPFT/pdisk_1.dat 2024-11-21T17:46:27.117384Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:27.204561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:27.292746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:27.292783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:27.293787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:27.293805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:27.305876Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:27.306018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:27.306114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:27.640427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:27.690321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.690371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.690420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.690442Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.690459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.690478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.690493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.690513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.690532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.690557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.690573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.690594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1286:2803];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.697639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.697669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.697726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.697751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.697769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.697786Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.697803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.697821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.697849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.697868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.697885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.697900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1292:2807];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.704961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.704992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.705032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.705052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.705070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.705088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.705106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.705127Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.705164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.705183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.705200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:1298:2809];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.705217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:129 ... omponent=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T17:46:41.118360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=9045fbe6-a83011ef-8ff68e95-8f6cb4ba;fline=with_appended.cpp:80;portions=67,;task_id=9045fbe6-a83011ef-8ff68e95-8f6cb4ba; 2024-11-21T17:46:41.180548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=904ba92e-a83011ef-a503dc1b-b3362ea;fline=with_appended.cpp:80;portions=67,;task_id=904ba92e-a83011ef-a503dc1b-b3362ea; 2024-11-21T17:46:41.245853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=90519938-a83011ef-ba9c5c07-8f8632e6;fline=with_appended.cpp:80;portions=67,;task_id=90519938-a83011ef-ba9c5c07-8f8632e6; 2024-11-21T17:46:41.301774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=90573348-a83011ef-a6e8a13f-937686ee;fline=with_appended.cpp:80;portions=67,;task_id=90573348-a83011ef-a6e8a13f-937686ee; 2024-11-21T17:46:41.357933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=906016d4-a83011ef-af8fb9f0-cd45d611;fline=with_appended.cpp:80;portions=67,;task_id=906016d4-a83011ef-af8fb9f0-cd45d611; 2024-11-21T17:46:41.415262Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=9068a4ac-a83011ef-a1576e8c-91e2ee07;fline=with_appended.cpp:80;portions=67,;task_id=9068a4ac-a83011ef-a1576e8c-91e2ee07; 2024-11-21T17:46:41.474500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=909198bc-a83011ef-b67f83b0-5fcf21dd;fline=with_appended.cpp:80;portions=67,;task_id=909198bc-a83011ef-b67f83b0-5fcf21dd; 2024-11-21T17:46:41.529996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=909730a6-a83011ef-8bfe9872-dbd5075b;fline=with_appended.cpp:80;portions=67,;task_id=909730a6-a83011ef-8bfe9872-dbd5075b; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T17:46:41.796088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=90b0fbbc-a83011ef-895405f1-d53ab3d2;fline=with_appended.cpp:80;portions=68,;task_id=90b0fbbc-a83011ef-895405f1-d53ab3d2; 2024-11-21T17:46:41.837711Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;task_id=90ba7bb0-a83011ef-94cf1eb5-f72c576d;fline=with_appended.cpp:80;portions=68,;task_id=90ba7bb0-a83011ef-94cf1eb5-f72c576d; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T17:46:41.983905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=90c477fa-a83011ef-a7ef1405-9ff6e018;fline=with_appended.cpp:80;portions=70,;task_id=90c477fa-a83011ef-a7ef1405-9ff6e018; 2024-11-21T17:46:42.042379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=90ccfa38-a83011ef-92b58608-83f09786;fline=with_appended.cpp:80;portions=70,;task_id=90ccfa38-a83011ef-92b58608-83f09786; 2024-11-21T17:46:42.101302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=90d58d60-a83011ef-93531dc8-bb97c4c9;fline=with_appended.cpp:80;portions=70,;task_id=90d58d60-a83011ef-93531dc8-bb97c4c9; 2024-11-21T17:46:42.149693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=90de4ae0-a83011ef-92111cfb-8d479174;fline=with_appended.cpp:80;portions=70,;task_id=90de4ae0-a83011ef-92111cfb-8d479174; 2024-11-21T17:46:42.230655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=90e7590a-a83011ef-9150b591-9beaf44e;fline=with_appended.cpp:80;portions=70,;task_id=90e7590a-a83011ef-9150b591-9beaf44e; 2024-11-21T17:46:42.282204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=90efcf04-a83011ef-b81f47eb-e5418740;fline=with_appended.cpp:80;portions=70,;task_id=90efcf04-a83011ef-b81f47eb-e5418740; 2024-11-21T17:46:42.353279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=91186752-a83011ef-bf75f855-a33e948b;fline=with_appended.cpp:80;portions=70,;task_id=91186752-a83011ef-bf75f855-a33e948b; 2024-11-21T17:46:42.402434Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=911ec35e-a83011ef-8d36c6ed-8d96c07c;fline=with_appended.cpp:80;portions=70,;task_id=911ec35e-a83011ef-8d36c6ed-8d96c07c; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T17:46:42.701454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=9135135c-a83011ef-89aa45ce-a91fede6;fline=with_appended.cpp:80;portions=71,;task_id=9135135c-a83011ef-89aa45ce-a91fede6; 2024-11-21T17:46:42.786842Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=913dfee0-a83011ef-830f1fbb-b34f4b29;fline=with_appended.cpp:80;portions=71,;task_id=913dfee0-a83011ef-830f1fbb-b34f4b29; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=2350304;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=2350304;columns=5; 2024-11-21T17:46:43.054010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=9146faae-a83011ef-b2eef9af-e5a1aa2c;fline=with_appended.cpp:80;portions=73,;task_id=9146faae-a83011ef-b2eef9af-e5a1aa2c; 2024-11-21T17:46:43.112381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=914e5b96-a83011ef-acae802a-ab43c7ad;fline=with_appended.cpp:80;portions=73,;task_id=914e5b96-a83011ef-acae802a-ab43c7ad; 2024-11-21T17:46:43.156292Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;task_id=915abb20-a83011ef-ad60b8f2-ef9e28cd;fline=with_appended.cpp:80;portions=73,;task_id=915abb20-a83011ef-ad60b8f2-ef9e28cd; 2024-11-21T17:46:43.220248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=916296a6-a83011ef-891366ea-3d77d15d;fline=with_appended.cpp:80;portions=73,;task_id=916296a6-a83011ef-891366ea-3d77d15d; 2024-11-21T17:46:43.281209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=916d69be-a83011ef-9575db91-51b66218;fline=with_appended.cpp:80;portions=73,;task_id=916d69be-a83011ef-9575db91-51b66218; 2024-11-21T17:46:43.357504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=9174ec7a-a83011ef-8fd62591-ad6a5797;fline=with_appended.cpp:80;portions=73,;task_id=9174ec7a-a83011ef-8fd62591-ad6a5797; 2024-11-21T17:46:43.445256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=91a28b9e-a83011ef-a3e71e13-6d26822b;fline=with_appended.cpp:80;portions=73,;task_id=91a28b9e-a83011ef-a3e71e13-6d26822b; 2024-11-21T17:46:43.514259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=91af9226-a83011ef-ac6e50b8-8f7f767a;fline=with_appended.cpp:80;portions=73,;task_id=91af9226-a83011ef-ac6e50b8-8f7f767a; 2024-11-21T17:46:43.619462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7261:6434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.619498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7272:6439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.619670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.624200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:43.685268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7275:6442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:43.925756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=91d85b84-a83011ef-829bfc86-45550eeb;fline=with_appended.cpp:80;portions=74,;task_id=91d85b84-a83011ef-829bfc86-45550eeb; 2024-11-21T17:46:44.009961Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=91e13c90-a83011ef-9fc44223-a7a5c1b1;fline=with_appended.cpp:80;portions=74,;task_id=91e13c90-a83011ef-9fc44223-a7a5c1b1; 2024-11-21T17:46:44.147562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=91e7f56c-a83011ef-bd4fc383-97c89d93;fline=with_appended.cpp:80;portions=75,;task_id=91e7f56c-a83011ef-bd4fc383-97c89d93; 2024-11-21T17:46:44.274416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=91f1cbaa-a83011ef-af051799-33c751a1;fline=with_appended.cpp:80;portions=75,;task_id=91f1cbaa-a83011ef-af051799-33c751a1; 2024-11-21T17:46:44.329204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=91fb07f6-a83011ef-8614251c-973e5967;fline=with_appended.cpp:80;portions=75,;task_id=91fb07f6-a83011ef-8614251c-973e5967; 2024-11-21T17:46:44.343431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=9206a85e-a83011ef-bef0eb39-f3931d3;fline=with_appended.cpp:80;portions=75,;task_id=9206a85e-a83011ef-bef0eb39-f3931d3; 2024-11-21T17:46:44.359075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=92140ca6-a83011ef-92599986-5f5472d0;fline=with_appended.cpp:80;portions=75,;task_id=92140ca6-a83011ef-92599986-5f5472d0; 2024-11-21T17:46:44.374945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=921e92de-a83011ef-8628cc36-ad732217;fline=with_appended.cpp:80;portions=75,;task_id=921e92de-a83011ef-8628cc36-ad732217; 2024-11-21T17:46:44.510010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=925d61bc-a83011ef-99c47a64-819fb0e1;fline=with_appended.cpp:80;portions=75,;task_id=925d61bc-a83011ef-99c47a64-819fb0e1; 2024-11-21T17:46:44.531528Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=926a31d0-a83011ef-b398e0a8-f7101733;fline=with_appended.cpp:80;portions=75,;task_id=926a31d0-a83011ef-b398e0a8-f7101733; 2024-11-21T17:46:44.563435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=927f3436-a83011ef-acc671e2-8517fbe7;fline=with_appended.cpp:80;portions=75,;task_id=927f3436-a83011ef-acc671e2-8517fbe7; 2024-11-21T17:46:44.591905Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=9292959e-a83011ef-a7896f8c-8bcd3abc;fline=with_appended.cpp:80;portions=75,;task_id=9292959e-a83011ef-a7896f8c-8bcd3abc; -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 -- nodeId: 2 -- nodeId: 1 -- nodeId: 2 -- nodeId: 1 -- nodeId: 1 -- nodeId: 2 2024-11-21T17:46:44.766134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xaqh1f8w5xhshsszcenhh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI4MDhmZDEtOGY3ZDQxYWYtNTBlYmMwY2QtZjEyMjE2MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"level","type":{"optional_type":{"item":{"type_id":1}}}},{"name":"message","type":{"optional_type":{"item":{"type_id":4608}}}},{"name":"resource_id","type":{"optional_type":{"item":{"type_id":4608}}}},{"name":"timestamp","type":{"type_id":50}},{"name":"uid","type":{"type_id":4608}}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":65} 2024-11-21T17:46:44.825803Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2500, txId: 18446744073709551615] shutting down >> KqpSystemView::NodesRange2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2024-11-21T17:46:41.734681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:41.735223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:41.735253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020ff/r3tmp/tmp09vTtY/pdisk_1.dat 2024-11-21T17:46:41.838463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.839564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:46:41.846166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.846452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:46:41.846902Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T17:46:41.846918Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T17:46:41.860313Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:41.860922Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2024-11-21T17:46:41.904394Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:307:2347] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T17:46:41.904440Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2024-11-21T17:46:41.904466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.904471Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:41.904475Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:41.904480Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:41.904483Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:41.904495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T17:46:41.904548Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:46:41.904553Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T17:46:41.904558Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T17:46:41.904562Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:46:41.904591Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2024-11-21T17:46:41.914868Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2024-11-21T17:46:41.914894Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:307:2347]) 2024-11-21T17:46:41.914941Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:46:41.915108Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2024-11-21T17:46:41.915124Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Execute 2024-11-21T17:46:41.915130Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:41.915154Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Complete 2024-11-21T17:46:41.915189Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 202797666304 } 2024-11-21T17:46:41.915193Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2024-11-21T17:46:41.915197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:41.915232Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2024-11-21T17:46:41.915237Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:41.915239Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:41.915272Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:46:41.915278Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T17:46:41.915284Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T17:46:41.915288Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:46:41.925597Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2024-11-21T17:46:41.925624Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:46:42.017308Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T17:46:42.017355Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T17:46:42.017468Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T17:46:42.017570Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T17:46:42.017580Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T17:46:42.017775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T17:46:42.018029Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T17:46:42.018045Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T17:46:42.018049Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T17:46:42.018053Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T17:46:42.018183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:46:42.018196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:46:42.018346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T17:46:42.018875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:42.019145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:46:42.019157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:42.019282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T17:46:42.019711Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2024-11-21T17:46:42.020987Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2024-11-21T17:46:42.021010Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T17:46:42.021058Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2024-11-21T17:46:42.021066Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2024-11-21T17:46:42.021074Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T17:46:42.021099Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2024-11-21T17:46:42.021189Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T17:46:42.021215Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
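
The schemeshard and Hive entries just above show this test proposing CREATE TABLE for /Root/table-1 and Hive allocating a DataShard tablet for it; further down in this block the coordinator plans a drop and the datashard executes it ("Trying to DROP TABLE"), which is what TestDropTableCompletesQuicklyRW measures. At the YQL level, the scenario this trace corresponds to is roughly the following (a sketch only; the column layout is an assumption, only the table path comes from this log):

    CREATE TABLE `/Root/table-1` (
        key Uint32,    -- assumed column
        value Utf8,    -- assumed column
        PRIMARY KEY (key)
    );
    DROP TABLE `/Root/table-1`;
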
2024-11-21T17:46:42.021338Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T17:46:42.021401Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T17:46:42.021417Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{95499645518944}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T17:46:42.021439Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{95499645518944}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2024-11-21T17:46:42.021458Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{95499645518944}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2024-11-21T17:46:42.021467Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{95499645518944}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... 5186224037888 OK) 2024-11-21T17:46:45.125794Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:46:45.125812Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2024-11-21T17:46:45.125882Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T17:46:45.125889Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T17:46:45.125942Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2024-11-21T17:46:45.136488Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:46:45.136938Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2024-11-21T17:46:45.136953Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 step# 3001 Status# 16 SEND to# [2:379:2374] Proxy marker# C1 2024-11-21T17:46:45.147525Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2024-11-21T17:46:45.262276Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715666 has been planned 2024-11-21T17:46:45.262311Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T17:46:45.262317Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2024-11-21T17:46:45.262393Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2024-11-21T17:46:45.262520Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715666 marker# C2 2024-11-21T17:46:45.262530Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 stepId# 3500 Status# 17 SEND EvProposeTransactionStatus to# [2:379:2374] Proxy 2024-11-21T17:46:45.262641Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:46:45.262801Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715666 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:46:45.262810Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:45.262846Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:45.262852Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:45.262859Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T17:46:45.262912Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T17:46:45.262937Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:45.262977Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:45.262993Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2024-11-21T17:46:45.263089Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:45.263346Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T17:46:45.263354Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T17:46:45.263362Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:45.263532Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:45.263554Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:45.263565Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2024-11-21T17:46:45.263576Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:45.263589Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T17:46:45.263600Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T17:46:45.263603Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2024-11-21T17:46:45.263606Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2024-11-21T17:46:45.263613Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 acknowledged 2024-11-21T17:46:45.263747Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2024-11-21T17:46:45.264329Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2024-11-21T17:46:45.264344Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T17:46:45.264462Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2024-11-21T17:46:45.264477Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 1 2024-11-21T17:46:45.264592Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2024-11-21T17:46:45.264698Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:45.264926Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.264 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2024-11-21T17:46:45.264951Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.264 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2024-11-21T17:46:45.264962Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.264 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:59: Begin, root #43 2024-11-21T17:46:45.264969Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.264 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2024-11-21T17:46:45.264977Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.264 TRACE ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:387: {0}, callable #43 2024-11-21T17:46:45.264989Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.264 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:577: Node #43 finished execution 2024-11-21T17:46:45.265005Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.265 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2024-11-21T17:46:45.265011Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.265 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2024-11-21T17:46:45.265016Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.265 INFO ydb-core-tx-datashard-ut_minstep(pid=279394, 
tid=0x00007F64F9D37BC0) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2024-11-21T17:46:45.265043Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.265 NOTE ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [common provider] yql_provider_gateway.cpp:21:
<main>: Info: Execution, code: 1060 2024-11-21T17:46:45.265049Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.265 NOTE ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [common provider] yql_provider_gateway.cpp:21:
<main>:1:12: Info: Executing DROP TABLE 2024-11-21T17:46:45.265054Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OGU2NjYxMzktNzhiNWQxOTktN2VlMjE0ODUtMTI4MjE3MQ== 2024-11-21 17:46:45.265 NOTE ydb-core-tx-datashard-ut_minstep(pid=279394, tid=0x00007F64F9D37BC0) [common provider] yql_provider_gateway.cpp:21:
<main>: Info: Success, code: 4 2024-11-21T17:46:45.276497Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:46:45.276576Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T17:46:45.276923Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:46:45.277115Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2024-11-21T17:46:45.277215Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2024-11-21T17:46:45.277227Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2024-11-21T17:46:45.277248Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2024-11-21T17:46:45.277270Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2024-11-21T17:46:45.277288Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2024-11-21T17:46:45.362566Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T17:46:45.382493Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T17:46:45.401298Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:45.403932Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:45.404446Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:45.404593Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:46:45.405444Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:46:45.405469Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:46:45.405495Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:46:45.409740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:46:45.409812Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:46:45.409830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:46:45.409855Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type#
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:46:45.409869Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:46:45.409883Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:46:45.441598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:46:45.441641Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:46:45.452580Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:46:45.452636Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:46:45.452653Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:46:45.452665Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:46:45.452691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:46:45.452698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:46:45.452704Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:46:45.452712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:46:45.463665Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:46:45.463730Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:46:45.463942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:46:45.463949Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:46:45.465245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:46:45.465442Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:46:45.465700Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute 
TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001dec/r3tmp/tmpDWLoam/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:46:45.465800Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/001dec/r3tmp/tmpDWLoam/pdisk_1.dat 2024-11-21T17:46:45.465990Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:46:45.466012Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:46:45.466026Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:46:45.466062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:46:45.466095Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:46:45.466485Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:46:45.466525Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:46:45.477690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:46:45.477860Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:46:45.477917Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:46:45.477924Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:46:45.477956Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:46:45.477980Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:46:45.478069Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:45.478077Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:45.478081Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:45.478097Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T17:46:45.478449Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:46:45.478457Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T17:46:45.478594Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 
2024-11-21T17:46:45.478613Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:45.478621Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:45.478624Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:45.496313Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T17:46:45.496339Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T17:46:45.500861Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T17:46:45.500922Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 1 Network: 1) 2024-11-21T17:46:45.501055Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:45.501062Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:45.501080Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:369:2316] 2024-11-21T17:46:45.501477Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:369:2316] 2024-11-21T17:46:45.501600Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:45.501610Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:45.501613Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:45.506553Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T17:46:45.506661Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T17:46:45.506675Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:46:45.506748Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:46:45.506753Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:46:45.506765Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:402:2336] 2024-11-21T17:46:45.506880Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:402:2336] 2024-11-21T17:46:45.506900Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:46:45.506905Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:46:45.506907Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:46:45.506989Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-unknown to schemeshard 72057594046578944 2024-11-21T17:46:45.507004Z node 1 :LOCAL DEBUG: 
TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusPathDoesNotExist Path: "/dc-1/users/tenant-unknown" 2024-11-21T17:46:45.507008Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2024-11-21T17:46:45.507016Z node 1 :LOCAL ERROR: Unknown domain dc-3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_MaxR_GroupL_OrderL [GOOD] Test command err: Trying to start YDB, gRPC: 30428, MsgBus: 31159 2024-11-21T17:46:25.595901Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790393528960434:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:25.596279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000def/r3tmp/tmpFtjEzQ/pdisk_1.dat 2024-11-21T17:46:25.656196Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30428, node 1 2024-11-21T17:46:25.669985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:25.669999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:25.670000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:25.670035Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31159 2024-11-21T17:46:25.696627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:25.696660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:25.697732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:25.732915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:25.736920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:25.747131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.747189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.747221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:25.747242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:25.747257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:25.747272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:25.747286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:25.747303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:25.747321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:25.747339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.747359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:25.747378Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790393528961081:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:25.747745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:25.747758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:25.747770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:25.747783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:25.747799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:25.747803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:25.747812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:25.747822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:25.747833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:25.747841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:25.747848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:25.747857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:25.747911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:25.747922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:25.747937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:25.747946Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:25.747963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:25.747971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:25.747987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:25.747996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:25.748006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:25.748014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:25.751077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393528961080:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:25.751103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393528961080:2288];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:25.751142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790393528961080:2288];tablet_id=7207518622 ... 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.093625Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.325505Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.325538Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T17:46:43.419052Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.419089Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.520488Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.520525Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.618932Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.618970Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.733650Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:43.754539Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:43.788088Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.788123Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.871047Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. 
TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.871079Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.959709Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.959738Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.046007Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.046041Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.131941Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.131987Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.200246Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:44.247311Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.247343Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.427351Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.427384Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.524856Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.524892Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.609964Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.610010Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.696868Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.696903Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1795:3052], TxId: 281474976715662, task: 113. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjUwZWE4ZTQtYTdmZWRmOTYtZDRiZjBmZjUtZDk2Njc1YWI=. TraceId : 01jd7xaamw1p26anwfbkczmq4e. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T17:46:44.799184Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:44.822984Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlap::PredicatePushdownCastErrors [GOOD] |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_ResultL_FilterL_OrderL_Limit2 [GOOD] Test command err: Trying to start YDB, gRPC: 17440, MsgBus: 18363 2024-11-21T17:46:32.655555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790422790169714:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:32.655574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d86/r3tmp/tmpdecXdX/pdisk_1.dat 2024-11-21T17:46:32.711367Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17440, node 1 2024-11-21T17:46:32.731330Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:32.731344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:32.731346Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:32.731396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18363 2024-11-21T17:46:32.756685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:32.756713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:32.757770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:32.788957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:32.797257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:32.809372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:32.809458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:32.809506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:32.809535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:32.809577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:32.809607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:32.809629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:32.809654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:32.809679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:32.809703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:32.809727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:32.809751Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790422790170372:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:32.810250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:32.810264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:32.810275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:32.810278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:32.810294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:32.810305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:32.810313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:32.810317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:32.810327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:32.810335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:32.810341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:32.810349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:32.810409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:32.810423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:32.810442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T17:46:32.810450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:32.810460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:32.810468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:32.810482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:32.810491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:32.810500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:32.810507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:32.812845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790422790170377:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:32.812862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790422790170377:2291];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:32.812878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790422790170377:2291];tablet_id=7207518622 ... TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.573089Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.760540Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.760574Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T17:46:43.847052Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.847088Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:43.930536Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:43.930573Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.019660Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.019693Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.102374Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:44.123402Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:46:44.174782Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.174817Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.258618Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. 
SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.258648Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.346180Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.346207Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.432092Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.432117Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.517186Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.517219Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.562708Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:44.674373Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.674406Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.816428Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.816460Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.901732Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.901763Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:44.983943Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:44.983975Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. wakeup with tag 2 2024-11-21T17:46:45.071957Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 65538 2024-11-21T17:46:45.071982Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:1747:3004], TxId: 281474976715662, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xagek21qnwyctv6jtpzsv. SessionId : ydb://session/3?node_id=2&id=M2RiODJiNTUtYTA0OGMzYTEtYzRlMmJlNjctY2RlZjZmZTY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
wakeup with tag 2 2024-11-21T17:46:45.143917Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[3:1217:2358];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:46:45.164753Z node 3 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[3:1217:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 26848, MsgBus: 8672 2024-11-21T17:46:42.882244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790463285505129:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:42.882324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001773/r3tmp/tmp2gCbED/pdisk_1.dat 2024-11-21T17:46:42.957497Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26848, node 1 2024-11-21T17:46:42.973118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:42.973129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:42.973130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:42.973161Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:42.980947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:42.980973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:42.982055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8672 TClient is connected to server localhost:8672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:43.036722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:43.039303Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:43.045809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.111839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.130714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.140161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.233701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790467580473827:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.233730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.269022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.289956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.299119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.311308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.318073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.325303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.334144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790467580474335:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.334176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.334183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790467580474342:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.334777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:43.338064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790467580474344:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:46:43.528124Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474631:2453] 2024-11-21T17:46:43.529651Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474639:2457] 2024-11-21T17:46:43.533594Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474654:2462] 2024-11-21T17:46:43.540372Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474665:2466] 2024-11-21T17:46:43.544591Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474682:2473] 2024-11-21T17:46:43.551627Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474695:2478] 2024-11-21T17:46:43.559714Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474722:2489] 2024-11-21T17:46:43.568663Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474737:2495] 2024-11-21T17:46:43.578671Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474754:2502] 2024-11-21T17:46:43.589712Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474774:2510] 2024-11-21T17:46:43.604276Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474795:2519] 2024-11-21T17:46:43.615326Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474818:2528] 2024-11-21T17:46:43.632602Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474835:2534] 2024-11-21T17:46:43.647130Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474858:2543] 2024-11-21T17:46:43.669073Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474880:2547] 2024-11-21T17:46:43.679097Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474907:2556] 2024-11-21T17:46:43.698274Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474938:2564] 2024-11-21T17:46:43.719650Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580474978:2571] 2024-11-21T17:46:43.739421Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580475017:2580] 2024-11-21T17:46:43.764669Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580475070:2589] 2024-11-21T17:46:43.788594Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580475100:2598] 2024-11-21T17:46:43.812763Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580475141:2607] 2024-11-21T17:46:43.836854Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580475217:2616] 2024-11-21T17:46:43.864066Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790467580475282:2625] 2024-11-21T17:46:43.864306Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790467580475355:2630] TxId: 281474976715677. Ctx: { TraceId: 01jd7xaqqx66vn61se71q9qcyp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWY5MDgwMDItNmRhZDQ4YjItMzViNGYwZDUtY2RmOTZjOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T17:46:43.865337Z node 1 :KQP_NODE WARN: TxId: 281474976715677, terminate transaction, reason: ABORTED 2024-11-21T17:46:43.865349Z node 1 :KQP_NODE ERROR: node service cancelled the task, because it ABORTED, NodeId: 1, TxId: 281474976715677 2024-11-21T17:46:43.865544Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWY5MDgwMDItNmRhZDQ4YjItMzViNGYwZDUtY2RmOTZjOWI=, ActorId: [1:7439790467580475322:2630], ActorState: ExecuteState, TraceId: 01jd7xaqqx66vn61se71q9qcyp, Create QueryResponse for error on request, msg: 2024-11-21T17:46:43.865651Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211203905, txId: 281474976715676] shutting down 2024-11-21T17:46:43.865710Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790467580475361:2634], TxId: 281474976715677, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWY5MDgwMDItNmRhZDQ4YjItMzViNGYwZDUtY2RmOTZjOWI=. TraceId : 01jd7xaqqx66vn61se71q9qcyp. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790467580473796:2368], status: UNSPECIFIED, reason: {
: Error: ABORTED } 2024-11-21T17:46:43.865889Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790467580475362:2635], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xaqqx66vn61se71q9qcyp. SessionId : ydb://session/3?node_id=1&id=MWY5MDgwMDItNmRhZDQ4YjItMzViNGYwZDUtY2RmOTZjOWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790467580473796:2368], status: UNSPECIFIED, reason: {
: Error: ABOR ... Error scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.331026Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.331036Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037894, step: 1732211205368 2024-11-21T17:46:45.331042Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.331050Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.331059Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386859:2667]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7439790473115416994:2302] 2024-11-21T17:46:45.331064Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.331273Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.331284Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037894, step: 1732211205368 2024-11-21T17:46:45.331290Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.331296Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.331305Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386859:2667]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7439790473115416994:2302] 2024-11-21T17:46:45.331310Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.331518Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.331529Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037894, step: 1732211205368 2024-11-21T17:46:45.331535Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.331543Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.331553Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386859:2667]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7439790473115416994:2302] 2024-11-21T17:46:45.331560Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.331799Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.331810Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037894, step: 1732211205368 2024-11-21T17:46:45.331817Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.331827Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.331840Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386859:2667]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7439790473115416994:2302] 2024-11-21T17:46:45.331849Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.332089Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.332099Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037894, step: 1732211205368 2024-11-21T17:46:45.332109Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.332118Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.332130Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386859:2667]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7439790473115416994:2302] 2024-11-21T17:46:45.332136Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.332346Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.332357Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037894, step: 1732211205368 2024-11-21T17:46:45.332363Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.332370Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.332379Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386859:2667]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7439790473115416994:2302] 2024-11-21T17:46:45.332387Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.332640Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.332650Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037894, step: 1732211205368 2024-11-21T17:46:45.332657Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.332663Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.332672Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386859:2667]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037894, actor_id: [2:7439790473115416994:2302] 2024-11-21T17:46:45.332677Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790477410386859:2667]. TKqpScanFetcherActor: broken tablet for this request 72075186224037894, retries limit exceeded (0/20) 2024-11-21T17:46:45.332715Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.332934Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037895, step: 1732211205368 2024-11-21T17:46:45.332943Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715685. Snapshot is not valid, tabletId: 72075186224037896, step: 1732211205368 2024-11-21T17:46:45.332950Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386860:2668]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037895, actor_id: [2:7439790473115416986:2300] 2024-11-21T17:46:45.332952Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790477410386860:2668]. TKqpScanFetcherActor: broken tablet for this request 72075186224037895, retries limit exceeded (0/20) 2024-11-21T17:46:45.332978Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7439790477410386861:2669]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/EightShard' scheme changed., code: 2028 , tablet id: 72075186224037896, actor_id: [2:7439790473115416984:2298] 2024-11-21T17:46:45.332979Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790477410386861:2669]. TKqpScanFetcherActor: broken tablet for this request 72075186224037896, retries limit exceeded (0/20) 2024-11-21T17:46:45.352466Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmIyNjQ4ZWEtZDM1ZDkwZWUtZjliYTIzZWMtNzcxMWE4ZjY=, ActorId: [2:7439790477410386899:2672], ActorState: ExecuteState, TraceId: 01jd7xas6d2dhecs332jn609fd, Create QueryResponse for error on request, msg: 2024-11-21T17:46:45.352809Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205396, txId: 281474976715687] shutting down 2024-11-21T17:46:45.378071Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205417, txId: 281474976715689] shutting down 2024-11-21T17:46:45.406254Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205452, txId: 281474976715691] shutting down 2024-11-21T17:46:45.431163Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205473, txId: 281474976715693] shutting down 2024-11-21T17:46:45.455912Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205501, txId: 281474976715695] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 25078, MsgBus: 20194 2024-11-21T17:46:41.079254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790461230058883:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:41.079333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:41.081720Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790459539136131:2066];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:41.086117Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790462301232856:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:41.087281Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439790458166751002:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:41.087299Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:41.123313Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:41.129814Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00146f/r3tmp/tmpUIQdxm/pdisk_1.dat 2024-11-21T17:46:41.304333Z node 4 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:41.339027Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25078, node 1 2024-11-21T17:46:41.370809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:41.370823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:41.370825Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:41.370864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20194 2024-11-21T17:46:41.402952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.402971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.403177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.403202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.404086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.404103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.404123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.404129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.404316Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:46:41.404417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:41.404490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:41.404845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.404854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.405345Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:41.405357Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T17:46:41.405359Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T17:46:41.405578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:41.405624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:41.405637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:41.432865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.442200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.517288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.643446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.740894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:43.944478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790469819995107:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.944536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:43.951538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.008600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.073504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.148525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.235220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.284792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.353331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790474114963061:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.353363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.353543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790474114963066:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:44.354571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2024-11-21T17:46:44.360502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790474114963068:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2024-11-21T17:46:44.664786Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211204642, txId: 281474976720671] shutting down 2024-11-21T17:46:44.848204Z node 5 :BS_PROXY_PUT ERROR: [2eef00964908bdb0] Result# TEvPutResult {Id# [72075186224037913:1:12:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T17:46:44.852559Z node 3 :BS_PROXY_PUT ERROR: [d2836b50c807f061] Result# TEvPutResult {Id# [72075186224037917:1:11:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T17:46:44.895862Z node 2 :BS_PROXY_PUT ERROR: [38bc0aba23b1b68d] Result# TEvPutResult {Id# [72075186224037900:1:15:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T17:46:44.903992Z node 4 :BS_PROXY_PUT ERROR: [5da730599d8f570b] Result# TEvPutResult {Id# [72075186224037899:1:15:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 |76.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdownCastErrors [GOOD] Test command err: Trying to start YDB, gRPC: 12955, MsgBus: 64591 2024-11-21T17:46:16.559841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790350751995963:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.559986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e27/r3tmp/tmp7fuz4v/pdisk_1.dat 2024-11-21T17:46:16.604138Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12955, node 1 2024-11-21T17:46:16.614008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:16.614020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:16.614022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:16.614060Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64591 TClient is connected to server localhost:64591 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:16.660800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:16.660835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:16.661908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:16.661996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:16.672121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:16.678381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.678425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.678458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.678476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.678492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.678503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.678518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.678537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.678554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.678571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.678586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.678609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790350751996587:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.678932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.678944Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.678954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.678962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.678971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.678974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.678980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.678983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.678989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.678991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.678996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.678998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.679046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.679055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.679066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.679068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.679075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.679081Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.679092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.679098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.679105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.679111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.728293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211176710 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ... : Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int16 '==' Optional 2024-11-21T17:46:45.367809Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjM1NmFlMWUtNmYzYmI0MjAtYmYwNDMxOTItNjg0NmU3MjI=, ActorId: [1:7439790475306133841:6865], ActorState: ExecuteState, TraceId: 01jd7xas7ka4h1sszfhv0jty0m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Int32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.373792Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133855:6943], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int32 '==' Optional 2024-11-21T17:46:45.373884Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjVjNjQ5Yi1iNDAwMmJiOS00ZWVmYTVkOS03OWZiNTMzOA==, ActorId: [1:7439790475306133853:6934], ActorState: ExecuteState, TraceId: 01jd7xas7t1kvd0gy34rzq0zcv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Int64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.378576Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133867:6906], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Int64 '==' Optional 2024-11-21T17:46:45.378716Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2ZhMWUyOGUtNWY1NGVmN2ItYTNlNDYzZWQtODNjMjBiOWQ=, ActorId: [1:7439790475306133865:6955], ActorState: ExecuteState, TraceId: 01jd7xas7y7jctryes52qewdk3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.385426Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133879:6888], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint8 '==' Optional 2024-11-21T17:46:45.385578Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmY5YjYyYTMtZGI3Mzc5MmQtZGQ4NDcxODctODk0ZmNjMjg=, ActorId: [1:7439790475306133877:6912], ActorState: ExecuteState, TraceId: 01jd7xas85c46r4sy5htyp3bk7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt16; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.392993Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133892:2296], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint16 '==' Optional Test query: --!syntax_v1 DECLARE $in_value AS UInt32; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.393119Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWNlYjMwOGYtMjcxOTA5NTYtZjFmNjE2MWQtMTZjZGUyMzk=, ActorId: [1:7439790475306133890:2360], ActorState: ExecuteState, TraceId: 01jd7xas8d57der4wttd6qatqc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:46:45.398452Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133904:2298], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint32 '==' Optional 2024-11-21T17:46:45.399038Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2ZkNDM5YmMtZTA1NTZiYTAtODMyYjA3YTMtYTk0ZjIwZmM=, ActorId: [1:7439790475306133902:2370], ActorState: ExecuteState, TraceId: 01jd7xas8kbpvwzxpv3dgcewkr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS UInt64; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.403576Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133916:2284], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Uint64 '==' Optional 2024-11-21T17:46:45.403660Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWZhNzM3NzItNDZjYzk0ZS01MTZiNTU1Ni0zNjViMTdhNQ==, ActorId: [1:7439790475306133914:2299], ActorState: ExecuteState, TraceId: 01jd7xas8qa85x72spanp1anqd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Double; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.407263Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133928:2408], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Double '==' Optional 2024-11-21T17:46:45.407475Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDcwN2I3OTgtOTA4YzcwZmYtOWEyZGM3ZDItYWEyNzRkODY=, ActorId: [1:7439790475306133926:2399], ActorState: ExecuteState, TraceId: 01jd7xas8w9y5c1pqm98qmg5bv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Float; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.412705Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133940:2474], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Float '==' Optional 2024-11-21T17:46:45.412809Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRhNzc0YjktZDNlNDM5NDEtMTNlYWUyZTYtZTg4OGZmZGM=, ActorId: [1:7439790475306133938:2495], ActorState: ExecuteState, TraceId: 01jd7xas91fwwmzxvb7y83fpx0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS String; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.417016Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133952:2476], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: String '==' Optional 2024-11-21T17:46:45.417115Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTJjZDE5NGItN2ZhZDFhOWUtNWY3YTY2OTgtOGJmNGZiMDc=, ActorId: [1:7439790475306133950:2486], ActorState: ExecuteState, TraceId: 01jd7xas95ftkz3nhrdke7b9c9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Utf8; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.421282Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790475306133964:2489], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:31: Error: At function: Filter, At function: Coalesce
:5:19: Error: At function: ==
:5:19: Error: Uncompatible types in compare: Utf8 '==' Optional 2024-11-21T17:46:45.421365Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTIwYzE0OGMtMWQzMDY4MjktNDIwZDVkYTYtMzFkODU3MTM=, ActorId: [1:7439790475306133962:2478], ActorState: ExecuteState, TraceId: 01jd7xas9a1sd2w1md7ge44stb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Test query: --!syntax_v1 DECLARE $in_value AS Timestamp; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.467691Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205347, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Date; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.542146Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205501, txId: 18446744073709551615] shutting down Test query: --!syntax_v1 DECLARE $in_value AS Datetime; SELECT `key` FROM `/Root/olapStore/OlapParametersTable` WHERE $in_value = Datetime_column 2024-11-21T17:46:45.623292Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211205571, txId: 18446744073709551615] shutting down |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> LocalPartitionReader::Booting [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::TableReshardingModuloN [FAIL] Test command err: Trying to start YDB, gRPC: 5030, MsgBus: 63040 2024-11-21T17:46:16.733308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790352963725887:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:16.733321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e20/r3tmp/tmpczLJwf/pdisk_1.dat 2024-11-21T17:46:16.781800Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5030, node 1 2024-11-21T17:46:16.789494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:16.789508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:16.789510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
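[Editor's note on the parameter-type sweep above] Each "Uncompatible types in compare: <T> '==' Optional" issue is produced by comparing a declared parameter of type Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Double, Float, String or Utf8 against the column Datetime_column (reported simply as Optional in the issue), while the Timestamp, Date and Datetime declarations compile and execute — their queries end with KqpSnapshotManager discarding the snapshot instead of a compile error. A minimal YQL sketch of the two outcomes, using the table path and column name exactly as they appear in the log; the commented CAST line is an editorial assumption and is not part of the test:

    --!syntax_v1
    -- Compiles and runs: the declared parameter type matches the datetime column.
    DECLARE $in_value AS Datetime;
    SELECT `key`
    FROM `/Root/olapStore/OlapParametersTable`
    WHERE $in_value = Datetime_column;

    --!syntax_v1
    -- Rejected at type annotation: "Uncompatible types in compare: Int32 '==' Optional".
    DECLARE $in_value AS Int32;
    SELECT `key`
    FROM `/Root/olapStore/OlapParametersTable`
    WHERE $in_value = Datetime_column;
    -- Hypothetical workaround (not exercised by the test): compare through an explicit
    -- conversion, e.g. WHERE CAST($in_value AS Datetime) = Datetime_column.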
2024-11-21T17:46:16.789546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63040 TClient is connected to server localhost:63040 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:16.834559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:16.834588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:16.835634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:16.857927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:16.867779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:16.891449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.891514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.891557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.891579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.891600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.891622Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.891657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.891678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.891697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.891720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.891739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.891762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790352963726709:2295];tablet_id=72075186224037908;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:16.892249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.892263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.892274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.892296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.892314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.892323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.892332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.892341Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.892351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.892359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.892372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.892380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.892436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.892445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.892460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.892467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.892483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.892491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.892506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.892514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.892523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.892530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:16.895118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7439790352963726731:2304];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.895137Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7439790352963726731:2304];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.895163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7439790352963726731:2304];tablet_id=720751862240 ... sh_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T17:46:41.030370Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T17:46:41.030504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725680:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.030658Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 
AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T17:46:41.035128Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 7 AppropriateMods: 15 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } } ; 2024-11-21T17:46:41.037542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725681:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_WAIT_FINISHED... 
(SPLIT:1) 2024-11-21T17:46:43.021546Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; 2024-11-21T17:46:43.021684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976725682:0, at schemeshard: 72057594046644480 2024-11-21T17:46:43.021857Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 } TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; 2024-11-21T17:46:43.030781Z node 1 :TX_COLUMNSHARD ERROR: fline=hash_modulo.h:101;proto=HashSharding { ModuloPartsCount: 16 TabletsForModulo { TabletId: 72075186224037896 AppropriateMods: 0 } TabletsForModulo { TabletId: 72075186224037897 AppropriateMods: 1 } TabletsForModulo { TabletId: 72075186224037898 AppropriateMods: 2 } TabletsForModulo { TabletId: 72075186224037899 AppropriateMods: 3 } TabletsForModulo { TabletId: 72075186224037900 AppropriateMods: 4 } TabletsForModulo { TabletId: 72075186224037901 AppropriateMods: 5 } TabletsForModulo { TabletId: 72075186224037902 AppropriateMods: 6 } TabletsForModulo { TabletId: 72075186224037903 AppropriateMods: 7 } TabletsForModulo { TabletId: 72075186224037892 AppropriateMods: 8 } TabletsForModulo { TabletId: 72075186224037893 AppropriateMods: 9 } TabletsForModulo { TabletId: 72075186224037894 AppropriateMods: 10 
} TabletsForModulo { TabletId: 72075186224037895 AppropriateMods: 11 } TabletsForModulo { TabletId: 72075186224037888 AppropriateMods: 12 } TabletsForModulo { TabletId: 72075186224037889 AppropriateMods: 13 } TabletsForModulo { TabletId: 72075186224037890 AppropriateMods: 14 } TabletsForModulo { TabletId: 72075186224037891 AppropriateMods: 15 } } ; RESHARDING_WAIT_FINISHED... (SPLIT:1) RESHARDING_FINISHED 2024-11-21T17:46:43.493385Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211203086, txId: 18446744073709551615] shutting down [[57643u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[57643u]]|[[230000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x127C3768 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x2415B5B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x1246EC34 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:351: Execute @ 0x1245DEBA 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:405: Execute_ @ 0x1246062C 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7F26E47C5D8F 11. ??:0: ?? @ 0x7F26E47C5E3F 12. ??:0: ?? @ 0x1179D028 |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TReplicaTest::Handshake >> TReplicaTest::Update >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TReplicaTest::Handshake [GOOD] >> TReplicaTest::DoubleUnsubscribe >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 >> TReplicaTest::Commit >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::Subscribe >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2024-11-21T17:46:39.714328Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1732211199714318 2024-11-21T17:46:39.902788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790450903303002:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.902807Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.905724Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790452027381839:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.905895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e1b/r3tmp/tmpBigFZD/pdisk_1.dat 2024-11-21T17:46:39.929243Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:39.929562Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:39.961169Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64913, node 1 2024-11-21T17:46:39.979820Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e1b/r3tmp/yandexmXxzvV.tmp 2024-11-21T17:46:39.979833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e1b/r3tmp/yandexmXxzvV.tmp 2024-11-21T17:46:39.979895Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e1b/r3tmp/yandexmXxzvV.tmp 2024-11-21T17:46:39.979936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:39.984995Z INFO: TTestServer started on Port 13641 GrpcPort 64913 TClient is connected to server localhost:13641 PQClient connected to localhost:64913 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:40.003348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.003388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:46:40.006289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:40.028172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.028212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:40.029306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:40.031097Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:40.031692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T17:46:40.238802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790456322349435:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.238827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790456322349411:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.238836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.240109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:46:40.244204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790456322349439:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:46:40.303244Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790455198271249:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:40.303354Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWVlZjZmMzAtYWU2NmQxM2ItNjQ1NzFiYTQtYjE0NTAzOGI=, ActorId: [1:7439790455198271207:2298], ActorState: ExecuteState, TraceId: 01jd7xam89f5tw0xtzw39h5rsq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:40.304000Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:40.304423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:40.320450Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790456322349508:2287], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:40.320544Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTQxMTA4ZDktZDFhYTBmNGItZDlmNjlhNDItYWJjM2JmMmY=, ActorId: [2:7439790456322349408:2277], ActorState: ExecuteState, TraceId: 01jd7xam7e7b7bvxs7fnhbc5ph, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:40.320680Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:40.369692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:40.434438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:64913", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:40.467678Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd7xame0b34wqvjyb45261qm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY1ZGNmNGUtZWQ5OGEzZWUtNjJhNzZkOWQtZjRiMzEzZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790455198271632:2909] 2024-11-21T17:46:44.902934Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790450903303002:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.902975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:44.905986Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790452027381839:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.906023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:45.542063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720676:0, at schemeshard: 72057594046644480 waiting... 
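[Editor's note] The SCHEME_ERROR compile failures above ("Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]'") appear to be expected bootstrap noise: the cluster tracker on both nodes polls the config table before the test server has created it, after which the table is created (ESchemeOpCreateTable) and seeded by the "=== Init DC" step. The seeding statement is embedded in the log as one long line; it is reproduced below unchanged, only reformatted for readability:

    UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
    VALUES ("dc1", "localhost:64913", true, true, 1000),
           ("dc2", "dc2.logbroker.yandex.net", false, false, 1000);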
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:64913 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:45.572306Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:64913 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 ... 2024-11-21T17:46:46.697301Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:46:46.697404Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:46.697417Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:46.697470Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T17:46:46.697590Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T17:46:46.697668Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:46.697676Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:46.697774Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 1 partNo : 0 messageNo: 1 size 115 offset: -1 2024-11-21T17:46:46.697831Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 1 partNo 0 2024-11-21T17:46:46.697905Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2024-11-21T17:46:46.697956Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 177 WTime 1732211206697 2024-11-21T17:46:46.697984Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:46:46.699043Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 2024-11-21T17:46:46.699058Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:46:46.699071Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:46:46.699087Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:46:46.699094Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:46:46.699099Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:46:46.699127Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T17:46:46.699135Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:46:46.699140Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T17:46:46.699144Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:46:46.699177Z node 2 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732211206697 queuesize 0 startOffset 0 2024-11-21T17:46:46.699181Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T17:46:46.699441Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:46:46.699597Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 1000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T17:46:46.699605Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2024-11-21T17:46:46.699608Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2024-11-21T17:46:46.699695Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler === AcksHandler has written a message, closing the session 2024-11-21T17:46:46.699846Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: try to update token 2024-11-21T17:46:46.699856Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 2024-11-21T17:46:46.700052Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:46:46.700117Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:46:46.700270Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:46.700281Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:46.700309Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2024-11-21T17:46:46.700417Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T17:46:46.700523Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:46.700531Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:46.700547Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2024-11-21T17:46:46.700589Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2024-11-21T17:46:46.700623Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2024-11-21T17:46:46.700670Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1732211206700 2024-11-21T17:46:46.700704Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:46:46.701607Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 2024-11-21T17:46:46.701624Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:46:46.701640Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T17:46:46.701645Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2024-11-21T17:46:46.701746Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T17:46:46.701949Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:46:46.702002Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 1000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T17:46:46.702008Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2024-11-21T17:46:46.702012Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2024-11-21T17:46:46.708326Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2024-11-21T17:46:46.708366Z :INFO: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2024-11-21T17:46:46.708370Z :INFO: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session will now close 2024-11-21T17:46:46.708380Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: aborting 2024-11-21T17:46:46.708510Z :WARNING: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T17:46:46.708775Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0 grpc read done: success: 0 data: 2024-11-21T17:46:46.708789Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0 grpc read failed 2024-11-21T17:46:46.708796Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0 grpc closed 2024-11-21T17:46:46.708801Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0 is DEAD 2024-11-21T17:46:46.709070Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:46:46.709168Z :DEBUG: [/Root] SessionId [src_id|b61722cd-f60dd73d-d2320781-61bc2fa4_0] MessageGroupId [src_id] Write session: destroy 2024-11-21T17:46:46.709221Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:46.709262Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439790480968076282:2477] destroyed 2024-11-21T17:46:46.709276Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
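The write-session trace above follows the usual Topic SDK lifecycle that the test markers name: the client waits for a continuation token (ReadyToAcceptHandler), writes a message, gets a broker acknowledgement once the blob is stored on disk (AcksHandler), and finally closes the session with a zero timeout, which triggers the hard-shutdown / SessionClosedHandler path. The sketch below is only an illustration of that flow, not the test's actual code: it assumes the public C++ Topic SDK (NYdb::NTopic), and the include path, event types, and method signatures are written from memory and may differ from the real headers.

// Hedged sketch of the write -> ack -> close flow seen in the trace above.
// Assumes the public YDB C++ Topic SDK; names below are approximations.
#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>  // assumed include path

#include <utility>

using namespace NYdb;
using namespace NYdb::NTopic;

void WriteTwoMessagesAndClose(TDriver& driver) {
    TTopicClient client(driver);

    auto settings = TWriteSessionSettings()
        .Path("test-topic")         // "rt3.dc1--test-topic" in the trace
        .MessageGroupId("src_id");  // shows up as SourceId '\0src_id'

    auto session = client.CreateWriteSession(settings);

    // The sketch assumes the event order observed in the trace
    // (ReadyToAccept, then Acks) and omits error handling.
    for (int i = 0; i < 2; ++i) {
        // ReadyToAcceptHandler: obtain a continuation token before writing.
        auto readyEvent = *session->GetEvent(/*block=*/true);
        auto& ready = std::get<TWriteSessionEvent::TReadyToAcceptEvent>(readyEvent);
        session->Write(std::move(ready.ContinuationToken), "payload");

        // AcksHandler: the partition reports the message is stored on disk
        // ("Answering for message sourceid ... is stored on disk" above).
        auto ackEvent = *session->GetEvent(/*block=*/true);
        auto& acks = std::get<TWriteSessionEvent::TAcksEvent>(ackEvent);
        Y_UNUSED(acks);  // acks carry the seq_no values seen in the trace
    }

    // "Write session: close. Timeout 0.000000s": a zero timeout does not wait
    // for outstanding writes and leads to the hard shutdown logged above.
    session->Close(TDuration::Zero());
}

In the trace the third write is issued from inside SessionClosedHandler after Close() has already aborted the session, which is why it is only "'written'" locally and never reaches the partition.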
|76.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath >> TReplicaTest::Delete [GOOD] >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> TReplicaTest::DoubleDelete [GOOD] >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion >> TReplicaTest::Merge >> TReplicaTest::SyncVersion [GOOD] >> TReplicaTest::Unsubscribe ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2024-11-21T17:46:47.550694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:6:2053] 2024-11-21T17:46:47.550716Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 2 2024-11-21T17:46:47.550729Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.550734Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:5:2052] Reject handshake from stale populator: sender# [1:6:2053], owner# 1, generation# 1, pending generation# 2 2024-11-21T17:46:47.579727Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T17:46:47.579750Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.579773Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2024-11-21T17:46:47.579779Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.579798Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.579845Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.579851Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.580884Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.580959Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:47.580964Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T17:46:47.580969Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.580980Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: 
sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.580985Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.580989Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.580996Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.581000Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.581006Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.581021Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:8:2055] 2024-11-21T17:46:47.581031Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.605456Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:47.605490Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.605520Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.605527Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.605540Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.605554Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.605558Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.605563Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.605570Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.605577Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:47.605581Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# 
[OwnerId: 1, LocalPathId: 2], deletion# true 2024-11-21T17:46:47.605585Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2024-11-21T17:46:47.605591Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.605595Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.605599Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.605607Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.605610Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.605614Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2024-11-21T17:46:47.298899Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.298924Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 800, generation# 1 2024-11-21T17:46:47.298943Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.298948Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Commit generation: owner# 800, generation# 1 2024-11-21T17:46:47.298959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2024-11-21T17:46:47.298963Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 800, generation# 1 2024-11-21T17:46:47.298968Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2024-11-21T17:46:47.298971Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Commit generation: owner# 800, generation# 1 2024-11-21T17:46:47.299033Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 103 2024-11-21T17:46:47.299040Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.299833Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T17:46:47.299878Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 103 2024-11-21T17:46:47.299883Z node 1 :SCHEME_BOARD_REPLICA NOTICE: 
[1:5:2052] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.299889Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T17:46:47.299909Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:8:2055] 2024-11-21T17:46:47.299923Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:8:2055], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2024-11-21T17:46:47.305041Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:10:2057] 2024-11-21T17:46:47.305059Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Successful handshake: owner# 800, generation# 1 2024-11-21T17:46:47.305072Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:10:2057] 2024-11-21T17:46:47.305077Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Commit generation: owner# 800, generation# 1 2024-11-21T17:46:47.305085Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:11:2058] 2024-11-21T17:46:47.305091Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Successful handshake: owner# 900, generation# 1 2024-11-21T17:46:47.305097Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:11:2058] 2024-11-21T17:46:47.305100Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Commit generation: owner# 900, generation# 1 2024-11-21T17:46:47.305121Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:10:2057], cookie# 0, event size# 103 2024-11-21T17:46:47.305126Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.305137Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T17:46:47.305149Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103 2024-11-21T17:46:47.305152Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.305158Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:9:2056] Replace GSS by TSS 
description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2024-11-21T17:46:47.305164Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T17:46:47.305176Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:12:2059] 2024-11-21T17:46:47.305185Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:12:2059], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2024-11-21T17:46:47.305232Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:14:2061] 2024-11-21T17:46:47.305236Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Successful handshake: owner# 800, generation# 1 2024-11-21T17:46:47.305242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:14:2061] 2024-11-21T17:46:47.305245Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Commit generation: owner# 800, generation# 1 2024-11-21T17:46:47.305251Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2024-11-21T17:46:47.305255Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Successful handshake: owner# 800, generation# 1 2024-11-21T17:46:47.305259Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2024-11-21T17:46:47.305264Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Commit generation: owner# 800, generation# 1 2024-11-21T17:46:47.305271Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:14:2061], cookie# 0, event size# 103 2024-11-21T17:46:47.305275Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.305278Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:13:2060] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T17:46:47.305285Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event size# 103 2024-11-21T17:46:47.305288Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:13:2060] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# 
false 2024-11-21T17:46:47.305292Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:13:2060] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T17:46:47.305301Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:13:2060] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:16:2063] 2024-11-21T17:46:47.305306Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:13:2060] Subscribe: subscriber# [1:16:2063], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2024-11-21T17:46:47.305351Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:18:2065] 2024-11-21T17:46:47.305355Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Successful handshake: owner# 800, generation# 1 2024-11-21T17:46:47.305360Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:18:2065] 2024-11-21T17:46:47.305363Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Commit generation: owner# 800, generation# 1 2024-11-21T17:46:47.305369Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:19:2066] 2024-11-21T17:46:47.305372Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Successful handshake: owner# 900, generation# 1 2024-11-21T17:46:47.305376Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:19:2066] 2024-11-21T17:46:47.305379Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Commit generation: owner# 900, generation# 1 2024-11-21T17:46:47.305386Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:17:2064] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:18:2065], cookie# 0, event size# 103 2024-11-21T17:46:47.305390Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:17:2064] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T17:46:47.305394Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:17:2064] Upsert description: path# /Root/Tenant, pathId# [Ow ... 
ble_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2024-11-21T17:46:47.587407Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:393:2440] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:396:2443] 2024-11-21T17:46:47.587410Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:393:2440] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:47.587414Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:393:2440] Subscribe: subscriber# [2:396:2443], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2024-11-21T17:46:47.587619Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:398:2445] 2024-11-21T17:46:47.587622Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:47.587627Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:398:2445] 2024-11-21T17:46:47.587631Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:47.587636Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2024-11-21T17:46:47.587639Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:47.587644Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2024-11-21T17:46:47.587647Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:47.587654Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:398:2445], cookie# 0, event size# 64 2024-11-21T17:46:47.587658Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2024-11-21T17:46:47.587661Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:397:2444] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2024-11-21T17:46:47.587667Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 130 2024-11-21T17:46:47.587672Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2024-11-21T17:46:47.587675Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:397:2444] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2024-11-21T17:46:47.587681Z node 2 :SCHEME_BOARD_REPLICA 
DEBUG: [2:397:2444] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:400:2447] 2024-11-21T17:46:47.587685Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:397:2444] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:47.587689Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:397:2444] Subscribe: subscriber# [2:400:2447], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2024-11-21T17:46:47.587899Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:402:2449] 2024-11-21T17:46:47.587902Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:47.587908Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:402:2449] 2024-11-21T17:46:47.587911Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:47.587916Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2024-11-21T17:46:47.587919Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Successful handshake: owner# 910, generation# 1 2024-11-21T17:46:47.587924Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2024-11-21T17:46:47.587928Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Commit generation: owner# 910, generation# 1 2024-11-21T17:46:47.587934Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:402:2449], cookie# 0, event size# 64 2024-11-21T17:46:47.587937Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2024-11-21T17:46:47.587940Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:401:2448] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2024-11-21T17:46:47.587946Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64 2024-11-21T17:46:47.587950Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:401:2448] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2024-11-21T17:46:47.587955Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:401:2448] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:404:2451] 2024-11-21T17:46:47.587958Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:401:2448] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T17:46:47.587964Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:401:2448] Subscribe: subscriber# [2:404:2451], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2024-11-21T17:46:47.747917Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:47.747943Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 800, generation# 1 2024-11-21T17:46:47.747961Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:47.747966Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Commit generation: owner# 800, generation# 1 2024-11-21T17:46:47.747975Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:7:2054] 2024-11-21T17:46:47.747981Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 900, generation# 1 2024-11-21T17:46:47.747990Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:7:2054] 2024-11-21T17:46:47.747994Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Commit generation: owner# 900, generation# 1 2024-11-21T17:46:47.748029Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 118 2024-11-21T17:46:47.748035Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2024-11-21T17:46:47.748048Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T17:46:47.748061Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 117 2024-11-21T17:46:47.748065Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2024-11-21T17:46:47.748073Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2024-11-21T17:46:47.748079Z node 3 :SCHEME_BOARD_REPLICA 
INFO: [3:5:2052] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2024-11-21T17:46:47.748085Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T17:46:47.748099Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:8:2055] 2024-11-21T17:46:47.748114Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:8:2055], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 |76.4%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2024-11-21T17:46:47.635670Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.635698Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:5:2052] Reject commit from unknown populator: sender# [1:6:2053], owner# 1, generation# 1 2024-11-21T17:46:47.635710Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.635716Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.665841Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:6:2053] 2024-11-21T17:46:47.665864Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 0 2024-11-21T17:46:47.665875Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2024-11-21T17:46:47.665879Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.665890Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2024-11-21T17:46:47.665894Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Commit generation: owner# 1, generation# 1 2024-11-21T17:46:47.665900Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:6:2053] 2024-11-21T17:46:47.665904Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:5:2052] Reject commit from stale populator: sender# [2:6:2053], owner# 1, generation# 0, pending 
generation# 1 2024-11-21T17:46:47.665909Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:6:2053] 2024-11-21T17:46:47.665912Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 2 2024-11-21T17:46:47.692562Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:47.692586Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.692633Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.692641Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.693596Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.693656Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2024-11-21T17:46:47.693673Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.693705Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:8:2055] 2024-11-21T17:46:47.693711Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:8:2055], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.693734Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:47.693742Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2024-11-21T17:46:47.693746Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2024-11-21T17:46:47.693791Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2024-11-21T17:46:47.693795Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.693805Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:10:2057] 2024-11-21T17:46:47.693810Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:10:2057], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.693819Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:11:2058] 2024-11-21T17:46:47.693822Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:11:2058], path# path, domainOwnerId# 0, capabilities# ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2024-11-21T17:46:47.412664Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.412692Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.412752Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.412758Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.413732Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.413771Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T17:46:47.413784Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.413817Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:6:2053] 2024-11-21T17:46:47.413824Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# path 2024-11-21T17:46:47.413832Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T17:46:47.413838Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.413847Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:6:2053] 2024-11-21T17:46:47.413851Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.645357Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T17:46:47.645381Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.645416Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.645424Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.645436Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.645453Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] >> TReplicaTest::Unsubscribe [GOOD] >> 
TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::UpdateWithoutHandshake >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2024-11-21T17:46:47.350426Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.350451Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.563975Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T17:46:47.564000Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.564044Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.564052Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.575846Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.575932Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T17:46:47.575946Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.575970Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] 2024-11-21T17:46:47.575978Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:6:2053], path# path 2024-11-21T17:46:47.575991Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] 2024-11-21T17:46:47.778200Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:47.778222Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.778242Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2024-11-21T17:46:47.778247Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path 2024-11-21T17:46:47.778265Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.778298Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.778304Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.778316Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# 
{Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.778370Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:47.778374Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T17:46:47.778378Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.778391Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2024-11-21T17:46:47.778396Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.778403Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:47.778407Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2024-11-21T17:46:47.694209Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.694226Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.694259Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.694265Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.695111Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.695149Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T17:46:47.695160Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:47.695184Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:47.695189Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T17:46:47.695193Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.906670Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T17:46:47.906693Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path 
2024-11-21T17:46:47.906710Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.128496Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:48.128521Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:48.128559Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 76 2024-11-21T17:46:48.128566Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:48.128580Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2024-11-21T17:46:48.128599Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:6:2053] 2024-11-21T17:46:48.128612Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.128629Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:6:2053], cookie# 1 >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> BSCRestartPDisk::RestartNotAllowed >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> BSCRestartPDisk::RestartOneByOne >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2024-11-21T17:46:47.651978Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.652002Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.652018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:47.652023Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Commit generation: owner# 1, generation# 1 2024-11-21T17:46:47.652030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:6:2053] 2024-11-21T17:46:47.652033Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 2 2024-11-21T17:46:47.862005Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2024-11-21T17:46:47.862031Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path 2024-11-21T17:46:47.862060Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2024-11-21T17:46:47.862082Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle 
NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T17:46:47.862087Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:47.862126Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:47.862133Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:47.863046Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:47.863125Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7:2054] 2024-11-21T17:46:47.863144Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:47.863149Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T17:46:47.863154Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:47.863162Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:7:2054] 2024-11-21T17:46:48.077085Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:48.077108Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:48.077142Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.077149Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:48.077163Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:48.077182Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2024-11-21T17:46:48.077203Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2024-11-21T17:46:48.077218Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.077221Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:48.077226Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId 
[OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:48.077281Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.077286Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T17:46:48.077290Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.077300Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path 2024-11-21T17:46:48.077307Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2024-11-21T17:46:48.077312Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:48.077324Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:7:2054] |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2024-11-21T17:46:48.227377Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:48.227402Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:48.227421Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2024-11-21T17:46:48.227427Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path 2024-11-21T17:46:48.227448Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.227464Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2024-11-21T17:46:48.227468Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.227502Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.227509Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:48.233385Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 
1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:48.233536Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2024-11-21T17:46:48.233546Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:7:2054], path# path 2024-11-21T17:46:48.233570Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:48.233576Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T17:46:48.233581Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.454747Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] |76.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |76.5%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |76.5%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |76.5%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2024-11-21T17:46:48.182709Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2024-11-21T17:46:48.182731Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path 2024-11-21T17:46:48.182751Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.182771Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2024-11-21T17:46:48.182776Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.182781Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.182791Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:6:2053] 2024-11-21T17:46:48.182796Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:48.182832Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.182837Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:48.183585Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], 
pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:48.183666Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:48.183671Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T17:46:48.183675Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.411757Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T17:46:48.411784Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:48.411803Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2024-11-21T17:46:48.411809Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.411826Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.411860Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.411866Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:48.411877Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:48.411908Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 40 2024-11-21T17:46:48.411911Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2024-11-21T17:46:48.411915Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.411927Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2024-11-21T17:46:48.411933Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.411942Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.411946Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2024-11-21T17:46:48.411949Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.411954Z node 2 :SCHEME_BOARD_REPLICA DEBUG: 
[2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.411957Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2024-11-21T17:46:48.411962Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2024-11-21T17:46:48.411970Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:8:2055] 2024-11-21T17:46:48.411974Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.649763Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:7:2054] 2024-11-21T17:46:48.649787Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Upsert description: path# path 2024-11-21T17:46:48.649805Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:5:2052] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 1, capabilities# 2024-11-21T17:46:48.649832Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:48.649839Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:48.649850Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:6:2053] 2024-11-21T17:46:48.649855Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:5:2052] Commit generation: owner# 1, generation# 1 2024-11-21T17:46:48.649877Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:5:2052] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2024-11-21T17:46:48.565315Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.565341Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:5:2052] Reject update from unknown populator: sender# [1:6:2053], owner# 1, generation# 1 2024-11-21T17:46:48.565360Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:6:2053] 2024-11-21T17:46:48.565365Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# path 2024-11-21T17:46:48.565386Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.565410Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:6:2053] 2024-11-21T17:46:48.565417Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# path 2024-11-21T17:46:48.565426Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:6:2053] 
2024-11-21T17:46:48.565431Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.565436Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Subscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.565443Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:6:2053] 2024-11-21T17:46:48.565448Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:5:2052] Unsubscribe: subscriber# [1:6:2053], path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.792684Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:6:2053] 2024-11-21T17:46:48.792706Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:5:2052] Successful handshake: owner# 1, generation# 1 2024-11-21T17:46:48.792755Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:6:2053], cookie# 0, event size# 72 2024-11-21T17:46:48.792762Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:5:2052] Reject update from stale populator: sender# [2:6:2053], owner# 1, generation# 0, pending generation# 1 2024-11-21T17:46:48.792775Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T17:46:48.792781Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# path 2024-11-21T17:46:48.792798Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# path, domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.792815Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:6:2053] 2024-11-21T17:46:48.792821Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:6:2053], path# path 2024-11-21T17:46:48.792830Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:6:2053] 2024-11-21T17:46:48.792834Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2024-11-21T17:46:48.792839Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Subscribe: subscriber# [2:6:2053], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2024-11-21T17:46:48.792847Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:5:2052] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:6:2053] 2024-11-21T17:46:48.792852Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:5:2052] Unsubscribe: subscriber# [2:6:2053], path# [OwnerId: 1, LocalPathId: 1] >> THealthCheckTest::SpecificServerless >> THealthCheckTest::Issues100Groups100VCardListing >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 10467486394819346215 2024-11-21T17:46:49.084479Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084519Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 
'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084532Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084544Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084557Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084569Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084581Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084593Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084775Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084800Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084815Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084830Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084843Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084855Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084868Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084881Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.084900Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084908Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084914Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 
'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084920Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084929Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084935Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084940Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.084945Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.085351Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.085373Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.085384Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.085394Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.085403Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.085414Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.085427Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.085440Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 13572096557349813222 2024-11-21T17:46:49.216662Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.216706Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.216720Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from 
PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.216731Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.216744Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.216757Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.216768Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.216949Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.216971Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.216982Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.216996Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217008Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217019Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217030Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217044Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.217050Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.217056Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.217069Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.217075Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.217080Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: 
Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.217088Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T17:46:49.217438Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217455Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217465Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217496Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217504Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217515Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T17:46:49.217524Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> CompressExecutor::TestExecutorMemUsage [GOOD] >> THealthCheckTest::ShardsLimit999 >> THealthCheckTest::Basic >> THealthCheckTest::StaticGroupIssue >> THealthCheckTest::OneIssueListing >> THealthCheckTest::Issues100GroupsListing |76.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::StorageLimit95 >> BSCRestartPDisk::RestartOneByOneWithReconnects >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest |76.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::YellowGroupIssueOnYellowSpace >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |76.5%| [TA] {RESULT} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/test-results/pytest/{meta.json ... 
results_accumulator.log} |76.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TMonitoringTests::InvalidActorId >> BSCRestartPDisk::RestartNotAllowed [GOOD] |76.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T17:45:42.241156Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732211142241138 2024-11-21T17:45:42.552017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790207256166539:2094];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:42.552504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:42.564622Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790205394017232:2266];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ed6/r3tmp/tmpBqeyuU/pdisk_1.dat 2024-11-21T17:45:42.608496Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:45:42.613822Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:45:42.615136Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:45:42.637807Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:42.652091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:42.652116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:42.658156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20043, node 1 2024-11-21T17:45:42.680185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001ed6/r3tmp/yandexZxP2ET.tmp 2024-11-21T17:45:42.680197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001ed6/r3tmp/yandexZxP2ET.tmp 2024-11-21T17:45:42.686336Z INFO: TTestServer started on Port 16626 GrpcPort 20043 2024-11-21T17:45:42.702599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:42.702621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:16626 2024-11-21T17:45:42.703840Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:45:42.704092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:45:42.706179Z node 1 :NET_CLASSIFIER WARN: successfully initialized from 
file: /home/runner/.ya/build/build_root/6fwh/001ed6/r3tmp/yandexZxP2ET.tmp 2024-11-21T17:45:42.706246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration PQClient connected to localhost:20043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:42.741006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T17:45:42.953408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790205394017305:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:42.953454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790205394017316:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:42.953466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:42.954993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:45:42.961199Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:45:42.962293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790205394017320:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:45:42.966686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790207256167402:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:42.966903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:42.986445Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790207256167488:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:42.986834Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWQwYjAwYTAtOGM5NTc1NGItZjJjMDk2NjEtODRkOGZlZQ==, ActorId: [1:7439790207256167399:2299], ActorState: ExecuteState, TraceId: 01jd7x8w98976zvv29qc688cq7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:42.987217Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:42.988064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T17:45:43.073927Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790209688984698:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:45:43.074323Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDYxOTE3NDEtZjNjNjg2YWEtOTIyMDdhMDAtOGJlYzExYQ==, ActorId: [2:7439790205394017303:2277], ActorState: ExecuteState, TraceId: 01jd7x8w960xv3v3akv3c4r9b8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:45:43.074462Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:45:43.109327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:45:43.136817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:20043", true, true, 1000); 2024-11-21T17:45:43.241564Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd7x8whkdh1mxv6r9cy9yk5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjlmMjcxZDItZDNkODZlMGEtZWMwZDE4ODEtNWNjYzJkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790211551135163:2918] 2024-11-21T17:45:47.552298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790207256166539:2094];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:47.552339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:47.563033Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790205394017232:2266];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:47.563064Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:45:49.249007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720679:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:20043 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:45:49.288781Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status ... 
d [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T17:46:48.464322Z node 13 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:46:48.464343Z node 13 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T17:46:48.468157Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T17:46:48.468205Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:50942 2024-11-21T17:46:48.468209Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:50942 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:46:48.468214Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:46:48.468854Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T17:46:48.468903Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:46:48.468905Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:46:48.468907Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:46:48.468914Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790489555200633:2544] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:46:48.469483Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439790489555200633:2544] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:46:48.669689Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720701. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T17:46:48.669756Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439790489555200646:2546] TxId: 281474976720701. Ctx: { TraceId: 01jd7xaw8n94wk6hmw51j0naz8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NGRkZTJkNmUtYjBjYTkwMTMtMTBiMWE4ZS02M2QyYzVhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T17:46:48.669913Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NGRkZTJkNmUtYjBjYTkwMTMtMTBiMWE4ZS02M2QyYzVhZQ==, ActorId: [13:7439790489555200634:2546], ActorState: ExecuteState, TraceId: 01jd7xaw8n94wk6hmw51j0naz8, Create QueryResponse for error on request, msg: 2024-11-21T17:46:48.670668Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7439790489555200633:2544] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NGRkZTJkNmUtYjBjYTkwMTMtMTBiMWE4ZS02M2QyYzVhZQ==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xaw8n94wk6hmw55dt6yz9" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T17:46:48.670700Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NGRkZTJkNmUtYjBjYTkwMTMtMTBiMWE4ZS02M2QyYzVhZQ==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xaw8n94wk6hmw55dt6yz9" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T17:46:48.670974Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2024-11-21T17:46:48.673347Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NGRkZTJkNmUtYjBjYTkwMTMtMTBiMWE4ZS02M2QyYzVhZQ==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xaw8n94wk6hmw55dt6yz9" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T17:46:48.673358Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session will restart in 2.000000s 2024-11-21T17:46:48.673385Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session: Do CDS request 2024-11-21T17:46:48.673391Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Do schedule cds request after 2000 ms 2024-11-21T17:46:48.891037Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720703. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T17:46:48.891086Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439790489555200711:2549] TxId: 281474976720703. Ctx: { TraceId: 01jd7xawfc2np42wfhpk92fnwy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDU0NzZlY2UtYmE0ZjNjYWUtNzQ1YzRmNzItMzMyZDdiMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T17:46:48.891219Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZDU0NzZlY2UtYmE0ZjNjYWUtNzQ1YzRmNzItMzMyZDdiMDI=, ActorId: [13:7439790489555200698:2549], ActorState: ExecuteState, TraceId: 01jd7xawfc2np42wfhpk92fnwy, Create QueryResponse for error on request, msg: 2024-11-21T17:46:48.890699Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715684. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:46:48.890854Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7439790490806520170:2469] TxId: 281474976715684. Ctx: { TraceId: 01jd7xawexbtzxntadwc0xvnwe, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZGU2MzhhYjAtOTBjN2JmYjUtYmY1MWNmNGEtMzQzYmVkYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:46:48.891003Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZGU2MzhhYjAtOTBjN2JmYjUtYmY1MWNmNGEtMzQzYmVkYzc=, ActorId: [14:7439790490806520157:2469], ActorState: ExecuteState, TraceId: 01jd7xawexbtzxntadwc0xvnwe, Create QueryResponse for error on request, msg: 2024-11-21T17:46:48.892102Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xawg5a56ynpxj4t9v4ee2" } } YdbStatus: UNAVAILABLE ConsumedRu: 22 } 2024-11-21T17:46:48.893031Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd7xawgf9v1mskv93bb0vfym" } } YdbStatus: UNAVAILABLE ConsumedRu: 21 } 2024-11-21T17:46:49.436329Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session: close. Timeout = 0 ms 2024-11-21T17:46:49.436346Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session will now close 2024-11-21T17:46:49.436357Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session: aborting 2024-11-21T17:46:49.436656Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T17:46:49.438910Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a4ced193-ed672bcc-867a622e-76473714_0] Write session: destroy 2024-11-21T17:46:49.517165Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720705. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:46:49.517236Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439790493850168099:2556] TxId: 281474976720705. Ctx: { TraceId: 01jd7xax3n0hfy068d93k3y002, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTVkNmJlZDUtNTU4ODk3ZC00MTMyNTY2Yy0yMzhiNTcyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:46:49.517384Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NTVkNmJlZDUtNTU4ODk3ZC00MTMyNTY2Yy0yMzhiNTcyMg==, ActorId: [13:7439790493850168096:2556], ActorState: ExecuteState, TraceId: 01jd7xax3n0hfy068d93k3y002, Create QueryResponse for error on request, msg: 2024-11-21T17:46:49.518123Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xax3n0hfy068d94nknemm" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T17:46:49.528697Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715686. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:46:49.528750Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7439790495101487550:2476] TxId: 281474976715686. Ctx: { TraceId: 01jd7xax40fmgf2m02m3cjd5gb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YWRhMmE1NDctMzRmOTg3ODItNTI4MjA2NzEtZmNkODc2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:46:49.528881Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=YWRhMmE1NDctMzRmOTg3ODItNTI4MjA2NzEtZmNkODc2YWE=, ActorId: [14:7439790495101487547:2476], ActorState: ExecuteState, TraceId: 01jd7xax40fmgf2m02m3cjd5gb, Create QueryResponse for error on request, msg: 2024-11-21T17:46:49.529602Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xax43702yeahx8tryjck0" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TMonitoringTests::InvalidActorId [GOOD] |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |76.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks |76.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |76.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 4264392326803118682 |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit87 |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageNoQuota >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> THealthCheckTest::YellowGroupIssueOnYellowSpace [GOOD] >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TDynamicNameserverTest::BasicFunctionality |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TKqpScanData::UnboxedValueSize [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD] >> 
THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> KqpOlapBlobsSharing::MultipleSchemaVersions [FAIL] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsNoLimit |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |76.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |76.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |76.6%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead |76.6%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks [GOOD] >> THealthCheckTest::TestTabletIsDead >> VDiskBalancing::TestRandom_Block42 [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns |76.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |76.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 7196708768897608188 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] 
TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2024-11-21T17:46:20.495710Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2024-11-21T17:46:20.527775Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: 
TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2024-11-21T17:46:20.715799Z 3 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:284:55] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2024-11-21T17:46:20.715860Z 6 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:287:58] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2024-11-21T17:46:20.715880Z 5 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7636:15] ServerId# [1:7645:1088] TabletId# 72057594037932033 PipeClientId# [5:7636:15] 2024-11-21T17:46:20.715892Z 4 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:285:56] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2024-11-21T17:46:20.715907Z 2 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:283:54] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2024-11-21T17:46:20.715920Z 7 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:288:59] TabletId# 72057594037932033 PipeClientId# [7:209:16] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] 
TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Status ... 36 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999573} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Stop node 3 2024-11-21T17:46:44.396402Z 1 00h28m30.986344s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 4 2024-11-21T17:46:44.460013Z 1 00h28m40.987880s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Stop node 7 2024-11-21T17:46:44.836143Z 1 00h29m10.989416s :PIPE_SERVER 
ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 1 2024-11-21T17:46:44.903098Z 1 00h29m21.015120s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Stop node 1 2024-11-21T17:46:45.015133Z 1 00h29m41.016144s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] 
TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999609} Starting nodes Start compaction 1 Start checking ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2024-11-21T17:46:42.559774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:42.560295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:42.560323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020f0/r3tmp/tmpvhtoC1/pdisk_1.dat 2024-11-21T17:46:42.659696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:42.660661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:46:42.666239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:42.666495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:46:42.666831Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T17:46:42.666842Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T17:46:42.680143Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:42.680764Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2024-11-21T17:46:42.723307Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:307:2347] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T17:46:42.723351Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2024-11-21T17:46:42.723379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:42.723386Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:42.723390Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:42.723396Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:42.723399Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:42.723411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T17:46:42.723464Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:46:42.723470Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T17:46:42.723474Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T17:46:42.723479Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:46:42.723508Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2024-11-21T17:46:42.733777Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2024-11-21T17:46:42.733809Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:307:2347]) 2024-11-21T17:46:42.733825Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:46:42.734006Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2024-11-21T17:46:42.734025Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Execute 2024-11-21T17:46:42.734031Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:42.734046Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Complete 2024-11-21T17:46:42.734091Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 202797666304 } 2024-11-21T17:46:42.734098Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2024-11-21T17:46:42.734105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:42.734170Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2024-11-21T17:46:42.734181Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:42.734185Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:42.734215Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:46:42.734220Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T17:46:42.734225Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T17:46:42.734247Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:46:42.744540Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2024-11-21T17:46:42.744568Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:46:42.839971Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T17:46:42.840016Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T17:46:42.840119Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T17:46:42.840218Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T17:46:42.840243Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T17:46:42.840449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T17:46:42.840716Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T17:46:42.840731Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T17:46:42.840735Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T17:46:42.840739Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T17:46:42.840867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:46:42.840879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:46:42.841031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T17:46:42.841555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:42.841817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:46:42.841825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:42.841944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T17:46:42.842409Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2024-11-21T17:46:42.843647Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2024-11-21T17:46:42.843667Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T17:46:42.844249Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2024-11-21T17:46:42.844264Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2024-11-21T17:46:42.844274Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T17:46:42.844302Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2024-11-21T17:46:42.844415Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T17:46:42.844446Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2024-11-21T17:46:42.844582Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T17:46:42.844660Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T17:46:42.844679Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{94000701932640}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T17:46:42.844691Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{94000701932640}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2024-11-21T17:46:42.844724Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{94000701932640}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2024-11-21T17:46:42.844735Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{94000701932640}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... 2:4 2024-11-21T17:46:53.500036Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715667 at tablet 72075186224037889 2024-11-21T17:46:53.500088Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:53.500125Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:53.500136Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:53.500140Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:53.500144Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2024-11-21T17:46:53.510840Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:53.510902Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:53.511391Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 HANDLE EvProposeTransaction marker# C0 2024-11-21T17:46:53.511412Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 step# 32501 Status# 16 SEND to# [2:379:2374] Proxy marker# C1 2024-11-21T17:46:53.626636Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715667 has been planned 2024-11-21T17:46:53.626671Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T17:46:53.626677Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 2024-11-21T17:46:53.626751Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2024-11-21T17:46:53.626869Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715667 marker# C2 2024-11-21T17:46:53.626900Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 stepId# 33000 Status# 17 SEND EvProposeTransactionStatus to# [2:379:2374] Proxy 2024-11-21T17:46:53.627022Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, 
transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:46:53.627170Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715667 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715667 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:46:53.627180Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:53.627250Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:53.627258Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:53.627267Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715667] in PlanQueue unit at 72075186224037889 2024-11-21T17:46:53.627320Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715667 keys extracted: 0 2024-11-21T17:46:53.627348Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:53.627368Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:46:53.627384Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2024-11-21T17:46:53.627471Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:53.627875Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 33000 txid# 281474976715667} 2024-11-21T17:46:53.627886Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2024-11-21T17:46:53.627894Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:53.627934Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T17:46:53.627947Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T17:46:53.627952Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2024-11-21T17:46:53.627956Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 acknowledged 2024-11-21T17:46:53.627960Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 acknowledged 2024-11-21T17:46:53.628012Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:46:53.628029Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715667] from 72075186224037889 at tablet 72075186224037889 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:46:53.628040Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715667 state PreOffline TxInFly 0 2024-11-21T17:46:53.628056Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:46:53.628080Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715667, done: 0, blocked: 1 2024-11-21T17:46:53.628695Z node 2 :TX_DATASHARD 
DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037889 state PreOffline 2024-11-21T17:46:53.628710Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T17:46:53.628820Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2024-11-21T17:46:53.628840Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715667, publications: 2, subscribers: 1 2024-11-21T17:46:53.628985Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715667, subscribers: 1 2024-11-21T17:46:53.629076Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:46:53.629340Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2024-11-21T17:46:53.629369Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2024-11-21T17:46:53.629382Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:59: Begin, root #43 2024-11-21T17:46:53.629389Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2024-11-21T17:46:53.629397Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 TRACE ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:387: {0}, callable #43 2024-11-21T17:46:53.629409Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:577: Node #43 finished execution 2024-11-21T17:46:53.629426Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2024-11-21T17:46:53.629432Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2024-11-21T17:46:53.629437Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 INFO ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) 
[core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2024-11-21T17:46:53.629462Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 NOTE ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2024-11-21T17:46:53.629468Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 NOTE ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2024-11-21T17:46:53.629473Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=M2Y5NjAyYzgtNjZjOWU4YzctNDY5YjI4MmQtNjlkNjc1Mjg= 2024-11-21 17:46:53.629 NOTE ydb-core-tx-datashard-ut_minstep(pid=279828, tid=0x00007F6A0054BBC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2024-11-21T17:46:53.640438Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:46:53.640520Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T17:46:53.640879Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:46:53.641054Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2024-11-21T17:46:53.641132Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2024-11-21T17:46:53.641141Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2024-11-21T17:46:53.641160Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2024-11-21T17:46:53.641178Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2024-11-21T17:46:53.641193Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> ReadOnlyVDisk::TestGarbageCollect >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> ReadOnlyVDisk::TestWrites >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsLimit800 >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> TDynamicNameserverTest::BasicFunctionality [GOOD] >> THealthCheckTest::ShardsNoLimit [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] |76.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2024-11-21T17:46:52.689732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:52.689752Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:52.696964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T17:46:52.702932Z node 1 :NODE_BROKER 
DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:52.703530Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:52.703597Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.703601Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.703607Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:46:52.703888Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:46:52.704992Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:46:52.705003Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.705006Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.705009Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.705025Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:46:52.705047Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:46:52.705063Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T17:46:52.705068Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T17:46:52.705949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:46:52.741012Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:46:52.741056Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.028000Z 2024-11-21T17:46:52.741062Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:46:52.741072Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.741121Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:616:2252], Recipient [1:543:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.741469Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:613:2250], Recipient [1:543:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:46:52.741478Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:52.741509Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T17:46:52.741593Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:629:2257], Recipient [1:543:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.741638Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:613:2250], Recipient [1:543:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:46:52.741645Z node 1 
:NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:46:52.741654Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:46:52.742314Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:46:52.742344Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:46:52.742371Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:630:2187], Recipient [1:543:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.742375Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.742380Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.742383Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.742398Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:52.742403Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:46:52.742507Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T17:46:52.742544Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:46:52.753475Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:52.753509Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T17:46:52.753516Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:46:52.753521Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T17:46:52.753578Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T17:46:52.753587Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.753704Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:642:2263], Recipient [1:543:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.753731Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:613:2250], Recipient [1:543:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: 
"0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:46:52.753736Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:46:52.753744Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:46:52.753826Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:46:52.753839Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:46:52.753852Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:643:2187], Recipient [1:543:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.753856Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.753860Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.753863Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.753874Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:52.753878Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:46:52.753903Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T17:46:52.753939Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:46:52.764957Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:52.764987Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T17:46:52.764994Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:46:52.764999Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T17:46:52.765061Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-1" } 2024-11-21T17:46:52.765075Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.765216Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:648:2268], Recipient [1:543:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.765245Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:613:2250], Recipient [1:543:2187]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" } 2024-11-21T17:46:52.765252Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:46:52.765262Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" 2024-11-21T17:46:52.765644Z node 1 :NODE_BROKER TRACE: Handle TEvTx ... NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.765696Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.765701Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.765704Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.765719Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:52.765724Z node 1 :NODE_BROKER DEBUG: Registration request from host3:19001 (not fixed) tenant: /dc-1/yet-another-database 2024-11-21T17:46:52.765753Z node 1 :NODE_BROKER DEBUG: Adding node #1026 host3:19001 to database resolvehost=host3 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=0 authorizedbycertificate=false 2024-11-21T17:46:52.765816Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=4 2024-11-21T17:46:52.778705Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:52.778740Z node 1 :NODE_BROKER DEBUG: Added node #1026 host3:19001 2024-11-21T17:46:52.778747Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 3 to 4 2024-11-21T17:46:52.778752Z node 1 :NODE_BROKER DEBUG: Add node #1026 host3:19001 to epoch cache 2024-11-21T17:46:52.778814Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T17:46:52.778824Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.778961Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:661:2274], Recipient [1:543:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.778988Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:613:2250], Recipient [1:543:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" } 2024-11-21T17:46:52.778995Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:46:52.779007Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database" 2024-11-21T17:46:52.779105Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/yet-another-database TableId: [72057594046678944:3:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:46:52.779118Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/yet-another-database": scope id# <72057594046678944:3>: serviced subdomain# 72057594046678944:3 2024-11-21T17:46:52.779132Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:662:2187], Recipient [1:543:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.779136Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.779141Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.779144Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.779157Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:52.779161Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/yet-another-database 2024-11-21T17:46:52.779194Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=1 authorizedbycertificate=false 2024-11-21T17:46:52.792856Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:52.792947Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-1" } 2024-11-21T17:46:52.792959Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.793129Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:667:2279], Recipient [1:543:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.793158Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:613:2250], Recipient [1:543:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:46:52.793165Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:46:52.793177Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:46:52.793290Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 
72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:46:52.793305Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:46:52.793319Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:668:2187], Recipient [1:543:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.793323Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.793328Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.793332Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.793347Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:52.793351Z node 1 :NODE_BROKER DEBUG: Registration request from host4:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:46:52.793386Z node 1 :NODE_BROKER DEBUG: Adding node #1027 host4:19001 to database resolvehost=host4 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T17:46:52.793434Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=5 2024-11-21T17:46:52.804516Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:52.804544Z node 1 :NODE_BROKER DEBUG: Added node #1027 host4:19001 2024-11-21T17:46:52.804553Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 4 to 5 2024-11-21T17:46:52.804557Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:19001 to epoch cache 2024-11-21T17:46:52.804642Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T17:46:52.804651Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.804859Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:673:2284], Recipient [1:543:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.804900Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:613:2250], Recipient [1:543:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:46:52.804913Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:46:52.804927Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:46:52.805065Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok 
Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:46:52.805094Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:46:52.805119Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:674:2187], Recipient [1:543:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.805127Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.805135Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.805141Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.805162Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:52.805170Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:46:52.805210Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=2 authorizedbycertificate=false 2024-11-21T17:46:52.819626Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:52.819703Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-2" } 2024-11-21T17:46:52.819714Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> BasicUsage::CloseWriteSessionImmediately [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2024-11-21T17:46:52.844994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:52.845018Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:52.850146Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:52.850593Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:52.850677Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.850685Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.850697Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:46:52.850861Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:52.851551Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:46:52.851561Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.851566Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.851569Z node 1 :NODE_BROKER TRACE: 
TTxProcessor(root) starts new tx 2024-11-21T17:46:52.851618Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:46:52.851668Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:46:52.851687Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:46:52.851694Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:46:52.884049Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:46:52.884091Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T17:46:52.884097Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:46:52.884109Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.894525Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2205], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.894949Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039946, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigRequest { Config { EpochDuration: 10000000 } } 2024-11-21T17:46:52.894965Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvSetConfigRequest 2024-11-21T17:46:52.894972Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.894976Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.894997Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Execute Config { EpochDuration: 10000000 } 2024-11-21T17:46:52.895020Z node 1 :NODE_BROKER DEBUG: Update config in database config=EpochDuration: 10000000 2024-11-21T17:46:52.905996Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Complete 2024-11-21T17:46:52.906022Z node 1 :NODE_BROKER ERROR: Configured lease duration (10.000000s) is too small. Using min. 
value: 300.000000s 2024-11-21T17:46:52.906057Z node 1 :NODE_BROKER TRACE: TTxUpdateConfig reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2024-11-21T17:46:52.906065Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.906165Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:579:2209], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.906198Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:46:52.906203Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:46:52.906212Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:46:52.906977Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:46:52.907002Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:46:52.907021Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:580:2184], Recipient [1:539:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.907025Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:52.907031Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:52.907034Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:52.907050Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:52.907054Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:46:52.907144Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:46:52.907187Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:46:52.918057Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:52.918078Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:46:52.918084Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:46:52.918087Z node 1 
:NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:46:52.918151Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2024-11-21T17:46:52.918159Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:52.918290Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:593:2214], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.918308Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:591:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T17:46:52.918313Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:46:52.918325Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } } 2024-11-21T17:46:52.918369Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:593:2214], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:52.918406Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:597:2215], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.918414Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:595:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T17:46:52.918417Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:46:52.918422Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T17:46:52.918435Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:597:2215], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:52.918463Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:599:2217], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:52.918473Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:46:52.918476Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:52.918484Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:46:53.011796Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:624:2218], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.011880Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T17:46:53.011887Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.011903Z node 1 :NODE_BROKER 
TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:46:53.012359Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:625:2219], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.012400Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:626:2220], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.012433Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:627:2221], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.012454Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:629:2223], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.012495Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:626:2220] 2024-11-21T17:46:53.012499Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.012506Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:46:53.012520Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:628:2222], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.012545Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:631:2225], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.012562Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, S ... RACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:46:53.106878Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:46:53.106890Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:639:2184], Recipient [1:539:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:53.106897Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:46:53.106901Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:53.106905Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:53.106917Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:46:53.106922Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T17:46:53.106947Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:05:00 UTC 
servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T17:46:53.106981Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=4 2024-11-21T17:46:53.118850Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:46:53.118894Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T17:46:53.118902Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 3 to 4 2024-11-21T17:46:53.118906Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T17:46:53.118964Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7500024000 Name: "slot-1" } 2024-11-21T17:46:53.118972Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:53.119071Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2024-11-21T17:46:53.119078Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.119098Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z 2024-11-21T17:46:53.119165Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 4 } 2024-11-21T17:46:53.119169Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.119174Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z 2024-11-21T17:46:53.119342Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:645:2237], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.119361Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:46:53.119364Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.119368Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z 2024-11-21T17:46:53.243785Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:539:2184], Recipient [1:539:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:46:53.243813Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:46:53.243820Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:46:53.243826Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:46:53.243850Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:46:53.243862Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.274721Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T17:46:53.274749Z node 1 :NODE_BROKER 
TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274757Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.274863Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:625:2219] 2024-11-21T17:46:53.274868Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274872Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.274878Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:626:2220] 2024-11-21T17:46:53.274881Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274884Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.274889Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:627:2221] 2024-11-21T17:46:53.274892Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274895Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.274901Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:628:2222] 2024-11-21T17:46:53.274904Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274907Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.274912Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:629:2223] 2024-11-21T17:46:53.274915Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274918Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.274923Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:630:2224] 2024-11-21T17:46:53.274926Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274929Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.274935Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:631:2225] 2024-11-21T17:46:53.274938Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.274941Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:46:53.286105Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:46:53.286140Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T17:46:53.286153Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286171Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:05:00.024000Z 2024-11-21T17:46:53.286176Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=1 expired=1 2024-11-21T17:46:53.286215Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286224Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286232Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for 
epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286237Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286243Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286249Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286255Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286261Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286268Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:46:53.286575Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:662:2243], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.286612Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:528:2178], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:46:53.286617Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.286622Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286703Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2024-11-21T17:46:53.286708Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:46:53.286712Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.024000Z - 1970-01-01T02:05:00.024000Z - 1970-01-01T02:10:00.024000Z 2024-11-21T17:46:53.286828Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:666:2244], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.286845Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:664:2072], Recipient [1:539:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1057 } 2024-11-21T17:46:53.286849Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:46:53.286866Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T17:46:53.286890Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:666:2244], Recipient [1:539:2184]: NKikimr::TEvTabletPipe::TEvServerDisconnected |76.6%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2024-11-21T17:46:50.026032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.026085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.026098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001af3/r3tmp/tmpEE0ZHj/pdisk_1.dat 2024-11-21T17:46:50.122681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18128, node 1 TClient is connected to server localhost:32663 2024-11-21T17:46:50.251836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:50.251852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:50.251857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:50.251978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD database_status { name: "/Root/serverless" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "1-1-55" overall: GREEN pdisk { id: "1-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "2" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 1 host: "::1" port: 12001 } 2024-11-21T17:46:51.213173Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:453:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:51.213227Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:51.213240Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001af3/r3tmp/tmpztJET2/pdisk_1.dat 2024-11-21T17:46:51.380621Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12755, node 3 TClient is connected to server localhost:8702 2024-11-21T17:46:51.552556Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:51.552572Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:51.552576Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:51.552659Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD database_status { name: "/Root/serverless" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "3-1-55" overall: GREEN pdisk { id: "3-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "5" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 3 host: "::1" port: 12001 } 2024-11-21T17:46:52.663895Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.663950Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.663974Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001af3/r3tmp/tmpY9iKjv/pdisk_1.dat 2024-11-21T17:46:52.760883Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9069, node 6 TClient is connected to server localhost:27401 2024-11-21T17:46:52.866340Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:52.866360Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:52.866364Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:52.866428Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-f65b-f489" status: RED message: "Database has compute issues" location { database { name: "/Root/serverless" } } reason: "RED-f65b-7469" type: "DATABASE" level: 1 } issue_log { id: "RED-f65b-7469" status: RED message: "There are no compute nodes" location { database { name: "/Root/serverless" } } type: "COMPUTE" level: 2 } database_status { name: "/Root/serverless" overall: RED storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "6-1-55" overall: GREEN pdisk { id: "6-1" overall: GREEN } } } } } compute { overall: RED } } database_status { name: "/Root" overall: GREEN storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN } } } compute { overall: GREEN nodes { id: "6" overall: GREEN load { overall: GREEN cores: 64 } } } } database_status { name: "/Root/shared" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN } } } compute { overall: GREEN nodes { id: "7" overall: GREY } } } location { id: 6 host: "::1" port: 12001 } 2024-11-21T17:46:54.030174Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.030243Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.030281Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.030431Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.030499Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.030515Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001af3/r3tmp/tmpb7bTxZ/pdisk_1.dat 2024-11-21T17:46:54.138553Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4442, node 8 TClient is connected to server localhost:22598 2024-11-21T17:46:54.268222Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.268268Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.268272Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.268380Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 >> ReadOnlyVDisk::TestWrites [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSchemaVersions [FAIL] Test command err: Trying to start YDB, gRPC: 10878, MsgBus: 20730 2024-11-21T17:46:08.502205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790317124499228:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.502265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e9b/r3tmp/tmpXRjrxV/pdisk_1.dat 2024-11-21T17:46:08.552162Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10878, node 1 2024-11-21T17:46:08.562519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:08.562530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:08.562531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:08.562556Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:20730 TClient is connected to server localhost:20730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:08.602861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:08.602892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:08.603895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:08.603973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:08.690008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.271789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.271843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.271885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.271906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.271919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.271936Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.271955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.271975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.271994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.272013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.272031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.272063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790321419470074:2297];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.273274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.273295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.273320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.273338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.273349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.273364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.273375Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.273386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.273404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.273434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.273450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.273466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038894;self_id=[1:7439790321419470242:2301];tablet_id=72075186224038894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.274007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.274025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.274042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.274054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.274064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.274081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.274095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.274112Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038884;self_id=[1:7439790321419470346:2332];tablet_id=72075186224038884;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.274129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038884;self_id=[1:74397903214194703 ... zation_start;last_saved_id=16; 2024-11-21T17:46:49.776977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038453;self_id=[1:7439790493218246673:12498];tablet_id=72075186224038453;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.776981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038648;self_id=[1:7439790493218247088:12606];tablet_id=72075186224038648;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.780623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038682;self_id=[1:7439790493218246623:12492];tablet_id=72075186224038682;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.783094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038455;self_id=[1:7439790493218246588:12484];tablet_id=72075186224038455;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.784594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038795;self_id=[1:7439790493218246808:12525];tablet_id=72075186224038795;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.788816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[1:7439790493218247228:12632];tablet_id=72075186224038049;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.788817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038129;self_id=[1:7439790493218246903:12550];tablet_id=72075186224038129;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.792273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038579;self_id=[1:7439790493218246793:12523];tablet_id=72075186224038579;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.792999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038545;self_id=[1:7439790493218246728:12507];tablet_id=72075186224038545;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.795585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038160;self_id=[1:7439790493218246717:12506];tablet_id=72075186224038160;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.796450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038246;self_id=[1:7439790493218246965:12571];tablet_id=72075186224038246;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.798289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;self_id=[1:7439790493218246480:12451];tablet_id=72075186224038695;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.800029Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038664;self_id=[1:7439790493218246902:12549];tablet_id=72075186224038664;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.801402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038901;self_id=[1:7439790493218247161:12615];tablet_id=72075186224038901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.804987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038593;self_id=[1:7439790493218246744:12509];tablet_id=72075186224038593;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.805319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038268;self_id=[1:7439790493218247405:12680];tablet_id=72075186224038268;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.808502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038101;self_id=[1:7439790493218246794:12524];tablet_id=72075186224038101;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.808857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038337;self_id=[1:7439790493218246867:12541];tablet_id=72075186224038337;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.811816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038191;self_id=[1:7439790493218246755:12510];tablet_id=72075186224038191;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.812144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038312;self_id=[1:7439790493218247042:12589];tablet_id=72075186224038312;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.816100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038349;self_id=[1:7439790493218246964:12570];tablet_id=72075186224038349;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.816204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038118;self_id=[1:7439790493218246997:12573];tablet_id=72075186224038118;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.819693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038546;self_id=[1:7439790493218247039:12588];tablet_id=72075186224038546;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.819751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038427;self_id=[1:7439790493218247099:12607];tablet_id=72075186224038427;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.823523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038271;self_id=[1:7439790493218247145:12613];tablet_id=72075186224038271;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.823733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038398;self_id=[1:7439790493218246674:12499];tablet_id=72075186224038398;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.827806Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038753;self_id=[1:7439790493218247343:12665];tablet_id=72075186224038753;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.827958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038445;self_id=[1:7439790493218247123:12612];tablet_id=72075186224038445;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.831756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038423;self_id=[1:7439790493218247320:12661];tablet_id=72075186224038423;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.831818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038159;self_id=[1:7439790493218247104:12608];tablet_id=72075186224038159;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.834751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;self_id=[1:7439790493218247197:12622];tablet_id=72075186224037956;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.835694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038697;self_id=[1:7439790493218246858:12540];tablet_id=72075186224038697;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.838336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038239;self_id=[1:7439790493218247318:12659];tablet_id=72075186224038239;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.840595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038829;self_id=[1:7439790493218247268:12642];tablet_id=72075186224038829;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.841077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7439790493218246966:12572];tablet_id=72075186224038074;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.843662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038678;self_id=[1:7439790493218247182:12621];tablet_id=72075186224038678;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.844819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038237;self_id=[1:7439790493218247265:12641];tablet_id=72075186224038237;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.846163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038758;self_id=[1:7439790493218247466:12694];tablet_id=72075186224038758;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.849515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038471;self_id=[1:7439790493218247453:12690];tablet_id=72075186224038471;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.853326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038528;self_id=[1:7439790493218247319:12660];tablet_id=72075186224038528;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.855463Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038032;self_id=[1:7439790493218247222:12631];tablet_id=72075186224038032;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.857161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038683;self_id=[1:7439790493218247398:12676];tablet_id=72075186224038683;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:49.861718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038403;self_id=[1:7439790493218247388:12675];tablet_id=72075186224038403;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:46:50.010575Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211209001, txId: 18446744073709551615] shutting down [[2u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[2u]]|[[3u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x127C3768 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x2415B5B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x1246EC34 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x12464399 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:545: Execute_ @ 0x12464399 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7F9B8D8ABD8F 11. ??:0: ?? @ 0x7F9B8D8ABE3F 12. ??:0: ?? @ 0x1179D028 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2024-11-21T17:46:50.052361Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790496992310326:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:50.077543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a47/r3tmp/tmp23iPOE/pdisk_1.dat 2024-11-21T17:46:50.107055Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7056, node 1 2024-11-21T17:46:50.132411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:50.132425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:50.132428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:50.132459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12670 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:46:50.177174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:50.177203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:50.179315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:50.180108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:50.182507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:50.656718Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790500743308462:2128];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:50.659916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a47/r3tmp/tmpDPNK4A/pdisk_1.dat 2024-11-21T17:46:50.690303Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8654, node 2 2024-11-21T17:46:50.724431Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:50.724445Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:50.724447Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:50.724485Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16819 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:50.776674Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:50.776709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:46:50.777043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:50.777906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:50.779363Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:52.145422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.145509Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.145533Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.145628Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.145683Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.145700Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a47/r3tmp/tmpmowLfN/pdisk_1.dat 2024-11-21T17:46:52.247105Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29936, node 3 TClient is connected to server localhost:28386 2024-11-21T17:46:52.380617Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:52.380638Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:52.380643Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:52.380703Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:53.315722Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.315837Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:53.315884Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.316028Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.316087Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.316140Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a47/r3tmp/tmpi4NdEo/pdisk_1.dat 2024-11-21T17:46:53.404559Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7074, node 5 TClient is connected to server localhost:23693 2024-11-21T17:46:53.528983Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:53.529006Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:53.529010Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:53.529144Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T17:46:54.336889Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.336948Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.336965Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a47/r3tmp/tmpjDP5n9/pdisk_1.dat 2024-11-21T17:46:54.422666Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32741, node 7 TClient is connected to server localhost:4056 2024-11-21T17:46:54.533768Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.533788Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.533793Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.534056Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD database_status { name: "/Root/serverless" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "7-1-55" overall: GREEN pdisk { id: "7-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "9" overall: GREEN load { overall: GREEN cores: 64 } } } } database_status { name: "/Root" overall: GREEN storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN } } } compute { overall: GREEN nodes { id: "7" overall: GREEN load { overall: GREEN cores: 64 } } } } database_status { name: "/Root/shared" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN } } } compute { overall: GREEN nodes { id: "8" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 7 host: "::1" port: 12001 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2024-11-21T17:46:39.867039Z :BasicWriteSession INFO: Random seed for debugging is 1732211199867033 2024-11-21T17:46:39.980825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790453478607336:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.980883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.985497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790452546343680:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e17/r3tmp/tmp4sTKKP/pdisk_1.dat 2024-11-21T17:46:40.018470Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:40.028463Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:40.029692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:40.049174Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19757, node 1 2024-11-21T17:46:40.062717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e17/r3tmp/yandexSuTN96.tmp 2024-11-21T17:46:40.062731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e17/r3tmp/yandexSuTN96.tmp 2024-11-21T17:46:40.062799Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e17/r3tmp/yandexSuTN96.tmp 2024-11-21T17:46:40.062837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:40.066993Z INFO: TTestServer started on Port 19636 GrpcPort 19757 TClient is connected to server localhost:19636 PQClient connected to localhost:19757 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:40.080904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.080932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:46:40.084747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:40.119303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:40.120002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.120024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:46:40.120990Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:40.121361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:40.124411Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:46:40.287218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790457773575550:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.287248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.287376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790457773575577:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.288091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:40.295574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790457773575579:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:46:40.327908Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790456841311137:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:40.328027Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmIyYmZiN2QtODE3Yjc4NGMtZWMyZWNkM2UtMTMzNGIxMzg=, ActorId: [2:7439790456841311097:2277], ActorState: ExecuteState, TraceId: 01jd7xam97br67mch6v1nezn3f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:40.328218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:40.328503Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:40.393596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:40.395798Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790457773575802:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:40.395893Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUyYWM1YWEtMzk3ZWVjMjctMjQwZmNiZmMtOWExMDIzYmI=, ActorId: [1:7439790457773575547:2299], ActorState: ExecuteState, TraceId: 01jd7xam8y3tt3chqbebvtdx0b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:40.396016Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:40.457389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19757", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:40.487905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xameqaswbwhz16q1wnwz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I2MzM2ZWYtOTY1ZmNiNGEtZTE0MjgyMjktNTFmMDg1OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790457773576065:2941] 2024-11-21T17:46:44.981154Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790453478607336:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.981187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:44.985420Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790452546343680:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.985468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:45.582745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:19757 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:45.614734Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:19757 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true Read ... eedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:7976 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:54.026970Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:7976 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:54.529860Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2024-11-21T17:46:54.539395Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2024-11-21T17:46:54.539650Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2024-11-21T17:46:54.539657Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:7976 2024-11-21T17:46:54.540582Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:46:54.540996Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:46:54.541014Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T17:46:54.544568Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:46:54.544619Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:56588 2024-11-21T17:46:54.544624Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:56588 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:46:54.544629Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:46:54.545126Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:46:54.545164Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:46:54.545172Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:46:54.545174Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:46:54.545180Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:46:54.546466Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:46:54.566812Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:46:54.566882Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790517587013804:2464] connected; active server actors: 1 2024-11-21T17:46:54.566892Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:46:54.566895Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:46:54.566940Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790517587013804:2464] disconnected; active server actors: 1 
2024-11-21T17:46:54.566945Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790517587013804:2464] disconnected no session 2024-11-21T17:46:54.580581Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:46:54.580597Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:46:54.580600Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790517587013774:2464] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:46:54.580609Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:46:54.581097Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:54.581120Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439790517587013821:2464], now have 1 active actors on pipe 2024-11-21T17:46:54.581137Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T17:46:54.581234Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:54.581254Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:54.581309Z node 4 :PERSQUEUE INFO: new Cookie src|ce7e8d87-8d06faf3-eef89862-de614d58_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:46:54.581351Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:46:54.581396Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:54.581587Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:54.581596Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:54.581620Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:54.581688Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ce7e8d87-8d06faf3-eef89862-de614d58_0 2024-11-21T17:46:54.582037Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211214582 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:54.582063Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ce7e8d87-8d06faf3-eef89862-de614d58_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:46:54.582156Z :INFO: [] MessageGroupId [src] SessionId [src|ce7e8d87-8d06faf3-eef89862-de614d58_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:46:54.582167Z :INFO: [] MessageGroupId [src] SessionId [src|ce7e8d87-8d06faf3-eef89862-de614d58_0] Write session will now close 2024-11-21T17:46:54.582178Z :DEBUG: [] MessageGroupId [src] SessionId [src|ce7e8d87-8d06faf3-eef89862-de614d58_0] Write session: aborting 2024-11-21T17:46:54.582349Z :INFO: [] MessageGroupId [src] SessionId [src|ce7e8d87-8d06faf3-eef89862-de614d58_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:46:54.582358Z :DEBUG: [] MessageGroupId [src] SessionId [src|ce7e8d87-8d06faf3-eef89862-de614d58_0] Write session: destroy 2024-11-21T17:46:54.583032Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|ce7e8d87-8d06faf3-eef89862-de614d58_0 grpc read done: success: 0 data: 2024-11-21T17:46:54.583041Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ce7e8d87-8d06faf3-eef89862-de614d58_0 grpc read failed 2024-11-21T17:46:54.583047Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ce7e8d87-8d06faf3-eef89862-de614d58_0 grpc closed 2024-11-21T17:46:54.583053Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ce7e8d87-8d06faf3-eef89862-de614d58_0 is DEAD 2024-11-21T17:46:54.583353Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:46:54.583499Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:54.583527Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790517587013821:2464] destroyed 2024-11-21T17:46:54.583549Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >> ReadSessionImplTest::SuccessfulInit >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 10419695376910221702 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2024-11-21T17:46:54.598234Z 1 00h01m30.060512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2024-11-21T17:46:54.600958Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T17:46:54.603817Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T17:46:54.604460Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2024-11-21T17:46:54.606121Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2024-11-21T17:46:54.606596Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2024-11-21T17:46:54.607202Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2024-11-21T17:46:54.607673Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND 
TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2024-11-21T17:46:54.771635Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.771670Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:54.771703Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:54.771897Z 1 00h03m30.111536s :BS_PROXY_PUT ERROR: [361c969a7f9afab5] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2024-11-21T17:46:54.772280Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.772312Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:54.772517Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2024-11-21T17:46:54.772846Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.772980Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:54.773120Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2024-11-21T17:46:54.773323Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:54.773639Z 1 00h03m30.111536s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.773734Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:15:0:0:32768:0] 2024-11-21T17:46:54.773895Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:54.773906Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:54.774061Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2024-11-21T17:46:54.774381Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:54.774394Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:54.774620Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2024-11-21T17:46:54.774847Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.774874Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:54.774883Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2024-11-21T17:46:54.775238Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.775271Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:54.775286Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2024-11-21T17:46:54.775663Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.775691Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:54.775705Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# 
"TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2024-11-21T17:46:54.776123Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T17:46:54.776136Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:54.776153Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] ... 2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} 
=== Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2024-11-21T17:46:55.093609Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:55.093646Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2024-11-21T17:46:55.094581Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:55.094876Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2024-11-21T17:46:55.095548Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2024-11-21T17:46:55.096068Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:55.096081Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2024-11-21T17:46:55.096564Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:55.096576Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2024-11-21T17:46:55.097130Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:55.097141Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2024-11-21T17:46:55.097592Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T17:46:55.097640Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2024-11-21T17:46:55.098186Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:55.098202Z 2 00h08m00.211536s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2024-11-21T17:46:55.098638Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T17:46:55.098657Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 
1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead |76.7%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> THealthCheckTest::ShardsLimit800 [GOOD] >> THealthCheckTest::StorageLimit50 [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T17:46:55.502680Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.502686Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.502689Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.502780Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.502939Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.504366Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.504455Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.504779Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.504783Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.504786Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.504844Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.504914Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.504947Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.504974Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.505023Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:46:55.505207Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.505210Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.505213Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.505254Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:46:55.505341Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.505359Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.505381Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.505565Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.505655Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.505688Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.505695Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:46:55.505916Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.505919Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.505923Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.505976Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.506070Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.506099Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.506122Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T17:46:55.506361Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.506387Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:46:55.506433Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:46:55.506445Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:46:55.506464Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.506469Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:55.506474Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:46:55.506504Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.506511Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:46:55.506514Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:46:55.506517Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:55.506532Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.506545Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:46:55.506548Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:46:55.506551Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:46:55.506564Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.506569Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:46:55.506572Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:46:55.506576Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:55.506588Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T17:46:55.506802Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.506806Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.506808Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.506858Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.506918Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.506952Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.506976Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T17:46:55.507093Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.507115Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:46:55.507147Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:46:55.507158Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:46:55.507177Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.507181Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:55.507185Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:46:55.507188Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:46:55.507192Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:55.507231Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.507241Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:46:55.507244Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:46:55.507248Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:46:55.507251Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:46:55.507254Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:55.507272Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2024-11-21T17:46:55.507695Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.507699Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.507701Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.507756Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.507825Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.507860Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.507887Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T17:46:55.507973Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.508008Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.508037Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T17:46:55.508047Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:46:55.508065Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.508070Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:55.508073Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T17:46:55.508077Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T17:46:55.508082Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T17:46:55.508085Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T17:46:55.508104Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:46:55.508122Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2024-11-21T17:46:39.215208Z :FallbackToSingleDb INFO: Random seed for debugging is 1732211199215198 2024-11-21T17:46:39.325293Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790449742486980:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.325341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.327330Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790449732015894:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.327661Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.354165Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e26/r3tmp/tmpYnogbP/pdisk_1.dat 2024-11-21T17:46:39.364362Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:39.382392Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20710, node 1 2024-11-21T17:46:39.402879Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e26/r3tmp/yandexGlFBEK.tmp 2024-11-21T17:46:39.402895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e26/r3tmp/yandexGlFBEK.tmp 2024-11-21T17:46:39.402947Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e26/r3tmp/yandexGlFBEK.tmp 2024-11-21T17:46:39.402982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:39.408148Z INFO: TTestServer started on Port 6059 GrpcPort 20710 TClient is connected to server localhost:6059 PQClient connected to localhost:20710 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:39.453963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:39.454006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:39.455297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:39.455322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:39.457032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:39.458931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.460677Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:39.461026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T17:46:39.651259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790449742487733:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.651295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.651381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790449742487760:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.652578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790449742487773:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.652606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.653011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:39.658067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790449742487762:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T17:46:39.682251Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790449732016202:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:39.682385Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmJiMTQwZmEtMmI1ZjUzY2ItZWIyYzhlZDEtYzljZDZmODE=, ActorId: [2:7439790449732016162:2277], ActorState: ExecuteState, TraceId: 01jd7xaknw37hkb2bg5sxv8hp5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:39.682773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.682996Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:39.755936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.756078Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790449742487955:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:39.756439Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTIzNGZmNDQtZTkwNDFkMjUtYTc2MTI4NDctN2U3NjM0OTE=, ActorId: [1:7439790449742487730:2299], ActorState: ExecuteState, TraceId: 01jd7xakn23n0vx0smag5p6gz6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:39.756571Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:39.840539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:20710", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:39.953147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd7xakxr2g41v4c1zmchryaf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRhNDNmNzMtZTY4MmQxMzItOWQ0OWZjYmMtZjIyMDgzMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790449742488232:2939] 2024-11-21T17:46:44.325385Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790449742486980:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.325411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:44.327533Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790449732015894:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.327580Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:45.059068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:20710 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:45.085470Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQue ... 
Id; 2024-11-21T17:46:53.909275Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:46:53.909277Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:46:53.909283Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:46:53.909857Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:46:53.935536Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:46:53.935937Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790511183696224:2469] connected; active server actors: 1 2024-11-21T17:46:53.936052Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:46:53.936060Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:46:53.938681Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790511183696224:2469] disconnected; active server actors: 1 2024-11-21T17:46:53.938686Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790511183696224:2469] disconnected no session 2024-11-21T17:46:53.949859Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:46:53.949879Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:46:53.949883Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511183696192:2469] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:46:53.949892Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:46:53.950507Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.950524Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439790511183696241:2469], now have 1 active actors on pipe 2024-11-21T17:46:53.957703Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:53.957726Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:53.957766Z node 4 :PERSQUEUE INFO: new Cookie src|5c636ed8-ce6c71a-e435036d-f9db34c1_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:46:53.957796Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:46:53.957816Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:53.958277Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:53.958283Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:53.958308Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:53.956677Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T17:46:53.958473Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|5c636ed8-ce6c71a-e435036d-f9db34c1_0 2024-11-21T17:46:53.959592Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211213959 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:53.959629Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|5c636ed8-ce6c71a-e435036d-f9db34c1_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:46:53.959729Z :INFO: [] MessageGroupId [src] SessionId [src|5c636ed8-ce6c71a-e435036d-f9db34c1_0] Write session: close. Timeout = 0 ms 2024-11-21T17:46:53.959734Z :INFO: [] MessageGroupId [src] SessionId [src|5c636ed8-ce6c71a-e435036d-f9db34c1_0] Write session will now close 2024-11-21T17:46:53.959740Z :DEBUG: [] MessageGroupId [src] SessionId [src|5c636ed8-ce6c71a-e435036d-f9db34c1_0] Write session: aborting 2024-11-21T17:46:53.959829Z :INFO: [] MessageGroupId [src] SessionId [src|5c636ed8-ce6c71a-e435036d-f9db34c1_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:46:53.959834Z :DEBUG: [] MessageGroupId [src] SessionId [src|5c636ed8-ce6c71a-e435036d-f9db34c1_0] Write session: destroy 2024-11-21T17:46:53.960556Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:53.960576Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790511183696241:2469] destroyed 2024-11-21T17:46:53.960588Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:46:53.960134Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|5c636ed8-ce6c71a-e435036d-f9db34c1_0 grpc read done: success: 0 data: 2024-11-21T17:46:53.960142Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5c636ed8-ce6c71a-e435036d-f9db34c1_0 grpc read failed 2024-11-21T17:46:53.960148Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5c636ed8-ce6c71a-e435036d-f9db34c1_0 grpc closed 2024-11-21T17:46:53.960154Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5c636ed8-ce6c71a-e435036d-f9db34c1_0 is DEAD 2024-11-21T17:46:53.960348Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison PORTS 15577 20101 Session was created >>> Ready to answer: ok 2024-11-21T17:46:54.979957Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2024-11-21T17:46:54.979995Z :INFO: [/Root] [] [f1b4a613-5cd71934-3827225c-372d7b27] Open read subsessions to databases: { name: , endpoint: localhost:20101, path: /Root } 2024-11-21T17:46:54.980055Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Starting read session 2024-11-21T17:46:54.980059Z :DEBUG: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Starting single session 2024-11-21T17:46:54.980350Z :DEBUG: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T17:46:54.980357Z :DEBUG: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T17:46:54.980363Z :DEBUG: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] Reconnecting session to cluster in 0.000000s 2024-11-21T17:46:54.980409Z :ERROR: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:20101
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20101. 2024-11-21T17:46:54.980417Z :DEBUG: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T17:46:54.980419Z :DEBUG: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T17:46:54.980436Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:20101" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:20101
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20101. " } 2024-11-21T17:46:54.980763Z :NOTICE: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:46:54.980776Z :DEBUG: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:20101" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:20101
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20101. " } 2024-11-21T17:46:54.980800Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Closing read session. Close timeout: 0.010000s 2024-11-21T17:46:54.980809Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:46:54.980817Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:54.980824Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:54.980826Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:46:54.980828Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:54.980831Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:54.980835Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:46:54.980837Z :INFO: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:54.980841Z :NOTICE: [/Root] [/Root] [cc206cd5-55da821b-61f30cb4-6a23558d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 1587856288268696167 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2024-11-21T17:46:54.479925Z 1 00h01m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.479994Z 2 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T17:46:54.480019Z 8 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T17:46:54.480034Z 7 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T17:46:54.480078Z 6 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T17:46:54.480157Z 5 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 2} Hard# } 2024-11-21T17:46:54.480325Z 3 00h01m40.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:2:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 2} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2024-11-21T17:46:54.482114Z 1 00h01m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T17:46:54.610380Z 1 00h03m20.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.610488Z 8 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:5] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T17:46:54.610513Z 7 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:4] barrier# {Soft# 
{Gen# 1 Step# 3} Hard# } 2024-11-21T17:46:54.610537Z 6 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:3] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T17:46:54.610573Z 5 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:2] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T17:46:54.610594Z 4 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:1] barrier# {Soft# {Gen# 1 Step# 3} Hard# } 2024-11-21T17:46:54.610857Z 2 00h03m20.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:54.611074Z 3 00h03m20.210512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:3:0:0:32768:6] barrier# {Soft# {Gen# 1 Step# 3} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2024-11-21T17:46:54.682539Z 1 00h04m20.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.682585Z 2 00h04m20.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T17:46:54.733531Z 1 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.733660Z 8 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:54.733719Z 7 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:54.733764Z 6 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:54.733812Z 5 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:54.733852Z 4 00h05m00.310512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:54.733952Z 2 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:54.734206Z 3 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:46:54.734243Z 1 00h05m00.310512s :BS_PROXY_PUT ERROR: [0743e65f5d206666] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" 
ApproximateFreeSpaceShare# 0.999988} 2024-11-21T17:46:54.799955Z 1 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.800003Z 2 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:54.800013Z 3 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2024-11-21T17:46:54.906395Z 1 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.906444Z 2 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:54.906454Z 3 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:46:54.906463Z 4 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2024-11-21T17:46:54.948810Z 1 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.948864Z 2 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:54.948873Z 3 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:46:54.948881Z 4 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T17:46:54.948889Z 5 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2024-11-21T17:46:54.988543Z 1 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:54.988597Z 2 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:54.988606Z 3 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:46:54.988614Z 4 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T17:46:54.988622Z 5 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T17:46:54.988630Z 6 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2024-11-21T17:46:55.023958Z 1 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:55.024014Z 2 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:55.024027Z 3 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:46:55.024036Z 4 00h09m40.560512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T17:46:55.024045Z 5 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T17:46:55.024055Z 6 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T17:46:55.024064Z 7 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2024-11-21T17:46:55.062829Z 2 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:46:55.062859Z 3 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:46:55.062870Z 4 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T17:46:55.062879Z 5 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T17:46:55.062888Z 6 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T17:46:55.062897Z 7 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2024-11-21T17:46:55.103918Z 3 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:46:55.103945Z 4 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T17:46:55.103955Z 5 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T17:46:55.103963Z 6 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T17:46:55.103972Z 7 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2024-11-21T17:46:55.149670Z 4 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5306:715] 2024-11-21T17:46:55.149694Z 5 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T17:46:55.149703Z 6 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T17:46:55.149712Z 7 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2024-11-21T17:46:55.209841Z 5 00h12m20.760512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5313:722] 2024-11-21T17:46:55.209865Z 6 00h12m20.760512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T17:46:55.209874Z 7 00h12m20.760512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] Setting VDisk read-only to 0 for position 4 
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2024-11-21T17:46:55.334749Z 6 00h14m00.811536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5320:729] 2024-11-21T17:46:55.334776Z 7 00h14m00.811536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2024-11-21T17:46:55.407935Z 7 00h14m40.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5327:736] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T17:46:55.584309Z 1 00h16m30.911536s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:6] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:55.584406Z 8 00h16m30.911536s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:5] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:55.584430Z 7 00h16m30.911536s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:4] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:55.584489Z 6 00h16m30.911536s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:3] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:55.584537Z 5 00h16m30.911536s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:2] barrier# {Soft# {Gen# 1 Step# 4} Hard# } 2024-11-21T17:46:55.584604Z 4 00h16m30.911536s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:1:4:0:0:131072:1] barrier# {Soft# {Gen# 1 Step# 4} Hard# } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] Test command err: 2024-11-21T17:46:50.002303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.002430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.002465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:50.002601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.002638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.002663Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpqL7Fqk/pdisk_1.dat 2024-11-21T17:46:50.124832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64066, node 1 TClient is connected to server localhost:18329 2024-11-21T17:46:50.254393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:50.254411Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:50.254415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:50.254505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:51.635612Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:51.638027Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:51.638074Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:51.638121Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:51.638276Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:51.638301Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpmO9sPt/pdisk_1.dat 2024-11-21T17:46:51.744535Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24463, node 3 TClient is connected to server localhost:1615 2024-11-21T17:46:51.889090Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:51.889113Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:51.889117Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:51.889233Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" status: YELLOW message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "YELLOW-e463-3-3-42" reason: "YELLOW-e463-3-3-43" reason: "YELLOW-e463-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size 
is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpmO9sPt/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpmO9sPt/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpmO9sPt/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2024-11-21T17:46:53.072996Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.073081Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:53.073111Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.073237Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.073275Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.073319Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpgzwGaM/pdisk_1.dat 2024-11-21T17:46:53.157111Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14439, node 5 TClient is connected to server localhost:7249 2024-11-21T17:46:53.265938Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:53.265959Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:53.265964Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:53.266075Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-a594-5-5-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpgzwGaM/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: "/home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpgzwGaM/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmpgzwGaM/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { 
pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } type: "STORAGE_GROUP" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T17:46:54.456392Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:690:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.456516Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.456524Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.456739Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:688:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.456799Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.456820Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001afd/r3tmp/tmp7SaaPI/pdisk_1.dat 2024-11-21T17:46:54.538764Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2123, node 7 TClient is connected to server localhost:25534 2024-11-21T17:46:54.948694Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.948716Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.948720Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.948839Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:54.951773Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:54.951805Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:54.997779Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2024-11-21T17:46:54.997964Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:55.061565Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T17:46:55.061752Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" 
level: 4 } location { id: 7 host: "::1" port: 12001 } >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> TTxDataShardMiniKQL::WriteEraseRead >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2024-11-21T17:46:50.808643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.808752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.808779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:50.808879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.808907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.808925Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ac8/r3tmp/tmpS0OIs9/pdisk_1.dat 2024-11-21T17:46:50.942460Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23537, node 1 TClient is connected to server localhost:2216 2024-11-21T17:46:51.072715Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:51.072747Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:51.072751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:51.072843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-4ff1" reason: "RED-9a33-ebec" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 id: "RED-9a33-4ff1" status: RED message: "Compute quota usage" location { database { name: "/Root" } } reason: "RED-9a33-3195" type: "COMPUTE" level: 2 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-1" reason: "YELLOW-9a33-e9e2-2" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "RED-9a33-3195" status: RED message: "Shards quota exhausted" location { database { name: "/Root" } } type: "COMPUTE_QUOTA" level: 3 id: "RED-9a33-ebec" status: RED message: "Storage usage over 90%" location { database { name: "/Root" } } type: "STORAGE" level: 2 2024-11-21T17:46:52.417967Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:52.421065Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.421138Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.421222Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.421512Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.421562Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ac8/r3tmp/tmpjLskGv/pdisk_1.dat 2024-11-21T17:46:52.523492Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10568, node 3 TClient is connected to server localhost:9537 2024-11-21T17:46:52.663707Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:52.663722Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:52.663726Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:52.663820Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "ORANGE-9a33-4ff1" reason: "RED-9a33-ebec" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 id: "ORANGE-9a33-4ff1" status: ORANGE message: "Compute quota usage" location { database { name: "/Root" } } reason: "ORANGE-9a33-3f66" type: "COMPUTE" level: 2 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "ORANGE-9a33-3f66" status: ORANGE message: "Shards quota usage is over 99%" location { database { name: "/Root" } } type: "COMPUTE_QUOTA" level: 3 id: "RED-9a33-ebec" status: RED message: "Storage usage over 90%" location { database { name: "/Root" } } type: "STORAGE" level: 2 2024-11-21T17:46:53.850324Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.850437Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:53.850478Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.850607Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.850659Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.850705Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ac8/r3tmp/tmpy13wFl/pdisk_1.dat 2024-11-21T17:46:53.983203Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26961, node 5 TClient is connected to server localhost:16596 2024-11-21T17:46:54.097314Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.097330Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.097332Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.097397Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-ebec" reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-4ff1" type: "DATABASE" level: 1 id: "YELLOW-9a33-4ff1" status: YELLOW message: "Compute quota usage" location { database { name: "/Root" } } reason: "YELLOW-9a33-d159" type: "COMPUTE" level: 2 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-d159" status: YELLOW message: "Shards quota usage is over 90%" location { database { name: "/Root" } } type: "COMPUTE_QUOTA" level: 3 id: "RED-9a33-ebec" status: RED message: "Storage usage over 90%" location { database { name: "/Root" } } type: "STORAGE" level: 2 2024-11-21T17:46:55.222993Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.223086Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:55.223101Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:55.223116Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.223142Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:55.223161Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ac8/r3tmp/tmpZZ4ja9/pdisk_1.dat 2024-11-21T17:46:55.320665Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25981, node 7 TClient is connected to server localhost:18065 2024-11-21T17:46:55.443835Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:55.443853Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:55.443858Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:55.443967Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-1c83" type: "DATABASE" level: 1 id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" type: "COMPUTE" level: 2 id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 id: "YELLOW-9a33-1c83" status: YELLOW message: "Storage usage over 75%" location { database { name: "/Root" } } type: "STORAGE" level: 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::StorageLimit50 [GOOD] Test command err: 2024-11-21T17:46:50.784452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:50.787800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.787953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.788002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:50.788429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:50.788449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a9a/r3tmp/tmprXb72H/pdisk_1.dat 2024-11-21T17:46:50.941577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25057, node 1 TClient is connected to server localhost:61292 2024-11-21T17:46:51.055237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:51.055255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:51.055259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:51.055329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-70fb" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-1" reason: "YELLOW-9a33-e9e2-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-8d1d" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-8d1d" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-4847-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-9a33-ef3e-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9a33-4847-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 
12001 } 2024-11-21T17:46:52.626216Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:52.629018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.629092Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.629181Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.629399Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.629428Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a9a/r3tmp/tmpCtGi1X/pdisk_1.dat 2024-11-21T17:46:52.721957Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15086, node 3 TClient is connected to server localhost:26910 2024-11-21T17:46:52.828914Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:52.828930Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:52.828933Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:52.829003Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:53.876076Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.876202Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:53.876265Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.876412Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.876470Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.876521Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a9a/r3tmp/tmpXthnZ7/pdisk_1.dat 2024-11-21T17:46:53.969435Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9406, node 5 TClient is connected to server localhost:20587 2024-11-21T17:46:54.099218Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.099234Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.099239Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.099325Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:55.174387Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.174470Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:55.174490Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:55.174598Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.174666Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:55.174686Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001a9a/r3tmp/tmpeTvTkz/pdisk_1.dat 2024-11-21T17:46:55.288063Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21413, node 7 TClient is connected to server localhost:17802 2024-11-21T17:46:55.404945Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:55.404963Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:55.404965Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:55.405004Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration >> BSCRestartPDisk::RestartOneByOne [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T17:46:55.471548Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.471556Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.471560Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.488355Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.488593Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:46:55.488614Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.492653Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.492659Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.492662Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.492754Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.492842Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:46:55.492852Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493063Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493066Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493068Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.493134Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T17:46:55.493146Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493149Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493282Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T17:46:55.493688Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493693Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493695Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.493814Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:46:55.493829Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493834Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.493857Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T17:46:55.494851Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:46:55.494856Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:46:55.494859Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.496285Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.496438Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.497475Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:46:55.497668Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.497731Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T17:46:55.498198Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T17:46:55.498269Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.498275Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:55.498278Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:46:55.498280Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T17:46:55.498284Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T17:46:55.498287Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T17:46:55.498290Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T17:46:55.498293Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T17:46:55.498303Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T17:46:55.498305Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T17:46:55.498307Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T17:46:55.498309Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T17:46:55.498311Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T17:46:55.498313Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T17:46:55.498315Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T17:46:55.498317Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T17:46:55.498340Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T17:46:55.498342Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T17:46:55.498345Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T17:46:55.498348Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T17:46:55.498351Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T17:46:55.498354Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T17:46:55.498357Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T17:46:55.498360Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T17:46:55.498364Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T17:46:55.498367Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T17:46:55.498370Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T17:46:55.498373Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T17:46:55.498376Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T17:46:55.498379Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T17:46:55.498382Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T17:46:55.498385Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T17:46:55.498396Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T17:46:55.498399Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T17:46:55.498402Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T17:46:55.498405Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T17:46:55.498409Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T17:46:55.498412Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T17:46:55.498415Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T17:46:55.498418Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T17:46:55.498421Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T17:46:55.498423Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T17:46:55.498426Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T17:46:55.498428Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T17:46:55.498431Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T17:46:55.498436Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T17:46:55.498439Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T17:46:55.498441Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T17:46:55.498444Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T17:46:55.498447Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T17:46:55.498457Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T17:46:55.498556Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T17:46:55.498596Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T17:46:55.498601Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T17:46:55.498604Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T17:46:55.498607Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T17:46:55.498610Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T17:46:55.498613Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T17:46:55.498615Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T17:46:55.498618Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T17:46:55.498622Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T17:46:55.498624Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T17:46:55.498627Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T17:46:55.498630Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T17:46:55.498632Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T17:46:55.498635Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T17:46:55.498638Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T17:46:55.498641Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T17:46:55.498645Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T17:46:55.498661Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T17:46:55.498664Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T17:46:55.498667Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T17:46:55.498670Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T17:46:55.498673Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T17:46:55.498676Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T17:46:55.498678Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T17:46:55.498681Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T17:46:55.498684Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T17:46:55.498687Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T17:46:55.498690Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T17:46:55.498692Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T17:46:55.498695Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T17:46:55.498698Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T17:46:55.498701Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T17:46:55.498707Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T17:46:55.498711Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T17:46:55.498714Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T17:46:55.498717Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T17:46:55.498720Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T17:46:55.498722Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T17:46:55.498725Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T17:46:55.498729Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T17:46:55.498731Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T17:46:55.498734Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T17:46:55.498736Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T17:46:55.498739Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T17:46:55.498741Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T17:46:55.498744Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T17:46:55.498746Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T17:46:55.498749Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T17:46:55.498751Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T17:46:55.498754Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T17:46:55.498758Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T17:46:55.498784Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:46:55.499158Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.499162Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.499165Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.499217Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:46:55.499310Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.499353Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.499427Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.599729Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.600283Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:46:55.600306Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.600313Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:46:55.600344Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:46:55.800895Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T17:46:55.904216Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:46:55.904310Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:46:55.904361Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:46:55.904651Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.904655Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.904657Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.904704Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.904801Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.904825Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.904935Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:56.005220Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.005291Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:46:56.005314Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:56.005320Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:46:56.005348Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T17:46:56.005387Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:46:56.005444Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:46:56.005483Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T17:46:56.005536Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |76.7%| [TA] $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T17:46:55.719096Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.719103Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.719106Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.719218Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.719359Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.725073Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.726301Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.726649Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.726727Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.726802Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T17:46:55.726817Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.726851Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.726857Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T17:46:55.726865Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:46:55.726869Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:46:55.736746Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.736753Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.736756Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.736827Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.736910Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.736941Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.736985Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T17:46:55.737099Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.737114Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:46:55.737159Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:46:55.737169Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:46:55.737194Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.737198Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:55.737203Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:46:55.737227Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.737231Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:46:55.737233Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:46:55.737234Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:55.737244Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.737255Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:46:55.737256Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:46:55.737258Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:46:55.737264Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.737267Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:46:55.737268Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:46:55.737270Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:55.737278Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T17:46:55.737540Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.737543Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.737545Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.737593Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.737657Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.737687Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.737723Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T17:46:55.737800Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:46:55.737814Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:46:55.737858Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:46:55.737871Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:46:55.737897Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T17:46:55.737901Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:46:55.737912Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.737916Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:55.737917Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:46:55.737922Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.737925Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:46:55.737926Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:46:55.737933Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2024-11-21T17:46:55.737936Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:46:55.737938Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:56.141919Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2024-11-21T17:46:56.169879Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:46:56.169885Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:46:56.169889Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:56.169971Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:46:56.170077Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:56.170122Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:46:56.171712Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T17:46:56.207437Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T17:46:56.207512Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:56.207523Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:46:56.207527Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:46:56.207529Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T17:46:56.207533Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T17:46:56.207535Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T17:46:56.207538Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T17:46:56.207540Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T17:46:56.207543Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T17:46:56.207546Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T17:46:56.207558Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T17:46:56.207620Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:46:56.209201Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2024-11-21T17:46:56.210259Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.210264Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.210267Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:56.210317Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:56.210382Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:56.210409Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.210464Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:56.210527Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T17:46:56.210708Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.210711Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.210713Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:56.210754Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:46:56.210821Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:56.210862Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.210975Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.211029Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:56.211082Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:56.211091Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:46:56.211140Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] Test command err: 2024-11-21T17:46:50.712592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.712693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.712723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:50.712841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.712873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.712892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmpbWrLcd/pdisk_1.dat 2024-11-21T17:46:50.857599Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5499, node 1 TClient is connected to server localhost:21713 2024-11-21T17:46:50.975485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:50.975502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:50.975505Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:50.975567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:52.325925Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.325994Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.326017Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.326111Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.326170Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.326189Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmpk0javs/pdisk_1.dat 2024-11-21T17:46:52.439079Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16273, node 3 TClient is connected to server localhost:5184 2024-11-21T17:46:52.586426Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:52.586444Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:52.586448Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:52.586506Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-d6d1" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-d6d1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-9a33-258e-ab18" type: "STORAGE" level: 2 } issue_log { id: "RED-9a33-258e-ab18" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-9a33-819b-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-99d2-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmpk0javs/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmpk0javs/pdisk_1.dat" } } } } } } 
type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmpk0javs/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-9a33-819b-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2024-11-21T17:46:53.779108Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.779241Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:53.779291Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.779452Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.779513Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.779569Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmpEUkP5P/pdisk_1.dat 2024-11-21T17:46:53.901200Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22266, node 5 TClient is connected to server localhost:62919 2024-11-21T17:46:54.053864Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.053880Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.053882Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.053945Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:54.885144Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:287:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.885196Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.885215Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmpT09CFm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4628, node 7 TClient is connected to server localhost:16953 self_check_result: EMERGENCY issue_log { id: "RED-f65b-f489" status: RED message: "Database has compute issues" location { database { name: "/Root/serverless" } } reason: "RED-f65b-7469" type: "DATABASE" level: 1 } issue_log { id: "RED-f65b-7469" status: RED message: "There are no compute nodes" location { database { name: "/Root/serverless" } } type: "COMPUTE" level: 2 } database_status { name: "/Root/serverless" overall: RED storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "7-1-55" overall: GREEN pdisk { id: "7-1" overall: GREEN } } } } } compute { overall: RED } } location { id: 7 host: "::1" port: 12001 } 2024-11-21T17:46:55.668267Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:330:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.668335Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:55.668351Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ad7/r3tmp/tmp2kaX3j/pdisk_1.dat 2024-11-21T17:46:55.769101Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32626, node 8 TClient is connected to server localhost:2501 2024-11-21T17:46:55.878472Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:55.878491Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:55.878495Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:55.878568Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T17:46:55.901785Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.901792Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.901796Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.901896Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:46:55.901909Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.901913Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.902513Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007124s 2024-11-21T17:46:55.902602Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.902726Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:46:55.902739Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903038Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903041Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903044Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.903087Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T17:46:55.903090Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903092Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903101Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008900s 2024-11-21T17:46:55.903142Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.903160Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:46:55.903168Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903325Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903328Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903330Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.903476Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:46:55.903481Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903483Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903492Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.214151s 2024-11-21T17:46:55.903553Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.903630Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:46:55.903638Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903821Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903825Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903827Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.903880Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:46:55.903888Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903890Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.903901Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.202161s 2024-11-21T17:46:55.903968Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.904022Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:46:55.904029Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.904183Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.904186Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.904188Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.904248Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.904298Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.905559Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.905613Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T17:46:55.905620Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.905621Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.905634Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.191308s 2024-11-21T17:46:55.905700Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:46:55.905919Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.905921Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.905924Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.905987Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.906079Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.906102Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.906156Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:56.006452Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.006526Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:46:56.006545Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:56.006550Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:46:56.006567Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:46:56.106890Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:46:56.106955Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:46:56.107276Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.107281Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.107285Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:56.107370Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:56.107504Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:56.107574Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.107664Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:56.207934Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.207978Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:46:56.207991Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:56.207995Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2024-11-21T17:46:56.208015Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T17:46:56.208039Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:46:56.208055Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:46:56.208087Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:46:56.208113Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TTxDataShardMiniKQL::Write >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> YdbProxy::CreateTopic >> YdbProxy::CopyTable >> YdbProxy::DropTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 4258697805236208723 >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> THealthCheckTest::NoStoragePools [GOOD] >> YdbProxy::ListDirectory >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats >> YdbProxy::CreateTable >> YdbProxy::ReadTopic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] Test command err: 2024-11-21T17:46:50.657674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:50.661072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:50.661235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:50.661289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:50.661733Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:50.661751Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ae8/r3tmp/tmp5wlABk/pdisk_1.dat 2024-11-21T17:46:50.804935Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10494, node 1 TClient is connected to server localhost:19150 2024-11-21T17:46:50.929098Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:50.929120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:50.929124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:50.929193Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:52.325922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.325994Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.326017Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.326111Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.326170Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.326189Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ae8/r3tmp/tmpJrylG5/pdisk_1.dat 2024-11-21T17:46:52.423418Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2406, node 3 TClient is connected to server localhost:25923 2024-11-21T17:46:52.587379Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:52.587397Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:52.587400Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:52.587454Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:53.729340Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.729446Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:53.729454Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:53.729531Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:53.729583Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:53.729603Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ae8/r3tmp/tmp8TZnml/pdisk_1.dat 2024-11-21T17:46:53.836745Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29126, node 5 TClient is connected to server localhost:16362 2024-11-21T17:46:53.971439Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:53.971458Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:53.971463Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:53.971526Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T17:46:54.981463Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.981548Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.981570Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.981676Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.981755Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.981783Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ae8/r3tmp/tmpFDQ7Qv/pdisk_1.dat 2024-11-21T17:46:55.065801Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2444, node 7 TClient is connected to server localhost:28544 2024-11-21T17:46:55.190623Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:55.190644Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:55.190648Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:55.190707Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 7 host: "::1" port: 12001 } 2024-11-21T17:46:56.109767Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:56.109819Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:56.109833Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ae8/r3tmp/tmptOvgXn/pdisk_1.dat 2024-11-21T17:46:56.206429Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30048, node 9 TClient is connected to server localhost:8764 2024-11-21T17:46:56.326045Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:56.326066Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:56.326070Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:56.326184Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } database_status { name: "/Root" overall: YELLOW storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN } } } compute { overall: YELLOW nodes { id: "9" overall: YELLOW load { overall: YELLOW load: 152.044434 cores: 64 } } } } database_status { name: "/Root/shared" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "9-1-55" overall: GREEN pdisk { id: "9-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "10" overall: GREY } } } location { id: 9 host: "::1" port: 12001 } >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> YdbProxy::RemoveDirectory ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: 2024-11-21T17:46:51.103344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:51.106867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:51.107031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:51.107080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:51.107515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:51.107534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c4/r3tmp/tmpeIhOkv/pdisk_1.dat 2024-11-21T17:46:51.226616Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63742, node 1 TClient is connected to server localhost:23346 2024-11-21T17:46:51.383104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:51.383122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:51.383125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:51.383189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:52.816491Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.816561Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.816583Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.816689Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.816756Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.816776Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c4/r3tmp/tmpIqIYh6/pdisk_1.dat 2024-11-21T17:46:52.900331Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2944, node 3 TClient is connected to server localhost:27020 2024-11-21T17:46:53.001677Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:53.001694Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:53.001697Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:53.001744Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:54.070886Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.071005Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.071052Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.071190Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.071245Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.071298Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c4/r3tmp/tmpKu6Bh4/pdisk_1.dat 2024-11-21T17:46:54.186267Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11338, node 5 TClient is connected to server localhost:20858 2024-11-21T17:46:54.289244Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.289266Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.289271Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.289377Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:55.276729Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.276825Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:55.276849Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:55.276984Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.277071Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:55.277096Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c4/r3tmp/tmpPSbYyS/pdisk_1.dat 2024-11-21T17:46:55.382235Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6211, node 7 TClient is connected to server localhost:29086 2024-11-21T17:46:55.506394Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:55.506414Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:55.506419Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:55.506482Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:56.358118Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:56.358190Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:56.358212Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c4/r3tmp/tmpg2vaiF/pdisk_1.dat 2024-11-21T17:46:56.437253Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29636, node 9 TClient is connected to server localhost:14016 2024-11-21T17:46:56.543945Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:56.543966Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:56.543970Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:56.544080Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-06ec-be81" status: RED message: "Database has storage issues" location { database { name: "/Root/database" } } reason: "RED-06ec-caea" type: "DATABASE" level: 1 } issue_log { id: "RED-06ec-caea" status: RED message: "There are no storage pools" location { database { name: "/Root/database" } } type: "STORAGE" level: 2 } database_status { name: "/Root/database" overall: RED storage { overall: RED } compute { overall: GREEN nodes { id: "10" overall: GREEN load { overall: GREEN cores: 64 } } } } location { id: 9 host: "::1" port: 12001 } >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> YdbProxy::MakeDirectory >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic >> YdbProxy::DescribePath >> YdbProxy::DescribeConsumer [GOOD] >> TTxDataShardMiniKQL::ReadConstant ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2024-11-21T17:46:39.093182Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1732211199093173 2024-11-21T17:46:39.224861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790451867471977:2064];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.225042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.227147Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790453727290504:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e33/r3tmp/tmpYfZn1o/pdisk_1.dat 2024-11-21T17:46:39.256526Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:39.257808Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.261067Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:39.277576Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:39.280092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:39.280110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:39.280892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:39.281155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4800, node 1 2024-11-21T17:46:39.290721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e33/r3tmp/yandexneeYJX.tmp 2024-11-21T17:46:39.290734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e33/r3tmp/yandexneeYJX.tmp 2024-11-21T17:46:39.295783Z INFO: TTestServer started on Port 8236 GrpcPort 4800 2024-11-21T17:46:39.297961Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e33/r3tmp/yandexneeYJX.tmp 2024-11-21T17:46:39.298042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8236 PQClient connected to localhost:4800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:39.317372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:39.324071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:39.324104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:39.328867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... 2024-11-21T17:46:39.547320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790453727290650:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.547353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790453727290625:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.547375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.548525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:46:39.552014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790453727290654:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:46:39.613247Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790451867472904:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:39.613371Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFmNWMxZS0xMjg1NDZlYi02OGNkNzRhZC1iYjZkYTk3Nw==, ActorId: [1:7439790451867472878:2299], ActorState: ExecuteState, TraceId: 01jd7xakjxfekekwvtsqpnvzff, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:39.613700Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790453727290697:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:39.613784Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODI1YjBlNTItZDc0YWQyOWMtZjg3YTM2YjUtOThiZjg0NDU=, ActorId: [2:7439790453727290623:2277], ActorState: ExecuteState, TraceId: 01jd7xakht3hd6b73em3109qfb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:39.613844Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:39.613918Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:39.614192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.678644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.746839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:4800", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:39.789806Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd7xakrrce7rb074f1s36tv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFmYjlkZGQtYmEyNzUwMDgtZjdiNTFjLWJhNWQyYWM3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439790451867473328:2941] 2024-11-21T17:46:44.228346Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790451867471977:2064];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.228374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:44.236305Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790453727290504:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.236370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:44.893839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:4800 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:44.916920Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:4800 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 ... 
tId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:23752 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:52.768486Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:23752 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:53.270654Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:23752 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:53.773820Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2024-11-21T17:46:53.777856Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2024-11-21T17:46:53.778107Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2024-11-21T17:46:53.778115Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:23752 2024-11-21T17:46:53.778516Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:46:53.778977Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:46:53.778997Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T17:46:53.779444Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:46:53.779483Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:37846 2024-11-21T17:46:53.779490Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37846 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:46:53.779494Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:46:53.779980Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:46:53.780026Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:46:53.780028Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:46:53.780030Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:46:53.780036Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:46:53.780553Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:46:53.806114Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:46:53.806300Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790511028113534:2464] connected; active server actors: 1 2024-11-21T17:46:53.806320Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:46:53.806325Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:46:53.808974Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790511028113534:2464] disconnected; active server actors: 1 
2024-11-21T17:46:53.808987Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790511028113534:2464] disconnected no session 2024-11-21T17:46:53.824884Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:46:53.824900Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:46:53.824904Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790511028113504:2464] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:46:53.824914Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:46:53.826695Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T17:46:53.826680Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:53.826699Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439790511028113551:2464], now have 1 active actors on pipe 2024-11-21T17:46:53.826819Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:53.826825Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:53.826852Z node 4 :PERSQUEUE INFO: new Cookie src|29a876a1-2e9ff33d-a6055e40-18f11004_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:46:53.826885Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:46:53.826901Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:53.828681Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:53.828693Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:53.828722Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:53.828848Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|29a876a1-2e9ff33d-a6055e40-18f11004_0 2024-11-21T17:46:53.832483Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211213832 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:53.832529Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|29a876a1-2e9ff33d-a6055e40-18f11004_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:46:53.832873Z :INFO: [] MessageGroupId [src] SessionId [src|29a876a1-2e9ff33d-a6055e40-18f11004_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:46:53.832881Z :INFO: [] MessageGroupId [src] SessionId [src|29a876a1-2e9ff33d-a6055e40-18f11004_0] Write session will now close 2024-11-21T17:46:53.832887Z :DEBUG: [] MessageGroupId [src] SessionId [src|29a876a1-2e9ff33d-a6055e40-18f11004_0] Write session: aborting 2024-11-21T17:46:53.832985Z :INFO: [] MessageGroupId [src] SessionId [src|29a876a1-2e9ff33d-a6055e40-18f11004_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:46:53.832991Z :DEBUG: [] MessageGroupId [src] SessionId [src|29a876a1-2e9ff33d-a6055e40-18f11004_0] Write session: destroy 2024-11-21T17:46:53.833972Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|29a876a1-2e9ff33d-a6055e40-18f11004_0 grpc read done: success: 0 data: 2024-11-21T17:46:53.833984Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|29a876a1-2e9ff33d-a6055e40-18f11004_0 grpc read failed 2024-11-21T17:46:53.833991Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|29a876a1-2e9ff33d-a6055e40-18f11004_0 grpc closed 2024-11-21T17:46:53.834006Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|29a876a1-2e9ff33d-a6055e40-18f11004_0 is DEAD 2024-11-21T17:46:53.834279Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:46:53.834554Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:53.834577Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790511028113551:2464] destroyed 2024-11-21T17:46:53.834591Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2024-11-21T17:46:53.893723Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. 
Description: >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> YdbProxy::AlterTable [GOOD] >> YdbProxy::DescribeTopic [GOOD] >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> YdbProxy::DropTopic [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2024-11-21T17:46:56.660622Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790525770984060:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.660665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0014a4/r3tmp/tmprj3pXo/pdisk_1.dat 2024-11-21T17:46:56.713040Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18977 TServer::EnableGrpc on GrpcPort 11707, node 1 2024-11-21T17:46:56.735712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:56.735726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:56.735728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:56.735761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:56.760709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.760731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.761814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:56.788741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:57.195465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790527514873983:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0014a4/r3tmp/tmpLiy27t/pdisk_1.dat 2024-11-21T17:46:57.200106Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:57.209780Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28879 TServer::EnableGrpc on GrpcPort 1559, node 2 2024-11-21T17:46:57.244454Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.244469Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.244470Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.244508Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.295186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.295213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.296855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:57.298912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:57.300078Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> YdbProxy::CopyTables [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] >> YdbProxy::AlterTopic >> YdbProxy::OAuthToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2024-11-21T17:46:57.367899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790530380417945:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.368055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001393/r3tmp/tmpDZMJQ3/pdisk_1.dat 2024-11-21T17:46:57.410757Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2096 TServer::EnableGrpc on GrpcPort 23939, node 1 2024-11-21T17:46:57.443110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.443121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.443123Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.443156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2096 2024-11-21T17:46:57.465962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.465999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.467111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.517370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:57.519448Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:57.663391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.723555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2024-11-21T17:46:57.099139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790526780692458:2130];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.099199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013ee/r3tmp/tmpOaVbem/pdisk_1.dat 2024-11-21T17:46:57.164459Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23821 TServer::EnableGrpc on GrpcPort 16672, node 1 2024-11-21T17:46:57.194483Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.194495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.194497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.194540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23821 2024-11-21T17:46:57.208410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.208431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.208874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.238535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:57.241572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:57.514678Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790530746659770:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013ee/r3tmp/tmpxGHMKP/pdisk_1.dat 2024-11-21T17:46:57.517659Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:57.534201Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10232 TServer::EnableGrpc on GrpcPort 5528, node 2 2024-11-21T17:46:57.562738Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.562748Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.562750Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.562782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.617428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.617452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.617753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:57.618531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:57.618800Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:57.676612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.678373Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2024-11-21T17:46:57.678385Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T17:46:57.680659Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2024-11-21T17:46:57.680677Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2024-11-21T17:46:56.751734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790525794045792:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.751758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001443/r3tmp/tmpieuZzw/pdisk_1.dat 2024-11-21T17:46:56.806673Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5673 TServer::EnableGrpc on GrpcPort 12620, node 1 2024-11-21T17:46:56.822931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:56.822947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:56.822948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:56.822979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:56.851654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.851687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.852903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:56.877316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:57.089464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.168404Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001443/r3tmp/tmpvseKO3/pdisk_1.dat 2024-11-21T17:46:57.452206Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790529363705949:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.465759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:57.467272Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21731 TServer::EnableGrpc on GrpcPort 11293, node 2 2024-11-21T17:46:57.487874Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.487887Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.487889Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.487918Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:57.555720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.555753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.556080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.557291Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:46:57.560447Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> TTxDataShardMiniKQL::ReadSpecialColumns >> YdbProxy::StaticCreds [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2024-11-21T17:46:57.481593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790526815049429:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.481649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001301/r3tmp/tmpOhQgaE/pdisk_1.dat 2024-11-21T17:46:57.548674Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9190 TServer::EnableGrpc on GrpcPort 29016, node 1 2024-11-21T17:46:57.565469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.565482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.565483Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.565512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:57.580397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.580427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.581985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9190 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.599093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:57.812442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790530220687152:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.812600Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001301/r3tmp/tmpUfPzMH/pdisk_1.dat 2024-11-21T17:46:57.819677Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6793 TServer::EnableGrpc on GrpcPort 12621, node 2 2024-11-21T17:46:57.854521Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.854533Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.854535Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.854566Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:57.914510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.914540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.914921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.916002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:46:57.920611Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 9117302446772109996 >> YdbProxy::AlterTopic [GOOD] >> YdbProxy::DescribeTable [GOOD] >> TTxDataShardMiniKQL::WriteKeyTooLarge >> YdbProxy::CreateCdcStream [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2024-11-21T17:46:51.333173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:51.335765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:51.335874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:51.335905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:51.336178Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:51.336188Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019ba/r3tmp/tmpB9EhoM/pdisk_1.dat 2024-11-21T17:46:51.453006Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5443, node 1 TClient is connected to server localhost:16338 2024-11-21T17:46:51.584202Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:51.584220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:51.584225Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:51.595597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:52.845815Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.845862Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.845876Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.845939Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.845976Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.845988Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019ba/r3tmp/tmpr4YGZQ/pdisk_1.dat 2024-11-21T17:46:52.922035Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8829, node 3 TClient is connected to server localhost:20393 2024-11-21T17:46:53.020383Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:53.020399Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:53.020402Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:53.020443Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:54.320011Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.320123Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.320158Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.320481Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:684:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.320621Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.320645Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019ba/r3tmp/tmpFthRgX/pdisk_1.dat 2024-11-21T17:46:54.417289Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13357, node 5 TClient is connected to server localhost:24928 2024-11-21T17:46:54.892744Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.892769Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.892774Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.893131Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:54.897845Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:54.897883Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:54.938872Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2024-11-21T17:46:54.939062Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" reason: "YELLOW-9a33-e9e2-7" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T17:46:56.369173Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:745:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:56.369232Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:56.369266Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:56.369375Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:743:2327], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:56.369443Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:56.369462Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019ba/r3tmp/tmptQUwR9/pdisk_1.dat 2024-11-21T17:46:56.462912Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30293, node 8 TClient is connected to server localhost:61519 2024-11-21T17:46:56.902154Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:56.902176Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:56.902180Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:56.902419Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:56.902693Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1223:2663]) [8:1478:2667] 2024-11-21T17:46:56.902748Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2024-11-21T17:46:56.904919Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2024-11-21T17:46:56.904953Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T17:46:56.905038Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2024-11-21T17:46:56.905048Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T17:46:56.905077Z node 8 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2024-11-21T17:46:56.905086Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T17:46:56.905122Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2024-11-21T17:46:56.905432Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T17:46:56.907119Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TE ... 
y 1.000002048 2024-11-21T17:46:56.957374Z node 8 :HIVE DEBUG: HIVE#72057594037968897 [FBN] Finding best node for tablet PersQueue.72075186224037888.Leader.0 2024-11-21T17:46:56.957381Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 family {PersQueue.72075186224037888.Leader.0 Booting} 2024-11-21T17:46:56.957390Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 node 8 is not alive 2024-11-21T17:46:56.957409Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected usage 0.000005171 of node 10 2024-11-21T17:46:56.957413Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2024-11-21T17:46:56.957418Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2024-11-21T17:46:56.957423Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 10 2024-11-21T17:46:56.957429Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 10) 2024-11-21T17:46:56.957438Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2024-11-21T17:46:56.957450Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2024-11-21T17:46:56.957457Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2024-11-21T17:46:56.957468Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:46:56.957487Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2024-11-21T17:46:56.957535Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 10 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.066024Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.066024Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.066024Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:46:56.968892Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2024-11-21T17:46:56.968942Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:46:56.968960Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [10:1456:2334]} 2024-11-21T17:46:56.969119Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 10 Cookie 72075186224037888 2024-11-21T17:46:56.988638Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T17:46:56.988677Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [10:1456:2334] 2024-11-21T17:46:56.988687Z node 8 
:HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 10) 2024-11-21T17:46:56.988698Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T17:46:56.988724Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2024-11-21T17:46:56.988730Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2024-11-21T17:46:56.988735Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2024-11-21T17:46:56.988738Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2024-11-21T17:46:56.988742Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2024-11-21T17:46:56.988766Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T17:46:56.988769Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2024-11-21T17:46:56.988803Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2024-11-21T17:46:56.988808Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:46:56.988813Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T17:46:56.988817Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:46:56.999857Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [8:1222:2662]} 2024-11-21T17:46:56.999891Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:46:57.274411Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 10: Status: 2 2024-11-21T17:46:57.274446Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Execute 2024-11-21T17:46:57.274452Z node 8 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T17:46:57.274474Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2024-11-21T17:46:57.274516Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2024-11-21T17:46:57.274544Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 10) 2024-11-21T17:46:57.274553Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T17:46:57.274572Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2024-11-21T17:46:57.274579Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2024-11-21T17:46:57.274590Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 10 2024-11-21T17:46:57.274596Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2024-11-21T17:46:57.274603Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T17:46:57.274606Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2024-11-21T17:46:57.274746Z node 8 :HIVE DEBUG: HIVE#72057594037968897 
THive::TTxKillNode(10)::Execute 2024-11-21T17:46:57.274765Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:46:57.274770Z node 8 :HIVE TRACE: Node(10) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2024-11-21T17:46:57.274775Z node 8 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 10) 2024-11-21T17:46:57.274780Z node 8 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [8:1521:2673] 2024-11-21T17:46:57.274784Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(10): waiting 3600.000000s 2024-11-21T17:46:57.274980Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:1457:2334]) [8:1521:2673] 2024-11-21T17:46:57.275507Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1890:2694]) [8:1891:2699] 2024-11-21T17:46:57.276198Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([8:1890:2694]) [8:1891:2699] 2024-11-21T17:46:57.276759Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([11:1863:2333]) [8:1926:2701] 2024-11-21T17:46:57.278495Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [11:1862:2333] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T17:46:57.278521Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(11)::Execute 2024-11-21T17:46:57.278552Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.278561Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:57.278565Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T17:46:57.278569Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T17:46:57.278572Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T17:46:57.278582Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.278731Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 11 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: 
"YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-10" reason: "YELLOW-9a33-e9e2-11" reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 8 host: "::1" port: 12001 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2024-11-21T17:46:57.366848Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790530600641045:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.366918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00139a/r3tmp/tmp4lqncd/pdisk_1.dat 2024-11-21T17:46:57.432998Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19311 TServer::EnableGrpc on GrpcPort 15952, node 1 2024-11-21T17:46:57.459908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.459921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.459922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.459966Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:57.472481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.472503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:19311 2024-11-21T17:46:57.476593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.497721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:57.499733Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:57.519225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00139a/r3tmp/tmpBPaWDr/pdisk_1.dat 2024-11-21T17:46:57.848797Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790528993985360:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.851538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:57.864118Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61984 TServer::EnableGrpc on GrpcPort 61212, node 2 2024-11-21T17:46:57.896486Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.896502Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.896507Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.896556Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61984 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.948654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.948702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.952925Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:57.953388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:57.957045Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:57.968722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211218003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211218003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... (TRUNCATED) 2024-11-21T17:46:58.123415Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] |76.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2024-11-21T17:46:56.689082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790526214405970:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.689140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00140b/r3tmp/tmpsrIo1C/pdisk_1.dat 2024-11-21T17:46:56.728809Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24551 TServer::EnableGrpc on GrpcPort 4295, node 1 2024-11-21T17:46:56.758360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:56.758378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:56.758380Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:56.758432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24551 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:56.788955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.788990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.790098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:56.812392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:56.972916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00140b/r3tmp/tmpqZFqqv/pdisk_1.dat 2024-11-21T17:46:57.404319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:57.406230Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8921 TServer::EnableGrpc on GrpcPort 26863, node 2 2024-11-21T17:46:57.424938Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.424949Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.424951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.424980Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8921 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:46:57.495179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.495200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.495511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.496456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:57.497628Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:57.750357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.760814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00140b/r3tmp/tmpY38cRr/pdisk_1.dat 2024-11-21T17:46:58.207610Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790532027329645:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:58.207937Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:58.222470Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2218 TServer::EnableGrpc on GrpcPort 62044, node 3 2024-11-21T17:46:58.259838Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:58.259853Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:58.259856Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:58.259901Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2218 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:58.308704Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:58.308739Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:58.309123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:58.309662Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:58.370845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> Compression::WriteRAW ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2024-11-21T17:46:57.513308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790528696814688:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.513343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001377/r3tmp/tmpPpCprE/pdisk_1.dat 2024-11-21T17:46:57.590195Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14927 TServer::EnableGrpc on GrpcPort 14323, node 1 2024-11-21T17:46:57.610482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.610497Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.610498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.610539Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:57.612569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.612606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.613736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14927 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.644747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001377/r3tmp/tmpmsoT0Q/pdisk_1.dat 2024-11-21T17:46:57.984353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:57.993541Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4610 TServer::EnableGrpc on GrpcPort 26112, node 2 2024-11-21T17:46:58.072505Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:58.072519Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:58.072521Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:58.072560Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:58.088698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:58.088727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:58.092725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:58.175869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:58.181073Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:58.365596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013d5/r3tmp/tmpYBFvm8/pdisk_1.dat 2024-11-21T17:46:57.304379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:57.330799Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:57.347157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.347189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.349225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31268 TServer::EnableGrpc on GrpcPort 4975, node 1 2024-11-21T17:46:57.382120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.382132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.382134Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.382161Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.418179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:57.423711Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:57.610381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013d5/r3tmp/tmpZgdv9G/pdisk_1.dat 2024-11-21T17:46:58.048378Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:58.054218Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12032 TServer::EnableGrpc on GrpcPort 2742, node 2 2024-11-21T17:46:58.100794Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:58.100806Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:58.100808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:58.100844Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:58.112576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:58.112606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:58.116717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:58.197661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:58.199653Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:58.452012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2024-11-21T17:46:57.805553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:57.805575Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:57.805590Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:57.807975Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:57.808093Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:57.808135Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:57.808750Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:57.814509Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:57.814649Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:57.814789Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:57.814798Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:57.814804Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:57.814843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:57.817830Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:57.817889Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:57.817925Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:46:57.817930Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:57.817935Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:57.817940Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:57.818016Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:57.818031Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:57.818064Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:57.818081Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:57.818129Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:57.818135Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:57.818142Z node 1 :TX_DATASHARD 
TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:57.818147Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:57.818151Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:57.818155Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:57.818161Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:57.825705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:57.825728Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:57.825745Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:57.826118Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:57.826132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:57.826152Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:57.826191Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:57.826201Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:57.826210Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:57.826219Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:57.826223Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:57.826228Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:57.826232Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:57.826297Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:57.826302Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:57.826305Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:57.826308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:57.826318Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:57.826321Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:57.826324Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:57.826327Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:57.826331Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:57.847420Z node 1 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:57.847447Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:57.847454Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:57.847466Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:57.847481Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:57.847616Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:57.847638Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:57.847645Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:57.847667Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:57.847671Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:57.847716Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:57.847727Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:57.847731Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:57.847735Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:57.848347Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:57.848368Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:57.848427Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:57.848433Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:57.848441Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:57.848448Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:57.848452Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:57.848460Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:46:57.848465Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:57.848470Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:57.848473Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:57.848477Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:57.848480Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 
9437184 on unit LoadTxDetails 2024-11-21T17:46:57.848524Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:46:57.848528Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:57.848531Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:57.848534Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:57.848537Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:57.848550Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:57.848554Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:46:57.848557Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:57.848559Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:57.848573Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:46:57.848576Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:46:57.848579Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:46:57.848584Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:57.848587Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:57.848590Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
ARD TRACE: StateInit, received event# 268828672, Sender [3:224:2220], Recipient [3:226:2221]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:58.640819Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [3:224:2220], Recipient [3:226:2221]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:58.640906Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:224:2220], Recipient [3:226:2221]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:58.641860Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:226:2221] 2024-11-21T17:46:58.641911Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:58.642228Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2024-11-21T17:46:58.664468Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:58.664514Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:58.664762Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:58.664779Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:58.664786Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:58.664840Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:58.664854Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2024-11-21T17:46:58.664873Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2024-11-21T17:46:58.664897Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2024-11-21T17:46:58.664928Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:270:2258] 2024-11-21T17:46:58.664933Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:58.664938Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2024-11-21T17:46:58.664942Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.664990Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2024-11-21T17:46:58.665001Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2024-11-21T17:46:58.665043Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:226:2221], Recipient [3:226:2221]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.665049Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.665100Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:58.665111Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:58.665120Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:226:2221]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2024-11-21T17:46:58.665124Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T17:46:58.665128Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2024-11-21T17:46:58.665132Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.665172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged 
for unknown txId 1 message# Source { RawX1: 226 RawX2: 12884904109 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2024-11-21T17:46:58.665179Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.665184Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:58.665191Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:58.665195Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:58.665198Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:58.665202Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:58.665207Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:58.665223Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:226:2221]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2024-11-21T17:46:58.665227Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T17:46:58.665231Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2024-11-21T17:46:58.665236Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 9437184: waitStep# 0 readStep# 0 observedStep# 1000001 2024-11-21T17:46:58.665244Z node 3 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 9437184 promoting UnprotectedReadEdge to v0/18446744073709551615 2024-11-21T17:46:58.665254Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:268:2256], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:46:58.665257Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T17:46:58.665267Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:120:2146], Recipient [3:226:2221]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2024-11-21T17:46:58.665271Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T17:46:58.665276Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2024-11-21T17:46:58.665280Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2024-11-21T17:46:58.676206Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:268:2256], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:46:58.676246Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:46:58.739374Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T17:46:58.739396Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T17:46:58.739478Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:278:2264], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.739483Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.739490Z node 3 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 9437184, clientId# [3:276:2263], serverId# [3:278:2264], sessionId# [0:0:0] 2024-11-21T17:46:58.739525Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2024-11-21T17:46:58.739529Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:58.739551Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:58.739713Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2024-11-21T17:46:58.739739Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:46:58.739744Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2024-11-21T17:46:58.739748Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:58.739752Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:58.739761Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:46:58.739776Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2024-11-21T17:46:58.739781Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:46:58.739784Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:58.739788Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:46:58.739791Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T17:46:58.739887Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2024-11-21T17:46:58.739896Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:46:58.739907Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:46:58.739909Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:46:58.739913Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:58.739919Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 
2024-11-21T17:46:58.739925Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:46:58.739943Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete 2024-11-21T17:46:58.739946Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:58.739949Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2024-11-21T17:46:58.739952Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2024-11-21T17:46:58.739963Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:46:58.739965Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2024-11-21T17:46:58.739969Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2024-11-21T17:46:58.739979Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:58.739983Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.739990Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> ReadOnlyVDisk::TestSync |76.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> ReadOnlyVDisk::TestGetWithMustRestoreFirst |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> ReadOnlyVDisk::TestStorageLoad |76.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> YdbProxy::ReadNonExistentTopic [GOOD] >> BasicUsage::ReadMirrored [GOOD] >> ReadOnlyVDisk::TestReads |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> ReadOnlyVDisk::TestDiscover >> TKqpScanData::EmptyColumns [GOOD] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> TTxDataShardMiniKQL::WriteLargeExternalBlob >> TKqpScanData::ArrowToUnboxedValueConverter >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] >> ReadOnlyVDisk::TestDiscover [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] |76.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2024-11-21T17:46:57.160323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790528543042522:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:57.160393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013b9/r3tmp/tmp4lzoO2/pdisk_1.dat 2024-11-21T17:46:57.276261Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27887 TServer::EnableGrpc on GrpcPort 4244, node 1 2024-11-21T17:46:57.308355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:57.308382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:57.309481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:57.348937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:57.348953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:57.348955Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:57.348998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:57.395989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:57.399393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:58.162912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480 2024-11-21T17:46:58.228630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790532838010525:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:58.228629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790532838010512:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:58.228649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:58.229306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T17:46:58.230840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790532838010527:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:46:58.368092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:58.415519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:46:58.475886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T17:46:58.531047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:46:58.593075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:46:59.234373Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790536952948466:2072];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013b9/r3tmp/tmpWbgFK9/pdisk_1.dat 2024-11-21T17:46:59.237180Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:59.248768Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24048 TServer::EnableGrpc on GrpcPort 19302, node 2 2024-11-21T17:46:59.289523Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:59.289547Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:59.289549Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:59.289594Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:59.339335Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:59.339374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:59.339677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:59.342383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 9315705319475378339 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2024-11-21T17:46:59.658024Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2024-11-21T17:46:59.659941Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T17:46:59.662174Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T17:46:59.662551Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2024-11-21T17:46:59.663604Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2024-11-21T17:46:59.663896Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut 
with key [1:1:9:0:0:32768:0] 2024-11-21T17:46:59.664285Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2024-11-21T17:46:59.664581Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2024-11-21T17:46:59.911371Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.911402Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] 2024-11-21T17:46:59.911429Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] 2024-11-21T17:46:59.911567Z 1 00h05m30.211024s :BS_PROXY_PUT ERROR: [bb7fce2bc8299cd4] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 
0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2024-11-21T17:46:59.911911Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.912172Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] 2024-11-21T17:46:59.912481Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2024-11-21T17:46:59.912783Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.912899Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] 2024-11-21T17:46:59.913044Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2024-11-21T17:46:59.913220Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] 2024-11-21T17:46:59.913466Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.913581Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2024-11-21T17:46:59.913736Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] 2024-11-21T17:46:59.913746Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] 2024-11-21T17:46:59.913881Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2024-11-21T17:46:59.914108Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] 2024-11-21T17:46:59.914118Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] 2024-11-21T17:46:59.914366Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2024-11-21T17:46:59.914630Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.914659Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only 
Sender# [1:5298:708] 2024-11-21T17:46:59.914668Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2024-11-21T17:46:59.914998Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.915036Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] 2024-11-21T17:46:59.915051Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2024-11-21T17:46:59.915458Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.915487Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] 2024-11-21T17:46:59.915500Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2024-11-21T17:46:59.915854Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T17:46:59.915870Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] 2024-11-21T17:46:59.915887Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2024-11-21T17:46:59.917005Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:695] 2024-11-21T17:46:59.917036Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:702] 2024-11-21T17:46:59.917044Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:709] 2024-11-21T17:46:59.917124Z 1 00h05m30.211024s :BS_PROXY_GET ERROR: [8f954deddc796914] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED"} Marker# BPG29 2024-11-21T17:46:59.917146Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:702] 2024-11-21T17:46:59.917153Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:709] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED"} |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2024-11-21T17:46:58.030439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:58.030460Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:58.030476Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:58.035535Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:58.035713Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:58.035781Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:58.036773Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:58.044401Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:58.044554Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:58.044699Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:58.044708Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:58.044715Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:58.044758Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:58.048010Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:58.048072Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:58.048107Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:46:58.048113Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:58.048118Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:58.048123Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.048204Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.048217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.048304Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:58.048324Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:58.048384Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.048391Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:58.048397Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:58.048402Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:58.048406Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:58.048411Z node 1 
:TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:58.048416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:58.065271Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.065295Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.065313Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:58.065791Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:58.065801Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:58.065824Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:58.065850Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:58.065858Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:58.065865Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:58.065872Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.065876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:58.065880Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:58.065883Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.065973Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:58.065978Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:58.065982Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:58.065986Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.065997Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:58.065999Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:58.066001Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:58.066003Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.066006Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:58.087249Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:58.087281Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.087289Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit 
FinishPropose 2024-11-21T17:46:58.087302Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:58.087318Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:58.087447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.087454Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.087461Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:58.087480Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:58.087484Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:58.087533Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.087543Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.087547Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:58.087551Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:58.088176Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:58.088198Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.088304Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.088313Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.088322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.088329Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:58.088335Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:58.088345Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2024-11-21T17:46:58.088351Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:58.088358Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.088362Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:58.088367Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:58.088371Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:46:58.088402Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2024-11-21T17:46:58.088407Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:58.088410Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 
9437184 2024-11-21T17:46:58.088414Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:58.088417Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:58.088483Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.088487Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:46:58.088508Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T17:46:58.088512Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.088515Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:58.088519Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:58.088522Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:58.088534Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.088538Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:46:58.088541Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:58.088544Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:58.088557Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is ... 5] at 9437185 to execution unit CompleteOperation 2024-11-21T17:46:59.396924Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2024-11-21T17:46:59.396997Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2024-11-21T17:46:59.397001Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2024-11-21T17:46:59.397005Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2024-11-21T17:46:59.397009Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2024-11-21T17:46:59.397016Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2024-11-21T17:46:59.397019Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2024-11-21T17:46:59.397023Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2024-11-21T17:46:59.397030Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:59.397034Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T17:46:59.397038Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T17:46:59.397042Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T17:46:59.397072Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{14, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2024-11-21T17:46:59.397083Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2024-11-21T17:46:59.397132Z node 3 :TABLET_EXECUTOR DEBUG: 
Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 10776726b}, Memory{0 dyn 10776726} 2024-11-21T17:46:59.397161Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:59.397166Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2024-11-21T17:46:59.397410Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2024-11-21T17:46:59.397664Z node 3 :TX_DATASHARD TRACE: Operation [6:5] at 9437184 exceeded memory limit 10776726 and requests 86213808 more for the next try 2024-11-21T17:46:59.397681Z node 3 :TX_DATASHARD DEBUG: tx 5 released its data 2024-11-21T17:46:59.397687Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Restart 2024-11-21T17:46:59.397692Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:46:59.397696Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:59.397700Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 out-of-order limits exceeded 2024-11-21T17:46:59.397704Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:59.397714Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 4 -> retry Change{14, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:46:59.397724Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} touch new 0b, 0b lo load (0b in total), 86213808b requested for data (96990534b in total) 2024-11-21T17:46:59.397730Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release tx data 2024-11-21T17:46:59.397734Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} released on update Res{3 10776726b}, Memory{0 dyn 0} 2024-11-21T17:46:59.397740Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} update Res{3 96990534b} type transaction 2024-11-21T17:46:59.397760Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:352:2303]) (release resources {0, 96990534}) 2024-11-21T17:46:59.397775Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 18.818640 to 1.881864 (remove task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:352:2303])) 2024-11-21T17:46:59.397797Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:249:2222]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2024-11-21T17:46:59.397802Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:249:2222]) to queue queue_transaction 2024-11-21T17:46:59.397810Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:249:2222]) from queue queue_transaction 2024-11-21T17:46:59.397814Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:249:2222]) to queue queue_transaction 2024-11-21T17:46:59.397820Z node 3 
:RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 0.000000 to 15.243099 (insert task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:249:2222])) 2024-11-21T17:46:59.397830Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2024-11-21T17:46:59.397838Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:59.397842Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2024-11-21T17:46:59.397935Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2024-11-21T17:46:59.470997Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2024-11-21T17:46:59.471041Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:46:59.471063Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:59.471071Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:46:59.471077Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2024-11-21T17:46:59.471083Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2024-11-21T17:46:59.471164Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2024-11-21T17:46:59.471169Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2024-11-21T17:46:59.471177Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2024-11-21T17:46:59.471220Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2024-11-21T17:46:59.471227Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2024-11-21T17:46:59.471231Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2024-11-21T17:46:59.471235Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2024-11-21T17:46:59.471242Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:59.471247Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:59.471253Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:59.471257Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:59.471293Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{14, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2024-11-21T17:46:59.471306Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2024-11-21T17:46:59.471374Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:249:2222]) 
(release resources {0, 96990534}) 2024-11-21T17:46:59.471390Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 15.243099 to 0.000000 (remove task Tx{18, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:249:2222])) 2024-11-21T17:46:59.482504Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} commited cookie 1 for step 8 2024-11-21T17:46:59.482534Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T17:46:59.482542Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2024-11-21T17:46:59.482562Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:46:59.482577Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2024-11-21T17:46:59.482586Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2024-11-21T17:46:59.482669Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} commited cookie 1 for step 8 2024-11-21T17:46:59.482675Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:59.482679Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2024-11-21T17:46:59.482687Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:46:59.482693Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T17:46:59.482697Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:59.482736Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:330:2303], Recipient [3:435:2385]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2024-11-21T17:46:59.482743Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:46:59.482753Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2024-11-21T17:46:59.482769Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:227:2222], Recipient [3:435:2385]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T17:46:59.482772Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:46:59.482775Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2024-11-21T17:46:40.468300Z :PropagateSessionClosed INFO: Random seed for debugging is 1732211200468294 2024-11-21T17:46:40.563387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790455962315965:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:40.563408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:40.566076Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790457372295399:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:40.566240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:40.590360Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e13/r3tmp/tmpzOOKip/pdisk_1.dat 2024-11-21T17:46:40.598400Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:40.617858Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24355, node 1 2024-11-21T17:46:40.663687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.663714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:40.668552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e13/r3tmp/yandexZVEl9c.tmp 2024-11-21T17:46:40.668570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e13/r3tmp/yandexZVEl9c.tmp 2024-11-21T17:46:40.668621Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e13/r3tmp/yandexZVEl9c.tmp 2024-11-21T17:46:40.668671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:40.669429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:40.678666Z INFO: TTestServer started on Port 27134 GrpcPort 24355 TClient is connected to server localhost:27134 PQClient connected to localhost:24355 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:40.692017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.692041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:46:40.693680Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:40.694298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:40.702453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:40.708752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:46:40.939976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790457372295673:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.940013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790457372295698:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.940036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.940466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790455962316865:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.940488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790455962316861:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.940503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.941121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:40.942532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790455962316905:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.942585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.945312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790455962316875:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:46:40.945361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790457372295702:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:46:40.977736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.005470Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790457372295782:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.005932Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmQwMDYxNGUtMWRiZDhmYWMtYTk5MzZjNzMtZjJmNDBhYmE=, ActorId: [2:7439790457372295671:2277], ActorState: ExecuteState, TraceId: 01jd7xamx96anvpehbf6ejxnfk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.006460Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.045659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.049919Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790460257284393:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.050266Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjhhMjdjYTEtMmUwODJmNDMtZGE0NTk2OWUtMzBhMWRjYTA=, ActorId: [1:7439790455962316858:2299], ActorState: ExecuteState, TraceId: 01jd7xamx98y1yrtegq68gv3sy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.050417Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.082017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:24355", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:41.129093Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd7xan2k2wxs2c27md0mvn7k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ1OTNmMGQtMmVkZTlhZDAtNmVhMzdlYjMtN2U4YmI5MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790460257284648:2938] 2024-11-21T17:46:45.563700Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790455962315965:2052];se ... direct read id: 0 2024-11-21T17:46:59.251541Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 2} (3-3) 2024-11-21T17:46:59.251544Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 3} (4-4) 2024-11-21T17:46:59.250944Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--test-topic-mirrored-from-dc3' partition 0 user user offset 1 count 4 size 1920 endOffset 5 max time lag 0ms effective offset 1 2024-11-21T17:46:59.250948Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 1 2024-11-21T17:46:59.250984Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2024-11-21T17:46:59.250986Z node 4 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:46:59.251008Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic-mirrored-from-dc3' partition: 0 messageNo: 0 requestId: cookie: 1 >>> event from dataHandler: DataReceived { Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..130 bytes.. 
Information: { Offset: 1 ProducerId: "src_id" SeqNo: 2 CreateTime: 2024-11-21T17:46:59.245000Z WriteTime: 2024-11-21T17:46:59.248000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:58004", "server": "ipv6:[::1]:58004" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2024-11-21T17:46:59.245000Z WriteTime: 2024-11-21T17:46:59.248000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:58004", "server": "ipv6:[::1]:58004" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2024-11-21T17:46:59.245000Z WriteTime: 2024-11-21T17:46:59.248000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:58004", "server": "ipv6:[::1]:58004" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2024-11-21T17:46:59.245000Z WriteTime: 2024-11-21T17:46:59.248000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:58004", "server": "ipv6:[::1]:58004" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2024-11-21T17:46:59.251606Z :DEBUG: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] [] The application data is transferred to the client. Number of messages 4, size 1180 bytes 2024-11-21T17:46:59.251608Z :DEBUG: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] [] Returning serverBytesSize = 0 to budget 2024-11-21T17:46:59.251647Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11959014782699110263_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1445 } } 2024-11-21T17:46:59.251667Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11959014782699110263_v1 got read request: guid# 5a338ade-6d9c669b-edf2f479-efddba39 2024-11-21T17:46:59.345548Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|5eb7cb01-b292976b-a3c15574-c896a2_0] Write session will now close 2024-11-21T17:46:59.345578Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|5eb7cb01-b292976b-a3c15574-c896a2_0] Write session: aborting 2024-11-21T17:46:59.345820Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|5eb7cb01-b292976b-a3c15574-c896a2_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2024-11-21T17:46:59.345836Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|5eb7cb01-b292976b-a3c15574-c896a2_0] Write session: destroy 2024-11-21T17:46:59.345902Z :INFO: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] Closing read session. 
Close timeout: 18446744073709.551615s 2024-11-21T17:46:59.345942Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:4:0 -:test-topic-mirrored-from-dc2:0:2:4:0 -:test-topic-mirrored-from-dc3:0:3:4:0 2024-11-21T17:46:59.345952Z :INFO: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] Counters: { Errors: 0 CurrentSessionLifetimeMs: 351 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:59.346158Z :INFO: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:59.346167Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:4:0 -:test-topic-mirrored-from-dc2:0:2:4:0 -:test-topic-mirrored-from-dc3:0:3:4:0 2024-11-21T17:46:59.346171Z :INFO: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] Counters: { Errors: 0 CurrentSessionLifetimeMs: 351 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:59.346177Z :INFO: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] Closing read session. Close timeout: 0.000000s 2024-11-21T17:46:59.346181Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:4:0 -:test-topic-mirrored-from-dc2:0:2:4:0 -:test-topic-mirrored-from-dc3:0:3:4:0 2024-11-21T17:46:59.346183Z :INFO: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] Counters: { Errors: 0 CurrentSessionLifetimeMs: 351 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:59.346200Z :NOTICE: [/Root] [/Root] [d2e893d-101975d-6f79f123-c5dbd352] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:46:59.346253Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|5eb7cb01-b292976b-a3c15574-c896a2_0 grpc read done: success: 0 data: 2024-11-21T17:46:59.346269Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|5eb7cb01-b292976b-a3c15574-c896a2_0 grpc read failed 2024-11-21T17:46:59.346279Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|5eb7cb01-b292976b-a3c15574-c896a2_0 grpc closed 2024-11-21T17:46:59.346287Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|5eb7cb01-b292976b-a3c15574-c896a2_0 is DEAD 2024-11-21T17:46:59.346513Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11959014782699110263_v1 grpc read done: success# 0, data# { } 2024-11-21T17:46:59.346532Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11959014782699110263_v1 grpc read failed 2024-11-21T17:46:59.346539Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11959014782699110263_v1 grpc closed 2024-11-21T17:46:59.346547Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:46:59.346558Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11959014782699110263_v1 is DEAD 2024-11-21T17:46:59.346907Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7439790535530280273:2550] disconnected; active server actors: 1 2024-11-21T17:46:59.346921Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7439790535530280273:2550] client user disconnected session shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.346948Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7439790535530280271:2550] disconnected; active server actors: 1 2024-11-21T17:46:59.346951Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7439790535530280271:2550] client user disconnected session shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.346962Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790535530280272:2550] disconnected; active server actors: 1 2024-11-21T17:46:59.346964Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790535530280272:2550] client user disconnected session shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.347088Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:59.347109Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7439790535530280398:2565] destroyed 2024-11-21T17:46:59.347115Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:59.347119Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.347126Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7439790535530280284:2557] destroyed 2024-11-21T17:46:59.347130Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:59.347133Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy 
direct read session shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.347135Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7439790535530280283:2556] destroyed 2024-11-21T17:46:59.347139Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:59.347141Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.347144Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790535530280285:2554] destroyed 2024-11-21T17:46:59.347175Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:46:59.347216Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.347225Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.347228Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_11959014782699110263_v1 2024-11-21T17:46:59.533299Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439790535530280423:2570], TxId: 281474976715703, task: 1, CA Id [3:7439790535530280421:2570]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T17:46:59.565552Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439790535530280423:2570], TxId: 281474976715703, task: 1, CA Id [3:7439790535530280421:2570]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T17:46:59.611613Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439790535530280423:2570], TxId: 281474976715703, task: 1, CA Id [3:7439790535530280421:2570]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapIndexes::IndexesInLocalMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 5778, MsgBus: 12387 2024-11-21T17:46:28.747972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790406220692983:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:28.748082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dc3/r3tmp/tmpcDEvsi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5778, node 1 2024-11-21T17:46:28.799516Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:28.803769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:28.803782Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:28.803783Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:28.803813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12387 TClient is connected to server localhost:12387 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:28.849077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:28.849112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:28.850201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:28.877154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:28.886815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:28.896527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.896586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.896611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:28.896631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:28.896642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:28.896656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:28.896668Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:28.896685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:28.896704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:28.896721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.896736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:28.896752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790406220693642:2288];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:28.898752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.898771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.898787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:28.898801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:28.898816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:28.898830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:28.898842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:28.898856Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:28.898866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:28.898879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:28.898893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:28.898907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790406220693643:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:28.901214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:28.901228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:28.901245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:28.901259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:28.901275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:28.901285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:28.901297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:28.901311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790406220693646:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:28.901324Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790406220693646 ... 7:46:48.935945Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211208672, txId: 18446744073709551615] shutting down 2024-11-21T17:46:49.164452Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211209000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:49.364056Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211209078, txId: 18446744073709551615] shutting down 2024-11-21T17:46:49.620642Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211209295, txId: 18446744073709551615] shutting down 2024-11-21T17:46:49.786067Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211209505, txId: 18446744073709551615] shutting down 2024-11-21T17:46:49.957325Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211209729, txId: 18446744073709551615] shutting down 2024-11-21T17:46:50.118991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211210002, txId: 18446744073709551615] shutting down 2024-11-21T17:46:50.286059Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211210065, txId: 18446744073709551615] shutting down 2024-11-21T17:46:50.480325Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211210226, txId: 18446744073709551615] shutting down 2024-11-21T17:46:50.670885Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211210415, txId: 18446744073709551615] shutting down 2024-11-21T17:46:50.871779Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211210597, txId: 18446744073709551615] shutting down 2024-11-21T17:46:51.047849Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211210793, txId: 18446744073709551615] shutting down 2024-11-21T17:46:51.269880Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211211000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:51.536533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211211178, txId: 18446744073709551615] shutting down 2024-11-21T17:46:51.696538Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211211423, txId: 18446744073709551615] shutting down 2024-11-21T17:46:51.951366Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211211640, txId: 18446744073709551615] shutting down 2024-11-21T17:46:52.151349Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211212000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:52.312918Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211212067, txId: 18446744073709551615] shutting down 2024-11-21T17:46:52.507075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211212256, txId: 18446744073709551615] shutting down 2024-11-21T17:46:52.696424Z node 1 
:KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211212417, txId: 18446744073709551615] shutting down 2024-11-21T17:46:52.847476Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211212627, txId: 18446744073709551615] shutting down 2024-11-21T17:46:52.966659Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211212802, txId: 18446744073709551615] shutting down 2024-11-21T17:46:53.103463Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211213000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:53.241217Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211213061, txId: 18446744073709551615] shutting down 2024-11-21T17:46:53.396294Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211213201, txId: 18446744073709551615] shutting down 2024-11-21T17:46:53.566503Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211213348, txId: 18446744073709551615] shutting down 2024-11-21T17:46:53.711729Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211213509, txId: 18446744073709551615] shutting down 2024-11-21T17:46:53.864881Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211213670, txId: 18446744073709551615] shutting down 2024-11-21T17:46:54.042317Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211213817, txId: 18446744073709551615] shutting down 2024-11-21T17:46:54.246251Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211214000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:54.393678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211214174, txId: 18446744073709551615] shutting down 2024-11-21T17:46:54.547649Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211214335, txId: 18446744073709551615] shutting down 2024-11-21T17:46:54.694920Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211214496, txId: 18446744073709551615] shutting down 2024-11-21T17:46:54.846917Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211214657, txId: 18446744073709551615] shutting down 2024-11-21T17:46:55.001421Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211214783, txId: 18446744073709551615] shutting down 2024-11-21T17:46:55.156406Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211215000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:55.317472Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211215105, txId: 18446744073709551615] shutting down 2024-11-21T17:46:55.485782Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211215266, txId: 18446744073709551615] shutting down 2024-11-21T17:46:55.642351Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732211215420, txId: 18446744073709551615] shutting down 2024-11-21T17:46:55.800189Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211215581, txId: 18446744073709551615] shutting down 2024-11-21T17:46:55.958097Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211215749, txId: 18446744073709551615] shutting down 2024-11-21T17:46:56.113305Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211216000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:56.271771Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211216064, txId: 18446744073709551615] shutting down 2024-11-21T17:46:56.402432Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211216218, txId: 18446744073709551615] shutting down 2024-11-21T17:46:56.512158Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211216365, txId: 18446744073709551615] shutting down 2024-11-21T17:46:56.634537Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211216484, txId: 18446744073709551615] shutting down 2024-11-21T17:46:56.763445Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211216603, txId: 18446744073709551615] shutting down 2024-11-21T17:46:56.900184Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211216729, txId: 18446744073709551615] shutting down 2024-11-21T17:46:57.058259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:57.315740Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217002, txId: 18446744073709551615] shutting down 2024-11-21T17:46:57.489543Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217226, txId: 18446744073709551615] shutting down 2024-11-21T17:46:57.642203Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217436, txId: 18446744073709551615] shutting down 2024-11-21T17:46:57.789678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217590, txId: 18446744073709551615] shutting down 2024-11-21T17:46:57.943872Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217744, txId: 18446744073709551615] shutting down 2024-11-21T17:46:58.163419Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211218000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:58.344356Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211218073, txId: 18446744073709551615] shutting down 2024-11-21T17:46:58.475660Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211218283, txId: 18446744073709551615] shutting down 2024-11-21T17:46:58.627044Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211218444, txId: 18446744073709551615] shutting down 2024-11-21T17:46:58.813906Z node 1 
:KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211218577, txId: 18446744073709551615] shutting down 2024-11-21T17:46:58.976781Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211218731, txId: 18446744073709551615] shutting down 2024-11-21T17:46:59.161008Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211219000, txId: 18446744073709551615] shutting down 2024-11-21T17:46:59.320994Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211219088, txId: 18446744073709551615] shutting down 2024-11-21T17:46:59.483251Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211219263, txId: 18446744073709551615] shutting down 2024-11-21T17:46:59.625965Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211219431, txId: 18446744073709551615] shutting down 2024-11-21T17:46:59.760662Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211219592, txId: 18446744073709551615] shutting down 2024-11-21T17:46:59.933734Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211219711, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 12854119082853858398 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T17:46:59.966293Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T17:46:59.994765Z 1 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:46:59.995104Z 2 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 
1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2024-11-21T17:47:00.021525Z 3 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5299:708] 2024-11-21T17:47:00.021754Z 1 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:694] 2024-11-21T17:47:00.021842Z 2 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5292:701] 2024-11-21T17:47:00.021868Z 1 00h02m30.160512s :BS_PROXY_PUT ERROR: [feb0e36dd96def7c] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key 
[1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2024-11-21T17:46:58.452959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:58.452983Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:58.453000Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:58.455842Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, 
Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:58.456022Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:58.456083Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:58.457229Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:58.466506Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:58.466654Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:58.466806Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:58.466816Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:58.466824Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:58.466868Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:58.470658Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:58.470726Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:58.470765Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:46:58.470771Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:58.470776Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:58.470782Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.470866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.470883Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.470918Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:58.470938Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:58.470985Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.470991Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:58.470998Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:58.471004Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:58.471008Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:58.471013Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:58.471019Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:58.479068Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.479093Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.479110Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:58.479542Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:58.479558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:58.479580Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:58.479620Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:58.479631Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:58.479640Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:58.479649Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.479653Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:58.479658Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:58.479662Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.479727Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:58.479732Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:58.479736Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:58.479740Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.479750Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:58.479754Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:58.479757Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:58.479760Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.479765Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:58.501068Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:58.501097Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.501103Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.501114Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:58.501130Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:58.501253Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.501288Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.501301Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:58.501334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:58.501341Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:58.501393Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.501401Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.501405Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:58.501411Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:58.502095Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:58.502114Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.502172Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.502179Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.502186Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.502193Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:58.502198Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:58.502206Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:46:58.502211Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:58.502218Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.502222Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:58.502226Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:58.502231Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:46:58.502273Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:46:58.502277Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.502281Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:58.502284Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:58.502288Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:58.502299Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.502302Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 
executing on unit ProtectSchemeEchoes 2024-11-21T17:46:58.502306Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:58.502309Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:58.502321Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:46:58.502325Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:46:58.502328Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:46:58.502334Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.502337Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:4 ... : TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:59.536367Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2024-11-21T17:46:59.536372Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:59.536886Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T17:46:59.536899Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T17:46:59.536944Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:290:2276], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:59.536948Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:59.536953Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:289:2275], serverId# [3:290:2276], sessionId# [0:0:0] 2024-11-21T17:46:59.536978Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\0 2024-11-21T17:46:59.537842Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:59.537851Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:59.537951Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2024-11-21T17:46:59.537962Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T17:46:59.537966Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2024-11-21T17:46:59.537969Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:59.537973Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:59.537978Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:46:59.537985Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2024-11-21T17:46:59.537992Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T17:46:59.537995Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:59.537998Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:46:59.538001Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2024-11-21T17:46:59.538062Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2024-11-21T17:46:59.538069Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:46:59.538075Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T17:46:59.538078Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:46:59.538081Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:59.538084Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2024-11-21T17:46:59.538088Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:46:59.538098Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2024-11-21T17:46:59.538102Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:59.538105Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2024-11-21T17:46:59.538108Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2024-11-21T17:46:59.538113Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2024-11-21T17:46:59.538116Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 
2024-11-21T17:46:59.538119Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2024-11-21T17:46:59.538126Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:59.538130Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2024-11-21T17:46:59.538134Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |76.8%| [TA] {RESULT} $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] |76.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2024-11-21T17:46:58.837439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:58.837461Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:58.837480Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:58.840527Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:58.840714Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:58.840779Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:58.841941Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:58.865912Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:58.866058Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:58.866159Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:58.866166Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:58.866172Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:58.866204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:58.868464Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:58.868534Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:58.868579Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:46:58.868586Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:58.868590Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:58.868596Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.868697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.868715Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.868749Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:58.868768Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:58.868823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.868830Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:58.868837Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:58.868842Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:58.868846Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:58.868850Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:58.868855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:58.878051Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.878081Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.878101Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:58.878533Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:58.878545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:58.878571Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:58.878605Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:58.878615Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:58.878624Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:58.878635Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.878640Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:58.878645Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:58.878649Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.878720Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:58.878724Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:58.878727Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:58.878731Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.878741Z node 1 
:TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:58.878744Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:58.878747Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:58.878750Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.878754Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:58.900734Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:58.900763Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.900769Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.900782Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:58.900799Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:58.900937Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.900946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.900953Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:58.900975Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:58.900980Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:58.901044Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.901053Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.901058Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:58.901063Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:58.901845Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:58.901869Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.901937Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.901946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.901954Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.901963Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:58.901968Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:58.901976Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in 
PlanQueue unit at 9437184 2024-11-21T17:46:58.901981Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:58.901988Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.901993Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:58.901998Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:58.902002Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:46:58.902053Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:46:58.902058Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.902061Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:58.902065Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:58.902068Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:58.902082Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.902085Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:46:58.902088Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:58.902092Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:58.902102Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:46:58.902106Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:46:58.902109Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:46:58.902115Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:58.902118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:58.902122Z node 1 :TX_DATASHARD TRACE: ... 
ated operation [0:2] at 9437184 2024-11-21T17:47:00.227394Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:47:00.227397Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:47:00.227399Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:47:00.227402Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:00.227409Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:47:00.227418Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2024-11-21T17:47:00.227422Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T17:47:00.227491Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:00.227494Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:00.227496Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:47:00.239086Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2024-11-21T17:47:00.239127Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:00.239150Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:47:00.239158Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:47:00.239165Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2024-11-21T17:47:00.239170Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2024-11-21T17:47:00.239197Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:47:00.239200Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2024-11-21T17:47:00.239203Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:00.239205Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2024-11-21T17:47:00.239217Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:47:00.239221Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:00.239225Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2024-11-21T17:47:00.248185Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:47:00.248215Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2024-11-21T17:47:00.248262Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 
ms, propose latency: 5 ms, status: COMPLETE 2024-11-21T17:47:00.248298Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:47:00.426018Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T17:47:00.426040Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T17:47:00.426102Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:287:2272], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:00.426106Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:00.426111Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:286:2271], serverId# [3:287:2272], sessionId# [0:0:0] 2024-11-21T17:47:00.473107Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2024-11-21T17:47:00.473145Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:47:00.473184Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:00.482832Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2024-11-21T17:47:00.482872Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2024-11-21T17:47:00.482876Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2024-11-21T17:47:00.482879Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:47:00.482882Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:47:00.482892Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:00.482907Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2024-11-21T17:47:00.482911Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2024-11-21T17:47:00.482913Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 
2024-11-21T17:47:00.482915Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:47:00.482918Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:00.482924Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:00.482931Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2024-11-21T17:47:00.482934Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2024-11-21T17:47:00.482988Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:00.482991Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:00.482995Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:00.492751Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2024-11-21T17:47:00.492824Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2024-11-21T17:47:00.492834Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2024-11-21T17:47:00.492953Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:00.492960Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:00.505054Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2024-11-21T17:47:00.505088Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:00.544914Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2024-11-21T17:47:00.544942Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:00.544961Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:47:00.544969Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:47:00.544974Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2024-11-21T17:47:00.544980Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2024-11-21T17:47:00.544992Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2024-11-21T17:47:00.544995Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2024-11-21T17:47:00.544999Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:00.545003Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit 
CompletedOperations 2024-11-21T17:47:00.545015Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2024-11-21T17:47:00.545017Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:00.545020Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2024-11-21T17:47:00.575270Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:47:00.575292Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2024-11-21T17:47:00.575301Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 7 ms, status: COMPLETE 2024-11-21T17:47:00.575322Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:47:00.575890Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T17:47:00.575902Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2024-11-21T17:47:00.576473Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:224:2220], Recipient [3:226:2221]: NKikimr::TEvTablet::TEvFollowerGcApplied |76.8%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadOnlyVDisk::TestReads [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 10041264109326948885 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND 
TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> ReadOnlyVDisk::TestSync [GOOD] >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> TNodeBrokerTest::BasicFunctionality |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest >> TLocalTests::TestRemoveTenantWhileResolving >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> 
TGRpcCmsTest::DisabledTxTest |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 4135959819580425324 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2024-11-21T17:46:59.609009Z 1 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:8779:935] 2024-11-21T17:46:59.609098Z 2 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8786:942] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2024-11-21T17:46:59.891934Z 3 00h06m00.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:8793:949] 2024-11-21T17:46:59.891967Z 2 00h06m00.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8786:942] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T17:47:00.458725Z 5 00h14m00.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8807:963] 2024-11-21T17:47:00.458749Z 4 00h14m00.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:8800:956] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 
SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T17:47:00.731088Z 6 00h18m01.050512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8814:970] 2024-11-21T17:47:00.731110Z 5 00h18m01.050512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8807:963] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2024-11-21T17:47:01.011572Z 7 00h22m01.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8821:977] 2024-11-21T17:47:01.011599Z 6 00h22m01.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8814:970] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2024-11-21T17:47:01.288608Z 7 00h26m01.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8821:977] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |76.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/kqp/runtime/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] Test command err: 2024-11-21T17:47:01.781633Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T17:47:01.810854Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T17:47:01.822736Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:01.824309Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:01.824772Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:01.824913Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:01.825306Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:01.825316Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:01.825356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:01.828471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:01.828581Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:01.828600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:01.828627Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:01.828642Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:01.828657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:01.856650Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:01.856709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:01.867674Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:01.867726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:01.867743Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:01.867756Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:01.867784Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:01.867793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:01.867798Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:01.867807Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:01.878683Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:01.878742Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:01.878912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:01.878920Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:01.880095Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:01.880280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:01.880491Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001db9/r3tmp/tmpzCB7MW/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:47:01.880570Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/001db9/r3tmp/tmpzCB7MW/pdisk_1.dat 2024-11-21T17:47:01.880716Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:01.880737Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:01.880747Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:01.880771Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:01.880796Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 
2024-11-21T17:47:01.881176Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:01.881209Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:01.896742Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:01.896933Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:01.897021Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:01.897030Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:01.897063Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:01.897082Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:01.897181Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:01.897190Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:01.897194Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:01.897210Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T17:47:01.897610Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:01.897619Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T17:47:01.897746Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 2024-11-21T17:47:01.897763Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:01.897771Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:01.897774Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:01.914906Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T17:47:01.914924Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T17:47:01.914955Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T17:47:01.918549Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T17:47:01.918581Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Missing task for /dc-1/users/tenant-1 2024-11-21T17:47:01.918631Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T17:47:01.918648Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): 
Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:01.918740Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:01.918747Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:01.918760Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:396:2337] 2024-11-21T17:47:01.918899Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:396:2337] 2024-11-21T17:47:01.918911Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:01.918916Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:01.918918Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk |76.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest |76.9%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcCmsTest::DescribeOptionsTest >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> TGRpcCmsTest::DisabledTxTest [GOOD] >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TGRpcCmsTest::AuthTokenTest >> TGRpcCmsTest::RemoveWithAnotherTokenTest >> TNodeBrokerTest::SingleDomainModeBannedIds |76.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2024-11-21T17:47:01.809137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790544329778391:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:01.809393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011bd/r3tmp/tmp1lSc6o/pdisk_1.dat 2024-11-21T17:47:01.861143Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10447, node 1 2024-11-21T17:47:01.874891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:01.874903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:01.874904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:01.874940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12806 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:01.899544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.900450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:01.900474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.901160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:01.901207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:01.901214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:47:01.901583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:01.901595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:47:01.901740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:01.901938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.902791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211221951, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:01.902805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:01.902858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:47:01.903291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:01.903344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:01.903362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:47:01.903373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:47:01.903382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:47:01.903397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:47:01.903920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:47:01.903943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:47:01.903947Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:01.903961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:47:01.909634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:01.909672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:01.911270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:01.917744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/users, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.917796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:01.917807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/users/user-1, opId: 281474976710658:1, at schemeshard: 
72057594046644480 2024-11-21T17:47:01.917898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:01.917907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T17:47:01.918315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/users/user-1 2024-11-21T17:47:01.918373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:01.918438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:01.918470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T17:47:01.918481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 3 2024-11-21T17:47:01.918526Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:01.918552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:47:01.918669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:47:01.918680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:47:01.918684Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:47:01.918714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:47:01.918722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:47:01.918723Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:47:01.918738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:47:01.918745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:47:01.918747Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2024-11-21T17:47:01.918939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:01.918951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 3 -> 128 2024-11-21T17:47:01.919228Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T17:47:01.920040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211221965, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:01.920053Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211221965, at schemeshard: 72057594046644480 2024-11-21T17:47:01.920074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T17:47:01.920095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:1, at tablet 72057594046644480 2024-11-21T17:47:01.920132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2024-11-21T17:47:01.920538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:01.920600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:01.920618Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2024-11-21T17:47:01.920638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 1/2 2024-11-21T17:47:01.920667Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T17:47:01.920696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 2/2 2024-11-21T17:47:01.920709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:47:01.920718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2024-11-21T17:47:01.920722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 3, subscribers: 1 2024-11-21T17:47:01.920863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:47:01.920880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:47:01.920892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:47:01.920948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:47:01.920958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:47:01.920960Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T17:47:01.920977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 
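After TDone the operation is not finished until the scheme board publications are acknowledged: the log prints "Publication still in progress ... publications: N, subscribers: M", then one Handle TEvUpdateAck per published path, each followed by "Publication in-flight, count: k", until "Publication complete, notify & remove". A small sketch that replays this countdown per transaction from the captured text (regexes are taken from the wording above; the helper name is illustrative only):

```python
import re

IN_FLIGHT = re.compile(r"Publication in-flight, count: (\d+), at schemeshard: \d+, txId: (\d+)")
COMPLETE = re.compile(r"Publication complete, notify & remove, at schemeshard: \d+, txId: (\d+)")

def publication_progress(log_text):
    """Remaining publication count per txId; 0 once the schemeshard reported completion."""
    remaining = {}
    for m in IN_FLIGHT.finditer(log_text):
        remaining[m.group(2)] = int(m.group(1))   # keep the last in-flight count seen
    for m in COMPLETE.finditer(log_text):
        remaining[m.group(1)] = 0                 # all acks received, subscribers notified
    return remaining
```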
2024-11-21T17:47:01.920981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:47:01.920986Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:47:01.920993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T17:47:01.922788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/users/user-1, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.922841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:01.922848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.923243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/users/user-1 2024-11-21T17:47:01.923287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:01.923296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 2 -> 3 2024-11-21T17:47:01.923348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T17:47:01.923654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:01.923663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 3 -> 128 2024-11-21T17:47:01.923896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.930113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211221979, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:01.930131Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710659:0, at tablet 72057594046644480 2024-11-21T17:47:01.930184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2024-11-21T17:47:01.930571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:01.930632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:01.930651Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2024-11-21T17:47:01.930670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T17:47:01.930686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T17:47:01.930707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T17:47:01.930808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T17:47:01.930827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T17:47:01.930831Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2024-11-21T17:47:01.930844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2024-11-21T17:47:01.846828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790546890128604:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:01.846864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011dc/r3tmp/tmpZuDK79/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32680, node 1 2024-11-21T17:47:01.902769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:47:01.902794Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:01.905803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:01.905816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:01.905818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:01.905870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
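The "------- [TS] ... >> TGRpcCmsTest::SimpleTenantsTest [GOOD]" header above is the per-test report emitted by the harness, and the "Test command err" block that follows it is the stderr of that single test run. A rough way to list verdicts from such a log (only [GOOD] appears in this excerpt, so the regex deliberately accepts any bracketed status rather than assuming the harness's full vocabulary):

```python
import re

# Matches report headers like:
#   "... ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD]"
VERDICT = re.compile(r">> (?P<test>[\w:]+) \[(?P<status>\w+)\]")

def test_verdicts(log_text):
    """List of (suite::test, status) pairs found in the run log."""
    return [(m.group("test"), m.group("status")) for m in VERDICT.finditer(log_text)]
```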
2024-11-21T17:47:01.947125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:01.947158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:01.948797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:01.973010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.974043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:01.974065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.974582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:01.974628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:01.974636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:47:01.974974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:01.974983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:47:01.975164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:01.975302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.975986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211222021, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:01.975997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:01.976052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:47:01.976396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:01.976436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:01.976450Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:47:01.976465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:47:01.976476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:47:01.976486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:47:01.976877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
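The Hive warnings at the start of this block record node connectivity as a volatile-state machine: Unknown -> Disconnected -> Connecting -> Connected during bring-up, and Connected -> Disconnected again when the tenant's node is killed at the end of the test. An illustrative extractor for those transitions (the pattern is copied from the log wording; this is not an official tool):

```python
import re

HIVE_STATE = re.compile(
    r"HIVE#(?P<hive>\d+) Node\((?P<node>\d+), \(.*?\)\) VolatileState: (?P<src>\w+) -> (?P<dst>\w+)"
)

def node_state_changes(log_text):
    """(hive, node, from_state, to_state) tuples in the order they were logged."""
    return [m.group("hive", "node", "src", "dst") for m in HIVE_STATE.finditer(log_text)]
```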
2024-11-21T17:47:01.976889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:47:01.976893Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:01.976905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:47:01.986018Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439790546890129327:2283], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2024-11-21T17:47:01.986033Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T17:47:01.986040Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:01.986043Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:01.986065Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2024-11-21T17:47:01.986101Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732211221985825) 2024-11-21T17:47:01.986186Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732211221985825 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T17:47:01.986233Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T17:47:01.987971Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T17:47:01.988102Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211221985825&action=1" } } } 2024-11-21T17:47:01.988137Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:01.988161Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:01.988198Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:01.988311Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T17:47:01.988335Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:47:01.988997Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender 
[1:7439790546890129335:2284], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211221985825&action=1" } UserToken: "" } 2024-11-21T17:47:01.989007Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T17:47:01.989040Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211221985825&action=1" } } 2024-11-21T17:47:01.989878Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T17:47:01.989893Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:01.989904Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439790546890129332:2204], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:01.989907Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:01.989910Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:01.989912Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:01.989921Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T17:47:01.989926Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T17:47:01.989939Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T17:47:01.991340Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:01.991350Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:01.991352Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:01.991353Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:01.991364Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2024-11-21T17:47:01.991370Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1732211221985825 err ... 
Hive 72057594037968897 shardIdx 72057594046644480:3 2024-11-21T17:47:02.082999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:02.083001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:02.083007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2024-11-21T17:47:02.083009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:02.083021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:02.083030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2024-11-21T17:47:02.083031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:02.083033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:02.083171Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2024-11-21T17:47:02.083183Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T17:47:02.083196Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.083216Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7439790551185097120:2204], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.083226Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.083232Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.083234Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.083241Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2024-11-21T17:47:02.083254Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1732211222074963 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.083267Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211222074963 issue= 2024-11-21T17:47:02.084857Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:02.085043Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2024-11-21T17:47:02.085063Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2024-11-21T17:47:02.085068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2024-11-21T17:47:02.085072Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2024-11-21T17:47:02.085076Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not 
found 2024-11-21T17:47:02.085079Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2024-11-21T17:47:02.085082Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2024-11-21T17:47:02.085085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2024-11-21T17:47:02.085089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2024-11-21T17:47:02.085092Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2024-11-21T17:47:02.086048Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2024-11-21T17:47:02.086076Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T17:47:02.086081Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.086144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:02.086160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:02.086337Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790546890128947:2203], Recipient [1:7439790546890129058:2204]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:02.086347Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:02.086354Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.086360Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.086366Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T17:47:02.086378Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732211222074963 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.087635Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T17:47:02.087651Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.087660Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:02.087698Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:02.087820Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2024-11-21T17:47:02.087835Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2024-11-21T17:47:02.089201Z node 1 :CMS_TENANTS DEBUG: 
TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2024-11-21T17:47:02.089235Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439790551185097295:2204], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:02.089250Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:02.089254Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.089259Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.089269Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T17:47:02.089279Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T17:47:02.090677Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:02.090691Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.090692Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.090694Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.090705Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732211222074963 2024-11-21T17:47:02.090707Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211222074963 issue= 2024-11-21T17:47:02.090710Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732211222074963 issue= 2024-11-21T17:47:02.090713Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T17:47:02.090731Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732211222074963 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.091638Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2024-11-21T17:47:02.091673Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.128585Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439790551185097314:2323], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222074963&action=2" } UserToken: "" } 2024-11-21T17:47:02.128599Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T17:47:02.128654Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222074963&action=2" ready: true status: SUCCESS } } 2024-11-21T17:47:02.129441Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439790551185097317:2325], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2024-11-21T17:47:02.129452Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T17:47:02.129487Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 
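Taken together, the CMS_TENANTS entries trace the whole tenant lifecycle for /Root/users/user-1: TTxCreateTenant writes the tenant in state CREATING_POOLS, TPoolManip allocates the :hdd storage pool (DefineStoragePool, state ALLOCATED), the subdomain is created and later dropped, then TTxRemoveComputationalUnits, REMOVING_POOLS, DeleteStoragePool and TTxRemoveTenantDone, after which GetTenantStatus answers NOT_FOUND "Unknown tenant". A hedged sketch that lists the state updates the tenants manager persisted, based only on the "Update ... state in database" lines above:

```python
import re

STATE_UPDATE = re.compile(
    r"Update (?:tenant|pool) state in database for (?P<path>\S+) state=(?P<state>[A-Z_]+)"
)

def lifecycle(log_text):
    """Ordered (path, state) updates, e.g. ('/Root/users/user-1', 'REMOVING_UNITS')
    or ('/Root/users/user-1:hdd', 'DELETED'); the initial CREATING_POOLS row is
    written via 'Add tenant ... to database' and is intentionally not matched here."""
    return [(m.group("path"), m.group("state")) for m in STATE_UPDATE.finditer(log_text)]
```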
2024-11-21T17:47:02.130134Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7439790551185097320:2326], Recipient [1:7439790546890129058:2204]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2024-11-21T17:47:02.130143Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2024-11-21T17:47:02.130195Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2024-11-21T17:47:02.131097Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T17:47:02.131173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:47:02.327760Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:47:02.327996Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439790552057006095:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TNodeBrokerTest::MinDynamicNodeIdShifted ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2024-11-21T17:47:01.813179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790546961722433:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:01.813307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011ac/r3tmp/tmpbOCx7r/pdisk_1.dat 2024-11-21T17:47:01.863869Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3141, node 1 2024-11-21T17:47:01.875827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:01.875840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:01.875842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:01.875876Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:01.913361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:01.913393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:01.914889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:01.940969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.941950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:01.941975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.942740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:01.942805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:01.942814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:47:01.943183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:01.943194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:47:01.943387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:01.943561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:01.944653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211221993, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:01.944667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:01.944729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:47:01.945151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:01.945212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:01.945233Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:47:01.945253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:47:01.945270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:47:01.945290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:47:01.945802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:47:01.945822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:47:01.945827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:01.945839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:47:01.972775Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439790546961723156:2283], Recipient [1:7439790546961722858:2197]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2024-11-21T17:47:01.972795Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T17:47:01.972803Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:01.972805Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:01.972828Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2024-11-21T17:47:01.972870Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732211221972550) 2024-11-21T17:47:01.972967Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732211221972550 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T17:47:01.973019Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T17:47:01.974826Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T17:47:01.974986Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211221972550&action=1" } } } 2024-11-21T17:47:01.975032Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:01.975055Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:01.975110Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:01.975142Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7439790546961723156:2283], Recipient [1:7439790546961722858:2197]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211221972550&action=1" } UserToken: "" } 2024-11-21T17:47:01.975151Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 
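The difference between SimpleTenantsTest and SimpleTenantsTestSyncOperation is visible in the console traffic: the sync variant sends operation_params { operation_mode: SYNC } and immediately registers a TEvConsole::TEvNotifyOperationCompletionRequest, so the final status is pushed to it ("Send /Root/users/user-1 notification to ..."), whereas the first test polls with TEvConsole::TEvGetOperationRequest. A trivial classifier over a captured log, using only event names that appear verbatim in these traces:

```python
def completion_style(log_text):
    """Rough guess at how a test waited for CMS operations to finish."""
    if "TEvConsole::TEvNotifyOperationCompletionRequest" in log_text:
        return "sync: subscribed for a completion notification"
    if "TEvConsole::TEvGetOperationRequest" in log_text:
        return "async: polled GetOperation until ready"
    return "unknown"
```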
2024-11-21T17:47:01.975260Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7439790546961723156:2283] 2024-11-21T17:47:01.975279Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211221972550&action=1" } } 2024-11-21T17:47:01.975302Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T17:47:01.975332Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:47:01.977208Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T17:47:01.977227Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:01.977240Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439790546961723161:2197], Recipient [1:7439790546961722858:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:01.977244Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:01.977248Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:01.977250Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:01.977262Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T17:47:01.977268Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T17:47:01.977287Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T17:47:01.978645Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:01.978657Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:01.978659Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:01.978660Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:01.978671Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_ ... 
n request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:02.061704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:3 2024-11-21T17:47:02.061706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:02.061708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:02.061716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2024-11-21T17:47:02.061722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:02.061724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:02.061731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2024-11-21T17:47:02.061733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:02.061735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:02.061806Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2024-11-21T17:47:02.061813Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T17:47:02.061822Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.061834Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7439790551256691143:2197], Recipient [1:7439790546961722858:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.061841Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.061846Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.061848Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.061856Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2024-11-21T17:47:02.061869Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1732211222050388 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.061899Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211222050388 issue= 2024-11-21T17:47:02.063057Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2024-11-21T17:47:02.063079Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T17:47:02.063084Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.063156Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790546961722758:2196], Recipient [1:7439790546961722858:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:02.063164Z node 1 :CMS_TENANTS TRACE: StateWork, processing event 
TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:02.063169Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.063171Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.063177Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T17:47:02.063182Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732211222050388 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.063388Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:02.063742Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2024-11-21T17:47:02.063755Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2024-11-21T17:47:02.063757Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2024-11-21T17:47:02.063760Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2024-11-21T17:47:02.063763Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2024-11-21T17:47:02.064318Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2024-11-21T17:47:02.064413Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2024-11-21T17:47:02.064667Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2024-11-21T17:47:02.064679Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2024-11-21T17:47:02.064784Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2024-11-21T17:47:02.064925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:02.064931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:02.065090Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T17:47:02.065098Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.065105Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:02.065135Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:02.065332Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2024-11-21T17:47:02.065341Z node 1 :CMS_TENANTS DEBUG: 
TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2024-11-21T17:47:02.066644Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2024-11-21T17:47:02.066664Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439790551256691343:2197], Recipient [1:7439790546961722858:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:02.066681Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:02.066685Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.066687Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.066694Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T17:47:02.066700Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T17:47:02.067669Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:02.067675Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.067677Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.067679Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.067696Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732211222050388 2024-11-21T17:47:02.067699Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211222050388 issue= 2024-11-21T17:47:02.067702Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732211222050388 issue= 2024-11-21T17:47:02.067703Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T17:47:02.067724Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732211222050388 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.072382Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2024-11-21T17:47:02.072469Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7439790551256691134:2395]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222050388&action=2" ready: true status: SUCCESS } } 2024-11-21T17:47:02.072510Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.073334Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439790551256691372:2396], Recipient [1:7439790546961722858:2197]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2024-11-21T17:47:02.073345Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T17:47:02.073378Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2024-11-21T17:47:02.073961Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7439790551256691375:2397], Recipient [1:7439790546961722858:2197]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 
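Every entry carries an ISO-8601 timestamp, so rough latencies can be read straight off the log; for example, the gap between "TTxCreateTenant Complete" and "TTxRemoveTenantDone Complete" bounds how long the tenant existed in this run. A small helper under that assumption (the marker strings are just substrings of the entries above; nothing here is part of the test framework):

```python
import re
from datetime import datetime

TS = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)Z")

def timestamp_before(log_text, marker):
    """Timestamp of the last log entry starting at or before the first occurrence of marker."""
    pos = log_text.find(marker)
    if pos < 0:
        return None
    last = None
    for m in TS.finditer(log_text, 0, pos + len(marker)):
        last = m
    return datetime.fromisoformat(last.group(1)) if last else None

def elapsed_seconds(log_text, first_marker, second_marker):
    a = timestamp_before(log_text, first_marker)
    b = timestamp_before(log_text, second_marker)
    return (b - a).total_seconds() if a and b else None

# elapsed_seconds(log, "TTxCreateTenant Complete", "TTxRemoveTenantDone Complete")
```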
2024-11-21T17:47:02.073969Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2024-11-21T17:47:02.074026Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2024-11-21T17:47:02.074838Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T17:47:02.074903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:47:02.287939Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:47:02.288059Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439790551347283625:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:47:02.340443Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439790551347283625:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 2458570542665158807 2024-11-21T17:46:59.983239Z 1 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3084328:2] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T17:46:59.983350Z 5 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3084328:6] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T17:46:59.983358Z 4 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3084328:5] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T17:46:59.983364Z 2 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3084328:3] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T17:46:59.983372Z 8 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3084328:1] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} 2024-11-21T17:46:59.983378Z 3 00h01m08.010512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:2:11:0:11:3084328:4] barrier# {Soft# {Gen# 2 Step# 7} Hard# {Gen# 2 Step# 4294967295}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2024-11-21T17:47:00.093238Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.093946Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.094687Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.095561Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.095579Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.099991Z 1 00h02m38.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.102956Z 1 00h02m38.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.108490Z 1 00h02m38.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.111389Z 1 00h02m38.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.116646Z 1 00h02m38.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.119318Z 1 00h02m38.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.129238Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.129298Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only 
Sender# [1:5285:696] 2024-11-21T17:47:00.132506Z 1 00h02m39.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.150490Z 1 00h02m39.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.152883Z 1 00h02m39.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.166766Z 1 00h02m39.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.168924Z 1 00h02m39.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.178956Z 1 00h02m40.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.181609Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.181650Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.187693Z 1 00h02m40.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.190580Z 1 00h02m40.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.192576Z 1 00h02m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.194261Z 1 00h02m40.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.199453Z 1 00h02m40.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.202388Z 1 00h02m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.205357Z 1 00h02m40.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.216491Z 1 00h02m40.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.220335Z 1 00h02m41.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.222666Z 1 00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.222722Z 1 00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.227804Z 1 00h02m41.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.230696Z 1 00h02m41.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.236456Z 1 00h02m41.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.239602Z 1 00h02m41.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.244726Z 1 00h02m41.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.251525Z 1 00h02m42.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.263582Z 1 00h02m42.110512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.263630Z 1 00h02m42.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.273292Z 1 00h02m42.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.275182Z 1 00h02m42.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.284777Z 1 00h02m42.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.287003Z 1 00h02m42.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.295761Z 1 00h02m42.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.307986Z 1 00h02m43.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.310897Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.310936Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.313917Z 1 00h02m43.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.316392Z 1 00h02m43.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.318852Z 1 00h02m43.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.320357Z 1 00h02m43.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.323413Z 1 00h02m43.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.326178Z 1 00h02m43.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.328585Z 1 00h02m43.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.330858Z 1 00h02m43.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.333209Z 1 00h02m44.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.340333Z 1 00h02m44.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.344063Z 1 00h02m44.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.347355Z 1 00h02m44.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.357222Z 1 00h02m44.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.359560Z 1 00h02m44.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.365666Z 1 00h02m44.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.367629Z 1 00h02m44.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.378031Z 1 
00h02m45.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.381323Z 1 00h02m45.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.383594Z 1 00h02m45.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.395872Z 1 00h02m45.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.398252Z 1 00h02m45.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.402426Z 1 00h02m45.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.404527Z 1 00h02m45.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.410913Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.410950Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.410960Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5285:696] 2024-11-21T17:47:00.412475Z 5 00h02m46.110512s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:3:11:0:11:2231199:3] barrier# {Soft# {Gen# 3 Step# 8} Hard# {Gen# 3 Step# 4294967295}} 2024-11-21T17:47:00.412605Z 4 00h ... tVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2024-11-21T17:47:01.580533Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.581044Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.583805Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.584692Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.584746Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.588496Z 8 00h20m55.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.590912Z 8 00h20m55.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.595368Z 8 00h20m55.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.601897Z 8 00h20m55.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.605443Z 8 00h20m55.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.607534Z 8 00h20m55.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in 
read-only Sender# [1:5334:745] 2024-11-21T17:47:01.611781Z 8 00h20m56.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.611844Z 8 00h20m56.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.614138Z 8 00h20m56.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.629502Z 8 00h20m56.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.631722Z 8 00h20m56.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.639417Z 8 00h20m56.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.641524Z 8 00h20m56.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.649418Z 8 00h20m56.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.654133Z 8 00h20m57.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.654166Z 8 00h20m57.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.656972Z 8 00h20m57.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.663875Z 8 00h20m57.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.666629Z 8 00h20m57.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.668917Z 8 00h20m57.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.671269Z 8 00h20m57.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.673326Z 8 00h20m57.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.679023Z 8 00h20m57.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.685513Z 8 00h20m57.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.697236Z 8 00h20m58.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.697280Z 8 00h20m58.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.713397Z 8 00h20m58.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.715693Z 8 00h20m58.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.719520Z 8 00h20m58.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.725657Z 8 00h20m58.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.730276Z 8 00h20m58.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.732825Z 8 00h20m58.952048s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.736994Z 8 00h20m59.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.737042Z 8 00h20m59.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.743393Z 8 00h20m59.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.746461Z 8 00h20m59.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.750167Z 8 00h20m59.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.756455Z 8 00h20m59.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.761469Z 8 00h20m59.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.763719Z 8 00h20m59.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.771300Z 8 00h21m00.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.771334Z 8 00h21m00.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.778128Z 8 00h21m00.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.784657Z 8 00h21m00.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.790058Z 8 00h21m00.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.799510Z 8 00h21m00.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.803583Z 8 00h21m00.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.808953Z 8 00h21m00.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.816407Z 8 00h21m01.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.816444Z 8 00h21m01.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.824270Z 8 00h21m01.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.827364Z 8 00h21m01.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.840410Z 8 00h21m01.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.843547Z 8 00h21m01.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.845861Z 8 00h21m01.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.850947Z 8 00h21m01.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.857667Z 8 00h21m01.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.867001Z 8 
00h21m02.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.869971Z 8 00h21m02.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.872009Z 8 00h21m02.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.884829Z 8 00h21m02.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.887757Z 8 00h21m02.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.897013Z 8 00h21m02.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.899475Z 8 00h21m02.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.907387Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.907435Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.907464Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5334:745] 2024-11-21T17:47:01.908933Z 1 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2128127:3] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2024-11-21T17:47:01.909063Z 4 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2128127:6] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2024-11-21T17:47:01.909093Z 3 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2128127:5] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2024-11-21T17:47:01.909102Z 7 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2128127:1] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2024-11-21T17:47:01.909112Z 2 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2128127:4] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2024-11-21T17:47:01.909181Z 5 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2128127:2] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} 2024-11-21T17:47:01.910111Z 5 00h21m03.052048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# LogoBlobs; putting blob beyond the barrier id# [1:5:11:0:11:2128127:2] barrier# {Soft# {Gen# 5 Step# 7} Hard# {Gen# 5 Step# 4294967295}} |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> 
TLocalTests::TestAlterTenant |76.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2024-11-21T17:47:02.581121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790552346889620:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:02.581174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001157/r3tmp/tmpRFnXvc/pdisk_1.dat 2024-11-21T17:47:02.633558Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3383, node 1 2024-11-21T17:47:02.638387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:02.638398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:02.638399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:02.638422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:02.656627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.657286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:02.657302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.657941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:02.658007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:02.658025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:47:02.658406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:02.658454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:02.658461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:47:02.658728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.659460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211222707, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:02.659473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:02.659525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:47:02.659855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:02.659906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:02.659917Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:47:02.659925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:47:02.659937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:47:02.659948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:47:02.660337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:47:02.660352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:47:02.660355Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:02.660390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:18273 2024-11-21T17:47:02.673980Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking 2024-11-21T17:47:02.673991Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent 2024-11-21T17:47:02.675671Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active 2024-11-21T17:47:02.681299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:02.681328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:02.682887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:02.685204Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7439790552346890299:2284], Recipient [1:7439790552346890059:2208]: 
NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" } 2024-11-21T17:47:02.685222Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2024-11-21T17:47:02.685609Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2024-11-21T17:47:02.315561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790551871482765:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:02.315624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00117b/r3tmp/tmp0jf6Ug/pdisk_1.dat 2024-11-21T17:47:02.375853Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15900, node 1 2024-11-21T17:47:02.387415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:02.387432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:02.387434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:02.387477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9605 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:02.415784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:02.415822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:02.415847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.416925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:02.416947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.417359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:02.417460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:02.417519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:02.417529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:47:02.418034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:02.418066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:02.418069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:47:02.418426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.419356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211222462, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:02.419368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:02.419427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:47:02.419802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:02.419855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:02.419871Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:47:02.419882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:47:02.419897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:47:02.419914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:47:02.420331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:47:02.420347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:47:02.420352Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:02.420364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:47:02.428284Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439790551871483483:2283], Recipient [1:7439790551871483193:2196]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2024-11-21T17:47:02.428307Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T17:47:02.428314Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.428318Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.428344Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2024-11-21T17:47:02.428389Z node 1 :CMS_TENANTS DEBUG: Add tenant 
/Root/users/user-1 (txid = 1732211222428358) 2024-11-21T17:47:02.428503Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732211222428358 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T17:47:02.428561Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T17:47:02.430162Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T17:47:02.430362Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222428358&action=1" } } } 2024-11-21T17:47:02.430407Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.430434Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:02.430465Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:02.430611Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T17:47:02.430633Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:47:02.431516Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439790551871483491:2284], Recipient [1:7439790551871483193:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222428358&action=1" } UserToken: "" } 2024-11-21T17:47:02.431529Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T17:47:02.431568Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222428358&action=1" } } 2024-11-21T17:47:02.432557Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T17:47:02.432574Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:02.432586Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439790551871483488:2196], Recipient [1:7439790551871483193:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:02.432589Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:02.432594Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.432598Z node 1 
:CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.432607Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T17:47:02.432618Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T17:47:02.432635Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T17:47:02.433921Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:02.433934Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.433935Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.433937Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.433952Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2024-11-21T17:47:02.433964Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1732211222428358 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.435305Z node 1 :CMS_TE ... 7594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T17:47:02.489216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T17:47:02.489218Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:47:02.489450Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2024-11-21T17:47:02.489847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211222539, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:02.489859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 281474976710660:0 HandleReply TEvOperationPlan, step: 1732211222539, at schemeshard: 72057594046644480 2024-11-21T17:47:02.489871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 128 -> 134 2024-11-21T17:47:02.490231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 281474976710660:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:02.490264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 134 -> 135 2024-11-21T17:47:02.490530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:02.490578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:02.490599Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976710660:0 ProgressState 2024-11-21T17:47:02.490609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 135 -> 240 2024-11-21T17:47:02.490714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T17:47:02.490726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T17:47:02.490729Z node 
1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T17:47:02.490754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T17:47:02.490763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T17:47:02.490765Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:47:02.491011Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:0 ProgressState 2024-11-21T17:47:02.491029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:0 progress is 1/1 2024-11-21T17:47:02.491036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T17:47:02.491410Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2024-11-21T17:47:02.491419Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T17:47:02.491424Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.491432Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7439790551871483702:2196], Recipient [1:7439790551871483193:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.491433Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:02.491436Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.491437Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.491440Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2024-11-21T17:47:02.491444Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1732211222486216 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.491452Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211222486216 issue= 2024-11-21T17:47:02.493402Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2024-11-21T17:47:02.493428Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T17:47:02.493430Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.493463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037889, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037894, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037891, at schemeshard: 72057594046644480 
2024-11-21T17:47:02.493604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037897, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037888, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037896, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037893, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037890, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037895, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186224037892, at schemeshard: 72057594046644480 2024-11-21T17:47:02.493744Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790551871483083:2192], Recipient [1:7439790551871483193:2196]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:02.493758Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:02.493763Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.493765Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.493771Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T17:47:02.493774Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732211222486216 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.494787Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T17:47:02.494803Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.494810Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:02.494844Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:02.494979Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2024-11-21T17:47:02.494998Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2024-11-21T17:47:02.496479Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2024-11-21T17:47:02.496503Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439790551871483904:2196], Recipient [1:7439790551871483193:2196]: 
NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:02.496514Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:02.496517Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.496518Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.496524Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T17:47:02.496528Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T17:47:02.497907Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:02.497921Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.497922Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.497923Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.497931Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732211222486216 2024-11-21T17:47:02.497933Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211222486216 issue= 2024-11-21T17:47:02.497934Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732211222486216 issue= 2024-11-21T17:47:02.497935Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T17:47:02.497947Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732211222486216 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:02.498818Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2024-11-21T17:47:02.498842Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.539515Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439790551871483923:2292], Recipient [1:7439790551871483193:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222486216&action=2" } UserToken: "" } 2024-11-21T17:47:02.539530Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T17:47:02.539582Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222486216&action=2" ready: true status: SUCCESS } } |76.9%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> TLocalTests::TestAlterTenant [GOOD] >> TGRpcCmsTest::AuthTokenTest [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2024-11-21T17:47:02.987930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:02.987952Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:02.992391Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:02.992776Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:02.992851Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:02.992857Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:02.992864Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:02.993005Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:02.993326Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:02.993333Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:02.993336Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:02.993338Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:02.993347Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:02.993374Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:02.993388Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:02.993392Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:03.014628Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:03.014656Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2024-11-21T17:47:03.014660Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:03.014667Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.024955Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:198:2196], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.025383Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039946, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigRequest { Config { BannedNodeIds { From: 1025 To: 1032 } } } 2024-11-21T17:47:03.025396Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvSetConfigRequest 2024-11-21T17:47:03.025402Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.025407Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.025427Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Execute Config { BannedNodeIds { From: 1025 To: 1032 } } 2024-11-21T17:47:03.025448Z node 1 :NODE_BROKER DEBUG: Update config in database config=BannedNodeIds { From: 1025 To: 1032 } 2024-11-21T17:47:03.036111Z node 1 :NODE_BROKER DEBUG: TTxUpdateConfig Complete 2024-11-21T17:47:03.036159Z node 1 :NODE_BROKER TRACE: TTxUpdateConfig reply with: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2024-11-21T17:47:03.036165Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.036269Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:202:2200], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.036281Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:03.036286Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.036297Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:03.036328Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:204:2202], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.036360Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:03.036365Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:03.036373Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:03.036940Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:03.036961Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:03.036982Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:205:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.036987Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.036992Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.036995Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.037009Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:03.037013Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:03.037111Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 
dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:03.037148Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:03.047852Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:03.047872Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:03.047877Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:03.047880Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:03.047923Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2024-11-21T17:47:03.047928Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.048009Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:217:2208], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.048033Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:03.048038Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:03.048044Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:03.048103Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:03.048111Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:03.048118Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:218:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.048121Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.048123Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.048125Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.048133Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:03.048135Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not 
fixed) tenant: dc-1 2024-11-21T17:47:03.048166Z node 1 :NODE_BROKER DEBUG: Adding node #1033 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:03.048190Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:03.058952Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:03.058974Z node 1 :NODE_BROKER DEBUG: Added node #1033 host2:1001 2024-11-21T17:47:03.058981Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:03.058985Z node 1 :NODE_BROKER DEBUG: Add node #1033 host2:1001 to epoch cache 2024-11-21T17:47:03.059039Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1033 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200023000 Name: "slot-1" } 2024-11-21T17:47:03.059045Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.059138Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:223:2213], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.059164Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 1001 ResolveHost: "host3.yandex.net" Address: "1.2.3.6" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "6" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:4 ... DE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:171:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:03.310966Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:03.310974Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.310979Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.311011Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:03.311022Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T17:47:03.343197Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T17:47:03.343222Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.343229Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:47:03.354159Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:03.354184Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T17:47:03.354195Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T17:47:03.354213Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:03.354218Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=2 expired=1 2024-11-21T17:47:03.354250Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 
1970-01-01T04:00:00.023000Z 2024-11-21T17:47:03.354259Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.354387Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:290:2270], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.354416Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:03.354422Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.354428Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T17:47:03.354471Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:292:2272], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.354493Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T17:47:03.354498Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:03.354514Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T17:47:03.354550Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:294:2274], Recipient [1:171:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.354575Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:171:2177]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:03.354580Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:03.354589Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:03.354695Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:03.354710Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:03.354722Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:295:2177], Recipient [1:171:2177]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 
2024-11-21T17:47:03.354726Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.354731Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.354735Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.354747Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:03.354751Z node 1 :NODE_BROKER DEBUG: Registration request from host4:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:03.354759Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2024-11-21T17:47:03.354774Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:03.354782Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } 2024-11-21T17:47:03.354787Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.354932Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:164:2173], Recipient [1:171:2177]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:47:03.354953Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T17:47:03.354958Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T17:47:03.356701Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:03.357418Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:03.357475Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:03.357821Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.357829Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.357840Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:03.357885Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:03.357890Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.357893Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.357896Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.357950Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:03.358023Z node 1 :NODE_BROKER DEBUG: Loaded config: BannedNodeIds { From: 1024 To: 1029 } BannedNodeIds { From: 1031 To: 1032 } 2024-11-21T17:47:03.358033Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #3.6 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T17:47:03.358051Z node 1 :NODE_BROKER DEBUG: Added expired node #1024 host1:1001 2024-11-21T17:47:03.358067Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T17:47:03.358076Z node 1 :NODE_BROKER DEBUG: Added node #1030 host3:1001 2024-11-21T17:47:03.358081Z node 1 :NODE_BROKER DEBUG: Loaded node #1030 host3:1001 expiring Thu, 01 Jan 1970 03:00:00 UTC 2024-11-21T17:47:03.358087Z node 1 :NODE_BROKER DEBUG: Added node #1033 host2:1001 2024-11-21T17:47:03.358091Z node 1 :NODE_BROKER DEBUG: Loaded node #1033 host2:1001 expiring Thu, 01 Jan 1970 03:00:00 UTC 2024-11-21T17:47:03.358104Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:03.358116Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:03.358121Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=2 expired=1 
2024-11-21T17:47:03.358138Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.359068Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:338:2305], Recipient [1:304:2278]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.359114Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:304:2278]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:03.359121Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:03.359130Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:03.359210Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:03.359223Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:03.359235Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:339:2278], Recipient [1:304:2278]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.359239Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.359244Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.359247Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.359255Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:03.359259Z node 1 :NODE_BROKER DEBUG: Registration request from host4:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:03.359268Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2024-11-21T17:47:03.359279Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:03.359288Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } 2024-11-21T17:47:03.359292Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] Test command err: 2024-11-21T17:47:03.363121Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T17:47:03.393826Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T17:47:03.406073Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# 
NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:03.407557Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:03.407970Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:03.408104Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:03.408488Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:03.408500Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:03.408528Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:03.410563Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:03.410609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:03.410629Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:03.410649Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:03.410661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:03.410673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:03.433750Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:03.433813Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:03.444740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:03.444791Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:03.444809Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:03.444822Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:03.444846Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:03.444855Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:03.444862Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:03.444870Z 
node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:03.455894Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:03.455961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:03.456157Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:03.456165Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:03.457742Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:03.457963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:03.458229Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001da0/r3tmp/tmpvNRBvl/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:47:03.458326Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/001da0/r3tmp/tmpvNRBvl/pdisk_1.dat 2024-11-21T17:47:03.458534Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:03.458561Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:03.458577Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:03.458615Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:03.458651Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:03.459096Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:03.459179Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:03.470127Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 
2024-11-21T17:47:03.470306Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:03.470372Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:03.470380Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:03.470410Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:03.470434Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:03.470516Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:03.470523Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:03.470529Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:03.470546Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T17:47:03.470877Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:03.470884Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T17:47:03.471001Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 2024-11-21T17:47:03.471017Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:03.471025Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:03.471028Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:03.486368Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T17:47:03.486392Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T17:47:03.491861Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T17:47:03.491919Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 5 Memory: 5 Network: 1) 2024-11-21T17:47:03.492037Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:03.492044Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:03.492062Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:369:2316] 2024-11-21T17:47:03.492463Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:369:2316] 2024-11-21T17:47:03.492575Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:03.492585Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:03.492589Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:03.495137Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Alter tenant /dc-1/users/tenant-1 2024-11-21T17:47:03.495182Z node 1 :LOCAL DEBUG: Updated resoure limit: CPU: 10 Memory: 10 Network: 10 2024-11-21T17:47:03.495187Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:03.495216Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Alter tenant /dc-1/users/tenant-2 
2024-11-21T17:47:03.495228Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Alter tenant /dc-1/users/tenant-unknown >> TEnumerationTest::TestPublish [GOOD] >> TNodeBrokerTest::TestListNodes >> TNodeBrokerTest::ResolveScopeIdForServerless ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2024-11-21T17:47:02.798597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790550892287587:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:02.798641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00112d/r3tmp/tmphH7eO6/pdisk_1.dat 2024-11-21T17:47:02.848904Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3470, node 1 2024-11-21T17:47:02.860139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:02.860153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:02.860154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:02.860195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:02.882991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.883659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:02.883674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.884339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:02.884391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:02.884400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:47:02.884768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:02.884780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:47:02.884828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:02.885098Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.885811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211222931, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:02.885824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:02.885879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:47:02.886221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:02.886265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:02.886278Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:47:02.886292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:47:02.886305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:47:02.886319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:47:02.886635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:47:02.886650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:47:02.886654Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:02.886664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:47:02.898798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:02.898841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:02.900313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61360 2024-11-21T17:47:02.905366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.905418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:02.905423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.905435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:47:02.905466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:47:02.905475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T17:47:02.905947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:user-1@builtin 2024-11-21T17:47:02.906004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:02.906052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:02.906177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:47:02.906193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:47:02.906197Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:47:02.906212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T17:47:02.915494Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439790550892288321:2283], Recipient [1:7439790550892288009:2193]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2024-11-21T17:47:02.915510Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T17:47:02.915517Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.915519Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.915540Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2024-11-21T17:47:02.915579Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732211222915424) 2024-11-21T17:47:02.915660Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732211222915424 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T17:47:02.915704Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" 
VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T17:47:02.917149Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T17:47:02.917367Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222915424&action=1" } } } 2024-11-21T17:47:02.917445Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.917476Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:02.917516Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:02.917670Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T17:47:02.917698Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:47:02.918513Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439790550892288330:2284], Recipient [1:7439790550892288009:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenan ... 2024-11-21T17:47:03.069575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:03.069582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:6 2024-11-21T17:47:03.069584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:03.069585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:03.069592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:3 2024-11-21T17:47:03.069593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:03.069596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:03.069603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:8 2024-11-21T17:47:03.069604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:03.069606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:03.069614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:5 2024-11-21T17:47:03.069623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:47:03.069625Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:47:03.069704Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710663 2024-11-21T17:47:03.069712Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T17:47:03.069724Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:03.069737Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7439790555187256118:2193], Recipient [1:7439790550892288009:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:03.069745Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2024-11-21T17:47:03.069751Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:03.069753Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:03.069760Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2024-11-21T17:47:03.069774Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1732211223061867 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2024-11-21T17:47:03.069792Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211223061867 issue=AccessDenied: Access denied for request 2024-11-21T17:47:03.070995Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2024-11-21T17:47:03.071022Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.071027Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:03.071353Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:03.071778Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790550892287904:2192], Recipient [1:7439790550892288009:2193]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.071790Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:03.071796Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:03.071798Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:03.071805Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2024-11-21T17:47:03.071813Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1732211223061867 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2024-11-21T17:47:03.071817Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2024-11-21T17:47:03.071823Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2024-11-21T17:47:03.071826Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2024-11-21T17:47:03.071828Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2024-11-21T17:47:03.071831Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2024-11-21T17:47:03.071833Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2024-11-21T17:47:03.071836Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2024-11-21T17:47:03.071839Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2024-11-21T17:47:03.071842Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2024-11-21T17:47:03.071853Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2024-11-21T17:47:03.072860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:03.072874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:47:03.073603Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2024-11-21T17:47:03.073622Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:03.073631Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:03.073667Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:03.073838Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2024-11-21T17:47:03.073861Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2024-11-21T17:47:03.074973Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2024-11-21T17:47:03.075005Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7439790555187256288:2193], Recipient [1:7439790550892288009:2193]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:03.075022Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2024-11-21T17:47:03.075026Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:03.075028Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:03.075036Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2024-11-21T17:47:03.075042Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2024-11-21T17:47:03.075997Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:03.076007Z node 1 :CMS_TENANTS TRACE: 
TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:03.076008Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:03.076009Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:03.076018Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1732211223061867 2024-11-21T17:47:03.076020Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1732211223061867 issue=AccessDenied: Access denied for request 2024-11-21T17:47:03.076022Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1732211223061867 issue=AccessDenied: Access denied for request 2024-11-21T17:47:03.076023Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2024-11-21T17:47:03.076035Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1732211223061867 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2024-11-21T17:47:03.076986Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2024-11-21T17:47:03.077008Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:03.115504Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439790555187256337:2334], Recipient [1:7439790550892288009:2193]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211223061867&action=2" } UserToken: "" } 2024-11-21T17:47:03.115520Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T17:47:03.115582Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211223061867&action=2" ready: true status: SUCCESS } } 2024-11-21T17:47:03.130703Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T17:47:03.130782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:47:03.232885Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439790553980407642:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:47:03.233007Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:47:03.328410Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439790553980407642:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TNodeBrokerTest::TestListNodesEpochDeltas ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2024-11-21T17:47:02.828618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790548668088918:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:02.828795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001142/r3tmp/tmplKUuAd/pdisk_1.dat 2024-11-21T17:47:02.878434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9953, node 1 2024-11-21T17:47:02.889423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:02.889437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:02.889438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:02.889484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:02.928734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:02.928773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:02.930192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:02.955932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.956989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:02.957014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.957636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:02.957709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:02.957717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:47:02.958117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:02.958133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:47:02.958196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:02.958614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.959609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211223008, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:02.959623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:02.959687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:47:02.960167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:02.960259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:02.960282Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:47:02.960306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:47:02.960324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:47:02.960353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:47:02.960795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:47:02.960819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:47:02.960824Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:02.960865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:47:02.968107Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7439790548668089637:2283], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T17:47:02.968122Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2024-11-21T17:47:02.968129Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.968131Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.968147Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2024-11-21T17:47:02.968194Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1732211222967930) 2024-11-21T17:47:02.968296Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1732211222967930 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2024-11-21T17:47:02.968351Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2024-11-21T17:47:02.970383Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T17:47:02.970559Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222967930&action=1" } } } 2024-11-21T17:47:02.970599Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.970624Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2024-11-21T17:47:02.970664Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2024-11-21T17:47:02.970777Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2024-11-21T17:47:02.970804Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" 
ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:47:02.971457Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7439790548668089646:2284], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222967930&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T17:47:02.971465Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2024-11-21T17:47:02.971500Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1732211222967930&action=1" } } 2024-11-21T17:47:02.972535Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2024-11-21T17:47:02.972547Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:02.972559Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7439790548668089642:2192], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:02.972561Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2024-11-21T17:47:02.972564Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.972566Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.972573Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2024-11-21T17:47:02.972577Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2024-11-21T17:47:02.972588Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2024-11-21T17:47:02.974008Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2024-11-21T17:47:02.974020Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:02.974022Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:02.974023Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:02.974035Z no ... 
hemeshard: 72057594046644480 2024-11-21T17:47:03.044948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TSyncHive, operationId 281474976715659:0, ProgressState, NeedSyncHive: 0 2024-11-21T17:47:03.044956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 240 -> 240 2024-11-21T17:47:03.045115Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439790552963057406:2322], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T17:47:03.045124Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T17:47:03.045128Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.045138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:03.045147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:03.045156Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2024-11-21T17:47:03.045187Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790548668089251:2202], Recipient [1:7439790548668089344:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.045194Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:03.045270Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T17:47:03.045360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:47:03.045378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:47:03.045394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:47:03.045715Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439790552963057420:2323], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T17:47:03.045723Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T17:47:03.045728Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.045740Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790548668089251:2202], Recipient [1:7439790548668089344:2192]: NKikimrTenantSlotBroker.TTenantState 
TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.045746Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:03.045818Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T17:47:03.045839Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2024-11-21T17:47:03.045845Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2024-11-21T17:47:03.045850Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2024-11-21T17:47:03.045857Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7439790548668089742:2192], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2024-11-21T17:47:03.045863Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2024-11-21T17:47:03.045867Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2024-11-21T17:47:03.045869Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2024-11-21T17:47:03.045876Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2024-11-21T17:47:03.045881Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1732211222967930 errorcode=STATUS_CODE_UNSPECIFIED issue= 2024-11-21T17:47:03.045893Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 2024-11-21T17:47:03.046316Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439790552963057424:2324], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T17:47:03.046323Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T17:47:03.046328Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.046342Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790548668089251:2202], Recipient [1:7439790548668089344:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.046343Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:03.046402Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T17:47:03.046725Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439790552963057428:2325], Recipient [1:7439790548668089344:2192]: 
NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T17:47:03.046730Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T17:47:03.046732Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.046754Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790548668089251:2202], Recipient [1:7439790548668089344:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.046762Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:03.046831Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2024-11-21T17:47:03.047129Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2024-11-21T17:47:03.047137Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2024-11-21T17:47:03.047205Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7439790552963057434:2326], Recipient [1:7439790548668089344:2192]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2024-11-21T17:47:03.047210Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2024-11-21T17:47:03.047212Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.047225Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7439790548668089251:2202], Recipient [1:7439790548668089344:2192]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2024-11-21T17:47:03.047230Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2024-11-21T17:47:03.047270Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected to server localhost:5859 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037889 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 
18446744073709551615 SchemeshardId: 72075186224037889 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanReso... (TRUNCATED) 2024-11-21T17:47:03.086364Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T17:47:03.086571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:47:03.086671Z node 3 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:03.370501Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:47:03.370653Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439790555565141914:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] >> TNodeBrokerTest::BasicFunctionality [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2024-11-21T17:47:01.755614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:01.755637Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:01.760510Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:01.760986Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:01.761075Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.761082Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.761093Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:01.761266Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:01.762292Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:01.762305Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.762310Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.762314Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.762330Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:01.762377Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T17:47:01.762396Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:01.762404Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:01.794537Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:01.794571Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T17:47:01.794575Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:01.794582Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.804913Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:577:2205], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.805317Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:01.805334Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:01.805350Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:01.805437Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:579:2207], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.805498Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:530:2178], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:01.805505Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:01.805514Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:01.806217Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:01.806239Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:01.806257Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:580:2184], Recipient [1:541:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:01.806261Z node 1 :NODE_BROKER TRACE: StateWork, 
processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:01.806266Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.806269Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.806288Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:01.806292Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:01.806385Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:01.806424Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:01.817254Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:01.817279Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:01.817286Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:01.817291Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:01.817354Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T17:47:01.817361Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.817512Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:592:2213], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.817528Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:530:2178], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T17:47:01.817534Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:01.817571Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } } 2024-11-21T17:47:01.817618Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:594:2215], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.817632Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:01.817637Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:01.817646Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:01.817694Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:596:2217], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.817716Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:530:2178], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" 
Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:01.817720Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:01.817729Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:01.817843Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:01.817857Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:01.817871Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:597:2184], Recipient [1:541:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:01.817875Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:01.817879Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.817882Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.817893Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:01.817897Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:01.817923Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:01.817967Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:01.828619Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:01.828639Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T17:47:01.828644Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:01.828648Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T17:47:01.828695Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200025000 Name: "slot-1" } 2024-11-21T17:47:01.828701Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.828813Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:602:2222], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.828837Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:541:2184]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvListN ... teWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.625777Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #5 2024-11-21T17:47:02.625782Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:776:2313] 2024-11-21T17:47:02.625784Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.625787Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #5 2024-11-21T17:47:02.636545Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:02.636568Z node 1 :NODE_BROKER DEBUG: Node #1026 host3:1001 has expired 2024-11-21T17:47:02.636576Z node 1 :NODE_BROKER DEBUG: Node #1027 host1:1001 has expired 2024-11-21T17:47:02.636583Z node 1 :NODE_BROKER DEBUG: Move to new epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636598Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:02.636604Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #5 nodes=2 expired=2 2024-11-21T17:47:02.636640Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636649Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636657Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636664Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636671Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636679Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636686Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636692Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.636698Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:02.637000Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:812:2337], Recipient [1:653:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.637028Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:653:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.637033Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.637039Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.637127Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:814:2339], Recipient [1:653:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 
2024-11-21T17:47:02.637135Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:653:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.637139Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.637144Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.637189Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:816:2341], Recipient [1:653:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.637201Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:653:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.637204Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.637209Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.760724Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:653:2244], Recipient [1:653:2244]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:02.760749Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:02.760757Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:02.760763Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:02.760783Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:02.760790Z node 1 :NODE_BROKER DEBUG: Removing node #1026 from database 2024-11-21T17:47:02.760814Z node 1 :NODE_BROKER DEBUG: Removing node #1027 from database 2024-11-21T17:47:02.760823Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.791496Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:653:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 6 } 2024-11-21T17:47:02.791523Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791530Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.791580Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:774:2311] 2024-11-21T17:47:02.791585Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791589Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.791617Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:770:2307] 2024-11-21T17:47:02.791621Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791624Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.791633Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:771:2308] 2024-11-21T17:47:02.791636Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791640Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.791643Z node 1 :NODE_BROKER TRACE: 
StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:772:2309] 2024-11-21T17:47:02.791645Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791647Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.791650Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:775:2312] 2024-11-21T17:47:02.791652Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791655Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.791660Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:776:2313] 2024-11-21T17:47:02.791663Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791666Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.791671Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:773:2310] 2024-11-21T17:47:02.791674Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.791677Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2024-11-21T17:47:02.802485Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:02.802511Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T17:47:02.802518Z node 1 :NODE_BROKER DEBUG: Node #1025 host4:1001 has expired 2024-11-21T17:47:02.802522Z node 1 :NODE_BROKER DEBUG: Remove node #1026 host3:1001 2024-11-21T17:47:02.802525Z node 1 :NODE_BROKER DEBUG: Remove node #1027 host1:1001 2024-11-21T17:47:02.802531Z node 1 :NODE_BROKER DEBUG: Move to new epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802543Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T06:00:00.025000Z 2024-11-21T17:47:02.802547Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #6 nodes=0 expired=2 2024-11-21T17:47:02.802574Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802580Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802587Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802593Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802597Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802601Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802605Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802609Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 
2024-11-21T17:47:02.802613Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:02.802840Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:830:2346], Recipient [1:653:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.802869Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:653:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.802874Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.802879Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2024-11-21T17:47:02.802916Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:832:2348], Recipient [1:653:2244]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.802923Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:653:2244]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.802925Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.802928Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2024-11-21T17:47:04.113989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:04.114010Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:04.118134Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:04.118492Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:04.118575Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.118582Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.118592Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:04.118742Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:04.119580Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:04.119591Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.119596Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.119599Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.119630Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:04.119663Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T17:47:04.119679Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.119686Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.150998Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:04.151032Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T17:47:04.151038Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:04.151046Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.161314Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:245:2199], Recipient [1:215:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.161661Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:204:2172], Recipient [1:215:2178]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.161672Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.161685Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.165576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:47:04.168748Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:275:2227], Recipient [1:215:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.168792Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:204:2172], Recipient [1:215:2178]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB" } 2024-11-21T17:47:04.168796Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:04.168802Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB" 2024-11-21T17:47:04.169226Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/SharedDB TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:04.169244Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: 
"1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "/dc-1/SharedDB": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:04.169255Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:276:2178], Recipient [1:215:2178]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:04.169258Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:04.169261Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.169263Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.169270Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:04.169273Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: /dc-1/SharedDB 2024-11-21T17:47:04.169342Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:04.169360Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:04.179988Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:04.180000Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:04.180005Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:04.180008Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:04.180045Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T17:47:04.180050Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.180261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:47:04.182427Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:312:2257], Recipient [1:215:2178]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.182464Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:204:2172], Recipient [1:215:2178]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB" } 2024-11-21T17:47:04.182468Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:04.182477Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB" 2024-11-21T17:47:04.182639Z node 1 :NODE_BROKER TRACE: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/ServerlessDB TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } 2024-11-21T17:47:04.182657Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "/dc-1/ServerlessDB": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:3 2024-11-21T17:47:04.182665Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:313:2178], Recipient [1:215:2178]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:04.182668Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:04.182671Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.182673Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.182679Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:04.182682Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: /dc-1/ServerlessDB 2024-11-21T17:47:04.182701Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:04.182717Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:04.193304Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:04.193320Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T17:47:04.193325Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:04.193328Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T17:47:04.193366Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T17:47:04.193371Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2024-11-21T17:47:01.780874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:01.780894Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:01.786132Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:01.786513Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:01.786594Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.786600Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.786612Z node 1 :NODE_BROKER 
DEBUG: TTxInitScheme Execute 2024-11-21T17:47:01.786762Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:01.787304Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:01.787313Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.787317Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.787320Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.787357Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:01.787404Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:01.787419Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.787425Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.819380Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:01.819414Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T17:47:01.819420Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:01.819428Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.829768Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:571:2205], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.830147Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:01.830156Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:01.830169Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.830230Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:573:2207], Recipient [1:535:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.830278Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:524:2178], Recipient [1:535:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:01.830284Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:01.830294Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:01.830971Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } 
ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:01.830992Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:01.831009Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:574:2184], Recipient [1:535:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:01.831014Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:01.831019Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.831023Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.831038Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:01.831043Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:01.831137Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:01.831171Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:01.842126Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:01.842152Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:01.842159Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:01.842164Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:01.842225Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2024-11-21T17:47:01.842234Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.842396Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:528:2180], Recipient [1:535:2184]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:47:01.842420Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T17:47:01.842425Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T17:47:01.843844Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:01.844694Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:01.844758Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:01.845021Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.845029Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:01.845040Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:01.845085Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:01.845089Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:01.845093Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.845096Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 
2024-11-21T17:47:01.845132Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:01.845180Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:01.845192Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.845213Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:01.845227Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T17:47:01.845240Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:01.845257Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T17:47:01.845261Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=1 expired=0 2024-11-21T17:47:01.845276Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:01.846470Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:626:2241], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.846510Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:01.846516Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:01.846527Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.949958Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:651:2242], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.950049Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T17:47:01.950058Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:01.950073Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.950524Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:652:2243], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.950566Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:653:2244], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.950605Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:654:2245], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.950650Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:655:2246], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.950664Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:656:2247], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.950712Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:652:2243] 2024-11-21T17:47:01.950716Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:01.950724Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 
1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.950740Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:653:2244] 2024-11-21T17:47:01.950743Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:01.950749Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:01.950818Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:657:2248], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:01.950830Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1: ... poch #6.7 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T17:47:02.739062Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:745:2296], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.739070Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.739073Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.739076Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.7 1970-01-01T05:00:00.024000Z - 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z 2024-11-21T17:47:02.862497Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:592:2214], Recipient [1:592:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:02.862522Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:02.862529Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:02.862533Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:02.862550Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:02.862557Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.893321Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 7 } 2024-11-21T17:47:02.893347Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893353Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.893446Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:653:2244] 2024-11-21T17:47:02.893451Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893455Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.893468Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:654:2245] 2024-11-21T17:47:02.893488Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893492Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.893497Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:655:2246] 2024-11-21T17:47:02.893500Z node 1 :NODE_BROKER TRACE: StateWork, processing event 
TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893504Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.893509Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:656:2247] 2024-11-21T17:47:02.893512Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893515Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.893521Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:657:2248] 2024-11-21T17:47:02.893524Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893527Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.893533Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:658:2249] 2024-11-21T17:47:02.893536Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893539Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.893545Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:652:2243] 2024-11-21T17:47:02.893558Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.893561Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:02.904330Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:02.904351Z node 1 :NODE_BROKER DEBUG: Move to new epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904365Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T07:00:00.024000Z 2024-11-21T17:47:02.904369Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #7 nodes=0 expired=0 2024-11-21T17:47:02.904379Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904388Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904394Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904399Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904403Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904408Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904412Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904416Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904421Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:02.904614Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, 
Sender [1:762:2302], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.904638Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.904641Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.904645Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904679Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:764:2304], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.904687Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:524:2178], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:02.904689Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:02.904692Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.024000Z - 1970-01-01T07:00:00.024000Z - 1970-01-01T08:00:00.024000Z 2024-11-21T17:47:02.904717Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:766:2306], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.904733Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:524:2178], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T17:47:02.904738Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:02.904755Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T17:47:02.904786Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:768:2308], Recipient [1:592:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:02.904805Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:524:2178], Recipient [1:592:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:02.904807Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:02.904814Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:02.904955Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:02.904964Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: 
"host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:02.904973Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:769:2214], Recipient [1:592:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:02.904975Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:02.904979Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:02.904981Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:02.904990Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:02.904993Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:02.905013Z node 1 :NODE_BROKER DEBUG: Adding node #1026 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 08:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:02.905040Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=9 2024-11-21T17:47:02.915827Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:02.915850Z node 1 :NODE_BROKER DEBUG: Added node #1026 host2:1001 2024-11-21T17:47:02.915857Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 8 to 9 2024-11-21T17:47:02.915861Z node 1 :NODE_BROKER DEBUG: Add node #1026 host2:1001 to epoch cache 2024-11-21T17:47:02.915913Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1026 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 28800024000 Name: "slot-0" } 2024-11-21T17:47:02.915920Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> KqpOlapBlobsSharing::SplitEmpty [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2024-11-21T17:47:03.049798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:03.049820Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:03.053314Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:03.053666Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:03.053738Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.053744Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.053751Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:03.053902Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:03.054585Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:03.054592Z node 1 :NODE_BROKER 
TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.054596Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.054598Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.054608Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:03.054638Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:03.054651Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.054656Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.086508Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:03.086550Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T17:47:03.086557Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:03.086567Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.096918Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:573:2205], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.097335Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:03.097345Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.097361Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.097446Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2207], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.097492Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:03.097499Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:03.097508Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:03.098184Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:03.098203Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": 
scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:03.098219Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:576:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.098223Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.098229Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.098232Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.098252Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:03.098257Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:03.098345Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:03.098388Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:03.109435Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:03.109481Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:03.109490Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:03.109495Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:03.109568Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2024-11-21T17:47:03.109580Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.109808Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:530:2180], Recipient [1:537:2184]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:47:03.109835Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T17:47:03.109840Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T17:47:03.111280Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:03.111988Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:03.112087Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.112095Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.112108Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:03.112158Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:03.112161Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.112165Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.112168Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.112219Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:03.112281Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:03.112355Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T17:47:03.112368Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #1.2 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.112391Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:03.112404Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T17:47:03.112416Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:03.112430Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T17:47:03.112433Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=1 expired=0 2024-11-21T17:47:03.112445Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:03.113329Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:628:2241], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.113382Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:03.113387Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:03.113393Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:03.113477Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:03.113489Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:03.113499Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:629:2214], Recipient [1:594:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.113502Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:03.113506Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.113508Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.113515Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:03.113518Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:03.113537Z node 1 :NODE_BROKER DEBUG: Adding node #1026 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 
02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:03.113572Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:03.124389Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:03.124412Z node 1 :NODE_BROKER DEBUG: Added node #1026 host2:1001 2024-11-21T17:47:03.124418Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:03.124422Z node 1 :NODE_BROKER DEBUG: Add node #1026 host2:1001 to epoch cache 2024-11-21T17:47:03.124477Z node 1 : ... :03.228551Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.228563Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:665:2253] 2024-11-21T17:47:03.228568Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.228573Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.228595Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:666:2254] 2024-11-21T17:47:03.228598Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.228603Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.289857Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:594:2214], Recipient [1:594:2214]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:03.289878Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:03.289885Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:03.289890Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:03.289909Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:03.289918Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.330851Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T17:47:03.330882Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.330889Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.330927Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:660:2248] 2024-11-21T17:47:03.330930Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.330933Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.330954Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:661:2249] 2024-11-21T17:47:03.330956Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.330958Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.330970Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient 
[1:662:2250] 2024-11-21T17:47:03.330972Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.330973Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.330977Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:663:2251] 2024-11-21T17:47:03.330979Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.330981Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.330984Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:664:2252] 2024-11-21T17:47:03.330986Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.330987Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.330991Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:665:2253] 2024-11-21T17:47:03.330992Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.330994Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.330998Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:666:2254] 2024-11-21T17:47:03.330999Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.331001Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:03.342029Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:03.342057Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342090Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:03.342094Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=2 expired=0 2024-11-21T17:47:03.342125Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342134Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342143Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342150Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342156Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342165Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342174Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342181Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342187Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed 
tx 2024-11-21T17:47:03.342460Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:671:2259], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.342494Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:03.342500Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.342507Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342550Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:673:2261], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.342564Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:03.342568Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:03.342572Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.342607Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:675:2263], Recipient [1:594:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.342629Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2024-11-21T17:47:03.342635Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:03.342641Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1024 2024-11-21T17:47:03.342647Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:03.342649Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) is now active 2024-11-21T17:47:03.342652Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) enqueue tx 2024-11-21T17:47:03.342654Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) starts new tx 2024-11-21T17:47:03.342665Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 2024-11-21T17:47:03.342671Z node 1 :NODE_BROKER DEBUG: Update node #1024 host1:1001 lease in database lease=2 expire=1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.353626Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:03.353693Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2024-11-21T17:47:03.353712Z node 1 :NODE_BROKER DEBUG: Extended lease of #1024 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2024-11-21T17:47:03.353719Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) completed tx 2024-11-21T17:47:03.353723Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) unlink from parent 2024-11-21T17:47:03.353726Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1024 2024-11-21T17:47:03.353731Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T17:47:03.353855Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:679:2267], Recipient [1:594:2214]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:03.353879Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:594:2214]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1026 } 2024-11-21T17:47:03.353885Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:03.353891Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1026 2024-11-21T17:47:03.353895Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:03.353898Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) is now active 2024-11-21T17:47:03.353902Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) enqueue tx 2024-11-21T17:47:03.353905Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) starts new tx 2024-11-21T17:47:03.353917Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1026 2024-11-21T17:47:03.353939Z node 1 :NODE_BROKER DEBUG: Update node #1026 host2:1001 lease in database lease=2 expire=1970-01-01T03:00:00.025000Z 2024-11-21T17:47:03.365177Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:03.365265Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1026 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2024-11-21T17:47:03.365287Z node 1 :NODE_BROKER DEBUG: Extended lease of #1026 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2024-11-21T17:47:03.365295Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) completed tx 2024-11-21T17:47:03.365299Z node 1 :NODE_BROKER TRACE: TTxProcessor(1026) unlink from parent 2024-11-21T17:47:03.365302Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1026 2024-11-21T17:47:03.365307Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId >> TTenantPoolTests::TestStateStatic >> TTenantPoolTests::TestSensorsConfigForStaticSlot >> TSlotIndexesPoolTest::Init [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> THealthCheckTest::NoBscResponse [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::SplitEmpty [GOOD] Test command err: Trying to start YDB, gRPC: 14099, MsgBus: 25906 2024-11-21T17:46:25.458644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790393241419722:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:25.458966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000df0/r3tmp/tmpEkk0C4/pdisk_1.dat 2024-11-21T17:46:25.515384Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14099, node 1 2024-11-21T17:46:25.527778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:25.527792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:25.527793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:25.527829Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25906 2024-11-21T17:46:25.559716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:25.559743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:25.560826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:25.578665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:25.582246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:25.708819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:26.407653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.407653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.407688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.407706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.407745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.407749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.407774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.407774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.407802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.407821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.407826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.407841Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:26.407846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.407860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:26.407865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.407883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:26.407885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.407904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:26.407910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.407930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:26.407930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.407954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790397536389434:2299];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.407978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:26.408038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790397536389432:2297];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:26.411635Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439790397536389450:2303];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.411663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439790397536389450:2303];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.411701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439790397536389450:2303];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:26.411723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790397536389451:2304];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:26.411727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439790397536389450:2303];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:26.411747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439790397536389450:2303];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:26.411750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790397536389451:2304];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:26.411763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038143;self_id=[1:7439790397536389450:2303];tablet_id=72075186224038143;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedu ... emeOpAlterColumnTable, opId: 281474976720705:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.674049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720706:0, at schemeshard: 72057594046644480 2024-11-21T17:46:44.694916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720707:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:45.781230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720708:0, at schemeshard: 72057594046644480 2024-11-21T17:46:45.791158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720709:0, at schemeshard: 72057594046644480 2024-11-21T17:46:45.816315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720710:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... 
() 2024-11-21T17:46:47.311378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720711:0, at schemeshard: 72057594046644480 2024-11-21T17:46:47.320044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720712:0, at schemeshard: 72057594046644480 2024-11-21T17:46:47.340530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720713:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:48.929040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720714:0, at schemeshard: 72057594046644480 2024-11-21T17:46:48.944059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720715:0, at schemeshard: 72057594046644480 2024-11-21T17:46:48.957285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720716:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:50.283661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720717:0, at schemeshard: 72057594046644480 2024-11-21T17:46:50.294803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720718:0, at schemeshard: 72057594046644480 2024-11-21T17:46:50.310768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720719:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:51.434987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720720:0, at schemeshard: 72057594046644480 2024-11-21T17:46:51.444213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720721:0, at schemeshard: 72057594046644480 2024-11-21T17:46:51.455675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720722:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T17:46:52.437425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720723:0, at schemeshard: 72057594046644480 2024-11-21T17:46:52.445580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720724:0, at schemeshard: 72057594046644480 2024-11-21T17:46:52.455908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720725:0, at schemeshard: 72057594046644480 2024-11-21T17:46:52.817929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720726:0, at schemeshard: 72057594046644480 2024-11-21T17:46:52.825946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720727:0, at schemeshard: 72057594046644480 2024-11-21T17:46:52.831412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720728:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:53.807572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720729:0, at schemeshard: 72057594046644480 2024-11-21T17:46:53.820154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720730:0, at schemeshard: 72057594046644480 2024-11-21T17:46:53.834475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720731:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:54.730794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720732:0, at schemeshard: 72057594046644480 2024-11-21T17:46:54.748372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720733:0, at schemeshard: 72057594046644480 2024-11-21T17:46:54.758242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720734:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:56.043044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720735:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... 
() 2024-11-21T17:46:56.052109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720736:0, at schemeshard: 72057594046644480 2024-11-21T17:46:56.058907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720737:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:57.272710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720738:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.280891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720739:0, at schemeshard: 72057594046644480 2024-11-21T17:46:57.293479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720740:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:58.313834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720741:0, at schemeshard: 72057594046644480 2024-11-21T17:46:58.320208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720742:0, at schemeshard: 72057594046644480 2024-11-21T17:46:58.327369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720743:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:46:59.704809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720744:0, at schemeshard: 72057594046644480 2024-11-21T17:46:59.713361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720745:0, at schemeshard: 72057594046644480 2024-11-21T17:46:59.719559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720746:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:47:00.732123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720747:0, at schemeshard: 72057594046644480 2024-11-21T17:47:00.743758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720748:0, at schemeshard: 72057594046644480 2024-11-21T17:47:00.749959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720749:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_WAIT_FINISHED... 
() 2024-11-21T17:47:02.718520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720750:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.722821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720751:0, at schemeshard: 72057594046644480 2024-11-21T17:47:02.729201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720752:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () 2024-11-21T17:47:03.695157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976720753:0, at schemeshard: 72057594046644480 RESHARDING_WAIT_FINISHED... () RESHARDING_FINISHED 2024-11-21T17:47:04.207132Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211224000, txId: 18446744073709551615] shutting down [[0u]] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2024-11-21T17:47:05.073077Z node 2 :TX_PROXY DEBUG: actor# [2:190:2086] Bootstrap 2024-11-21T17:47:05.098876Z node 2 :TX_PROXY DEBUG: actor# [2:190:2086] Become StateWork (SchemeCache [2:200:2089]) 2024-11-21T17:47:05.099016Z node 1 :TX_PROXY DEBUG: actor# [1:189:2134] Bootstrap 2024-11-21T17:47:05.099928Z node 1 :TX_PROXY DEBUG: actor# [1:189:2134] Become StateWork (SchemeCache [1:202:2139]) 2024-11-21T17:47:05.099982Z node 3 :TX_PROXY DEBUG: actor# [3:191:2086] Bootstrap 2024-11-21T17:47:05.100680Z node 3 :TX_PROXY DEBUG: actor# [3:191:2086] Become StateWork (SchemeCache [3:204:2089]) 2024-11-21T17:47:05.110910Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:05.112219Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:05.112651Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:05.112794Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:05.113423Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:05.113433Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:05.113463Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:05.115616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:05.115640Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:05.115657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx 
IncompatibleData# false 2024-11-21T17:47:05.115672Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:05.115698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:05.115708Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:05.162120Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:05.162177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:05.173190Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:05.173251Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:05.173269Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:05.173281Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:05.173301Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:05.173307Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:05.173311Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:05.173319Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:05.184104Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:05.184160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:05.184356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:05.184366Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:05.185690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:05.185960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } 
DeclarativePDiskManagement: true } 2024-11-21T17:47:05.186032Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 3 DeclarativePDiskManagement: true } 2024-11-21T17:47:05.186051Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:05.186209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } } } } 2024-11-21T17:47:05.186282Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat 2024-11-21T17:47:05.186286Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat 2024-11-21T17:47:05.186289Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat 2024-11-21T17:47:05.186442Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:05.186465Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:05.186480Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:05.186514Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:05.186547Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:05.186985Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:05.187035Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:05.198210Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:05.198280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 3 Devices# [] 2024-11-21T17:47:05.198321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from 
NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:05.200936Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:05.201092Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:05.201236Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10904155002880771576 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:05.203180Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:05.203339Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:05.203389Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001d71/r3tmp/tmp23yeW7/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13015129283308641785 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:05.203563Z node 3 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:05.203612Z node 3 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:05.203617Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:05.203645Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:05.203649Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:05.203678Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:05.203684Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:05.203709Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:05.203723Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:05.203726Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:05.203730Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:05.203736Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:05.203738Z node 3 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:05.203741Z node 3 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-2 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:05.203747Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:05.203816Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:05.203820Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:05.203823Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:05.203834Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:437:2282] 2024-11-21T17:47:05.204959Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:05.204968Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:422:2279] 2024-11-21T17:47:05.205130Z node 1 :LOCAL 
DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:437:2282] 2024-11-21T17:47:05.205147Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:05.205153Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:05.205156Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:05.205251Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T17:47:05.205256Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T17:47:05.205275Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T17:47:05.205277Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T17:47:05.208486Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T17:47:05.208533Z node 3 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:05.208629Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:05.208633Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:05.208648Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[3:446:2097] 2024-11-21T17:47:05.208716Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T17:47:05.208724Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:05.208767Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:05.208769Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:05.208775Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:449:2097] 2024-11-21T17:47:05.208794Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-1 2024-11-21T17:47:05.208797Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:423:2094] 2024-11-21T17:47:05.208827Z node 3 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-2 2024-11-21T17:47:05.208830Z node 3 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [3:424:2094] 2024-11-21T17:47:05.209236Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:449:2097] 2024-11-21T17:47:05.209285Z node 3 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[3:446:2097] 2024-11-21T17:47:05.209296Z node 2 :LOCAL DEBUG: 
TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:05.209303Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:05.209305Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:05.209331Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:05.209334Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:05.209335Z node 3 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoBscResponse [GOOD] Test command err: 2024-11-21T17:46:51.193064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:51.193177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:51.193206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:51.193312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:51.193340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:51.193360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c0/r3tmp/tmpwsk7oy/pdisk_1.dat 2024-11-21T17:46:51.405235Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14977, node 1 TClient is connected to server localhost:5503 2024-11-21T17:46:51.551021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:51.551042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:51.551046Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:51.551124Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:52.903170Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.903224Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.903237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:52.903296Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:52.903333Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:52.903344Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c0/r3tmp/tmpYCEQcj/pdisk_1.dat 2024-11-21T17:46:52.978376Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14778, node 3 TClient is connected to server localhost:5150 2024-11-21T17:46:53.085307Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:53.085324Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:53.085327Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:53.085371Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:54.269750Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.269855Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:54.269899Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.270028Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:54.270081Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:54.270115Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c0/r3tmp/tmpquEMBv/pdisk_1.dat 2024-11-21T17:46:54.362778Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14431, node 5 TClient is connected to server localhost:27099 2024-11-21T17:46:54.472304Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:54.472325Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:54.472329Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:54.472418Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:55.537568Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.537645Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:55.537661Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:55.537681Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:55.537710Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:55.537730Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c0/r3tmp/tmpUOht6R/pdisk_1.dat 2024-11-21T17:46:55.623796Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23380, node 7 TClient is connected to server localhost:8507 2024-11-21T17:46:55.750573Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:55.750596Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:55.750600Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:55.750742Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:56.559324Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:56.559397Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:56.559420Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c0/r3tmp/tmpvuT3Yx/pdisk_1.dat 2024-11-21T17:46:56.641940Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10154, node 9 TClient is connected to server localhost:11401 2024-11-21T17:46:56.749502Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:56.749535Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:56.749539Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:56.749609Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:56.824216Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.824269Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.836070Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: EMERGENCY issue_log { id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-4e47" reason: "RED-9a33-53b5" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-9a33-4e47" status: RED message: "Compute has issues with system tablets" location { database { name: "/Root" } } reason: "RED-9a33-c138-BSController" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-c138-BSController" status: RED message: "System tablet is unresponsive" location { compute { tablet { type: "BSController" id: "72057594037932033" } } database { name: "/Root" } } type: "SYSTEM_TABLET" level: 3 } issue_log { id: "RED-9a33-53b5" status: RED message: "System tablet BSC didn\'t provide information" location { database { name: "/Root" } } type: "STORAGE" level: 2 } database_status { name: "/Root" overall: RED storage { overall: RED pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN vdisks { id: "0-1-0-0-0" overall: GREEN pdisk { id: "9-1" overall: GREEN } } } } } compute { overall: RED nodes { id: "9" overall: YELLOW load { overall: YELLOW load: 152.044434 cores: 64 } } } } location { id: 9 host: "::1" port: 12001 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] Test command err: 2024-11-21T17:47:05.661313Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 2024-11-21T17:47:05.686428Z node 1 :TX_PROXY DEBUG: 
actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T17:47:05.696633Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:05.698193Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:05.698586Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:05.698702Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:05.699008Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:05.699015Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:05.699037Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:05.701229Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:05.701285Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:05.701299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:05.701322Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:05.701334Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:05.701348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:05.725903Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:05.725961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:05.737017Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:05.737072Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:05.737092Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:05.737103Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:05.737131Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:05.737140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:05.737146Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:05.737154Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:05.748304Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:05.748372Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:05.748733Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:05.748766Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:05.750294Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:05.750517Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:05.750762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001d55/r3tmp/tmpLztoUW/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:47:05.750848Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/001d55/r3tmp/tmpLztoUW/pdisk_1.dat 2024-11-21T17:47:05.751034Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:05.751057Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:05.751070Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:05.751110Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:05.751144Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:05.751662Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:05.751848Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# 
{VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:05.762765Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:05.778416Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:05.778495Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:05.778539Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:05.778577Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1/users/tenant-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:05.778598Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:05.778786Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T17:47:05.778792Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T17:47:05.784553Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T17:47:05.784632Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:05.784781Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:05.784789Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:05.784810Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:365:2314] 2024-11-21T17:47:05.784909Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1/users/tenant-1 2024-11-21T17:47:05.784918Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:95:2130] 2024-11-21T17:47:05.785630Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:365:2314] 2024-11-21T17:47:05.785669Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:05.785679Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:05.785682Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk
>> TNodeBrokerTest::NodeNameReuseRestart
>> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD]
>> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD]
|77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest
|77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest
>> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD]
>> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy
>> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD]
>> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain
>> TDynamicNameserverTest::TestCacheUsage [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD]
Test command err:
2024-11-21T17:47:04.123213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:04.123234Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:04.127370Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event:
NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:04.127749Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:04.127822Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.127830Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.127837Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:04.127989Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:04.128908Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:04.128924Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.128930Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.128934Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.128953Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:04.128998Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:04.129015Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.129022Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.161191Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:04.161240Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T17:47:04.161246Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:04.161256Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.171592Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:577:2205], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.171966Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.171996Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.172013Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.264537Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:600:2206], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.264669Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T17:47:04.264675Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.264687Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.264954Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:603:2207], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.264992Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:604:2208], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.265062Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, 
Sender [1:605:2209], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.265077Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:606:2210], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.265103Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:603:2207] 2024-11-21T17:47:04.265105Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.265110Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.265120Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:607:2211], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.265148Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:604:2208] 2024-11-21T17:47:04.265152Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.265156Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.265169Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:608:2212], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.265189Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:609:2213], Recipient [1:541:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.265207Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:605:2209] 2024-11-21T17:47:04.265209Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.265212Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.265226Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:606:2210] 2024-11-21T17:47:04.265228Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.265231Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.265239Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:607:2211] 2024-11-21T17:47:04.265241Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.265244Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.265249Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:608:2212] 2024-11-21T17:47:04.265253Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.265256Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.265266Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:609:2213] 2024-11-21T17:47:04.265268Z node 1 :NODE_BROKER TRACE: 
StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.265271Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.316450Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:541:2184], Recipient [1:541:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:04.316471Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:04.316478Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.316482Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.316498Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:04.316505Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:04.347116Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:541:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T17:47:04.347137Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347143Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.347216Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:603:2207] 2024-11-21T17:47:04.347219Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347221Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.347225Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:604:2208] 2024-11-21T17:47:04.347228Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347229Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.347239Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:606:2210] 2024-11-21T17:47:04.347241Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347243Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.347246Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:607:2211] 2024-11-21T17:47:04.347248Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347250Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.347253Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:608:2212] 2024-11-21T17:47:04.347254Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347256Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.347259Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:609:2213] 2024-11-21T17:47:04.347261Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347263Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.347267Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], 
Recipient [1:605:2209] 2024-11-21T17:47:04.347268Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.347270Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.358243Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:04.358267Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:04.358283Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:04.358287Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=0 expired=0 2024-11-21T17:47:04.358298Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:04.358307Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2024-11-21T17:47:04.358314Z ... 8944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:04.564116Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:04.564125Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:729:2270], Recipient [1:690:2270]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:04.564128Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:04.564132Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.564134Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.564152Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:04.564154Z node 1 :NODE_BROKER DEBUG: Registration request from host4:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:04.564178Z node 1 :NODE_BROKER DEBUG: Adding node #1027 host4:1001 to database resolvehost=host4.yandex.net address=1.2.3.7 dc=1 location=DC=1/M=2/R=3/U=7/ lease=1 expire=Thu, 01 Jan 1970 04:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=3 authorizedbycertificate=false 2024-11-21T17:47:04.564222Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=7 2024-11-21T17:47:04.575035Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:04.575054Z node 1 :NODE_BROKER DEBUG: Added node #1027 host4:1001 2024-11-21T17:47:04.575059Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 6 to 7 2024-11-21T17:47:04.575063Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:1001 to epoch cache 2024-11-21T17:47:04.575107Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" 
Unit: "7" } Expire: 14400025000 Name: "slot-3" } 2024-11-21T17:47:04.575113Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.575209Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:734:2306], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.575223Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.575227Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.575234Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T17:47:04.575271Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:736:2308], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.575280Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2024-11-21T17:47:04.575282Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.575286Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T17:47:04.575313Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:738:2310], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.575324Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.575327Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.575331Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T17:47:04.575371Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:740:2312], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.575384Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2024-11-21T17:47:04.575388Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.575392Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T17:47:04.575433Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:742:2314], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.575442Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.575447Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.575450Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2024-11-21T17:47:04.718916Z node 1 :NODE_BROKER TRACE: StateWork, received event# 
2146435072, Sender [1:690:2270], Recipient [1:690:2270]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:04.718935Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:04.718940Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.718944Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.718955Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:04.718962Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.769943Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:760:2315], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.769994Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2024-11-21T17:47:04.769999Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.770004Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:04.781329Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:04.781354Z node 1 :NODE_BROKER DEBUG: Move to new epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.781470Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2024-11-21T17:47:04.781478Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=4 expired=0 2024-11-21T17:47:04.781520Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.781531Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.781691Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:772:2320], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.781728Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.781734Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.781742Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.781799Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:774:2322], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.781813Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.781817Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.781822Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.781868Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:776:2324], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.781876Z node 1 :NODE_BROKER TRACE: StateWork, received 
event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.781879Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.781884Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.781926Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:778:2326], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.781949Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2024-11-21T17:47:04.781954Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.781958Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.782003Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:780:2328], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.782013Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.782016Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.782021Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2024-11-21T17:47:04.782066Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:782:2330], Recipient [1:690:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.782080Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:530:2178], Recipient [1:690:2270]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2024-11-21T17:47:04.782084Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.782088Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z
|77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD]
Test command err:
2024-11-21T17:47:05.655870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:05.655896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:05.655901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:05.655906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:05.655917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2024-11-21T17:47:05.655919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:05.655927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:05.655993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:05.658636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:05.658653Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:05.659922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:05.660071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:05.660085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:47:05.660858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:05.660949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:05.661017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:47:05.661079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:47:05.661607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:47:05.661880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:47:05.661890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:47:05.661903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:05.661910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:47:05.661914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:05.661927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.694240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:47:05.694339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.694396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:47:05.694438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:47:05.694454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.695150Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:47:05.695177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:47:05.695225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.695236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:47:05.695240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:05.695245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:05.695641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.695653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:47:05.695658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:05.696017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.696028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.696034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:47:05.696040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:05.696438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:05.696717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:05.696779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:05.696923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:47:05.696928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:47:05.696931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:47:05.862007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:47:05.862074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:47:05.862086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046578944 2024-11-21T17:47:05.862151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:05.862156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:47:05.862188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:47:05.862197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:47:05.862676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:47:05.862685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:47:05.862726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:47:05.862731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:47:05.862812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:47:05.862820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:05.862831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:05.862835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:05.862840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:05.862846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:05.862851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:05.862855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:05.862865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:47:05.862870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:47:05.862873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:47:05.863210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:47:05.863224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:47:05.863227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:47:05.863231Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:47:05.863236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:47:05.863253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:47:05.863257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T1 ... dified from very-static to static 2024-11-21T17:47:06.014393Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:380:2338] 2024-11-21T17:47:06.014412Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.014416Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.014435Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.014438Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.025801Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 4 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } } } } 2024-11-21T17:47:06.025822Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:47:06.025847Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.025861Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.025866Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.025871Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.026235Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { StaticSlotLabelValue: "static-again" } } 2024-11-21T17:47:06.026240Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) static slot label modified from static to static-again 2024-11-21T17:47:06.026243Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:380:2338] 2024-11-21T17:47:06.026263Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, 
received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.026266Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.026283Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.026285Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.037607Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 5 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } } } } 2024-11-21T17:47:06.037632Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:47:06.037666Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.037681Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.037687Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.037697Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.038599Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: false StaticSlotLabelValue: "static-again" } } 2024-11-21T17:47:06.038621Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.038629Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.038642Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.038645Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.050189Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: 
NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 6 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } } } } 2024-11-21T17:47:06.050216Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:47:06.050251Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.050270Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.050277Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.050286Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.051272Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: true DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2024-11-21T17:47:06.051305Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.051312Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.051330Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.051334Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.062909Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273285146, Sender [1:383:2337], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } FeatureFlags { EnableExternalHive: false EnableColumnStatistics: false } Version { Items { Kind: 10 Id: 7 Generation: 1 } } } AffectedKinds: 10 RawConsoleConfig { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } Version { Items { Kind: 10 Id: 1 Generation: 1 } Items { Kind: 10 Id: 2 Generation: 1 } Items { Kind: 10 Id: 3 Generation: 1 } Items { Kind: 10 Id: 4 Generation: 1 } Items { Kind: 10 Id: 5 Generation: 1 } Items { Kind: 10 Id: 6 Generation: 1 } Items { Kind: 10 Id: 7 Generation: 1 } } } } 2024-11-21T17:47:06.062935Z 
node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T17:47:06.062968Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.062984Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:380:2338]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.062994Z node 1 :CONFIGS_DISPATCHER TRACE: Sending for kinds: MonitoringConfigItem 2024-11-21T17:47:06.063004Z node 1 :CONFIGS_DISPATCHER TRACE: Send TEvConsole::TEvConfigNotificationRequest to [1:382:2336]: Config { MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } } ItemKinds: 10 Local: true 2024-11-21T17:47:06.063896Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Got new monitoring config: MonitoringConfig { ForceDatabaseLabels: false DatabaseLabels { Enabled: true StaticSlotLabelValue: "static-again" } } 2024-11-21T17:47:06.063927Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:380:2338], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.063933Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse 2024-11-21T17:47:06.063949Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, received event# 273286162, Sender [1:382:2336], Recipient [1:379:2337]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationResponse { SubscriptionId: 0 ConfigId { } } 2024-11-21T17:47:06.063953Z node 1 :CONFIGS_DISPATCHER TRACE: StateWork, processing event TEvConsole::TEvConfigNotificationResponse
|77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest
|77.1%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSlotIndexesPoolTest::Ranges [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD]
Test command err:
2024-11-21T17:47:06.127612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:06.127629Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:06.130520Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:06.130822Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:06.130881Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.130886Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.130892Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:06.131004Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:06.131293Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:06.131297Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.131300Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.131302Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.131309Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:06.131340Z node 1 :NODE_BROKER DEBUG: Using default config.
2024-11-21T17:47:06.131356Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:06.131362Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:06.152721Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:06.152762Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2024-11-21T17:47:06.152769Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:06.152779Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.244928Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:207:2196], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.245289Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T17:47:06.245300Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.245312Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:06.245369Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:209:2198], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.245400Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:06.245404Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.245411Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:06.245931Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.245949Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:06.245970Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:210:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.245973Z node 1 :NODE_BROKER TRACE: 
StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.245978Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.245980Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.246000Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.246003Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:06.246064Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:06.246095Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:06.257102Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.257126Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:06.257133Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:06.257137Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:06.257203Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } 2024-11-21T17:47:06.257214Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.257371Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:223:2203], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.257389Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:221:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T17:47:06.257395Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:06.257411Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200023000 Name: "slot-0" } } 2024-11-21T17:47:06.257464Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:223:2203], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:06.257510Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:228:2204], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.257520Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:226:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T17:47:06.257523Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:06.257531Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T17:47:06.257545Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:228:2204], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:06.257579Z node 1 :NODE_BROKER TRACE: StateWork, 
received event# 269877761, Sender [1:231:2205], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.257588Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:229:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T17:47:06.257590Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:06.257595Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T17:47:06.257606Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877764, Sender [1:231:2205], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:06.257632Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 1 } 2024-11-21T17:47:06.257635Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.257645Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:06.257687Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 2 } 2024-11-21T17:47:06.257691Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.257695Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:06.257735Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:233:2207], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.257747Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:06.257750Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.257755Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:06.329147Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:168:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:06.329177Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:06.329183Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.329188Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.329207Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:06.329215Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:06.359673Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T17:47:06.359694Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.359699Z node 1 :NODE_BROKER 
DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:06.359778Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:240:2210], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.359805Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:06.359809Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.359815Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:06.359891Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.359900Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:06.359909Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:241:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.359911Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.359915Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.370784Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:06.370811Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:06.370827Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:06.370833Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=1 expired=0 2024-11-21T17:47:06.370868Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:06.370877Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.370884Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.370949Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.370955Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:06.370982Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:1001 to database resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 
expire=Thu, 01 Jan 1970 03:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:06.371036Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=4 2024-11-21T17:47:06.381903Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.381925Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:1001 2024-11-21T17:47:06.381930Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 3 to 4 2024-11-21T17:47:06.381933Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2024-11-21T17:47:06.381986Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 10800023000 Name: "slot-1" } 2024-11-21T17:47:06.381993Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.382088Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 3 } 2024-11-21T17:47:06.382095Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.382105Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:06.382236Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:250:2218], Recipient [1:168:2176]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.382254Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:160:2171], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:06.382258Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.382263Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:06.433234Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:168:2176], Recipient [1:168:2176]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:06.433261Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:06.433267Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.433272Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.433289Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:06.433299Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T17:47:06.463837Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:168:2176]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T17:47:06.463871Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.463876Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:47:06.474683Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:06.474725Z node 1 :NODE_BROKER DEBUG: Node #1024 host1:1001 has expired 2024-11-21T17:47:06.474736Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.5 1970-01-01T02:00:00.023000Z - 
1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T17:47:06.474753Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:06.474759Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=1 expired=1 2024-11-21T17:47:06.474788Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.5 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z - 1970-01-01T04:00:00.023000Z 2024-11-21T17:47:06.474795Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> TNodeBrokerTest::ExtendLeaseRestartRace >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |77.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.1%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::NodeNameExpiration |77.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::TestListNodes [GOOD] >> Compression::WriteRAW [GOOD] >> TNodeBrokerTest::FixedNodeId [GOOD] >> Compression::WriteGZIP |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |77.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2024-11-21T17:47:05.668216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:05.668259Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:05.672643Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:05.673076Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:05.673162Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:05.673168Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:05.673175Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:05.673334Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:05.674234Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:05.674245Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:05.674250Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:05.674253Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:05.674267Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:05.674309Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:05.674325Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:05.674330Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:05.705978Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:05.706005Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2024-11-21T17:47:05.706009Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:05.706015Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:05.716308Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:573:2205], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.716573Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:05.716582Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.716595Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2024-11-21T17:47:05.716664Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2207], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.716704Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" 
Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: true Path: "dc-1" } 2024-11-21T17:47:05.716711Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:05.716720Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: true Path: "dc-1" 2024-11-21T17:47:05.717274Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:05.717298Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: true Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:05.717313Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:576:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.717316Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.717319Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:05.717321Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:05.717335Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:05.717338Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (fixed) tenant: dc-1 2024-11-21T17:47:05.717351Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=NEVER servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:05.717377Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:05.728194Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:05.728216Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:05.728245Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:05.728250Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:05.728299Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18446744073709551615 Name: "slot-0" } 2024-11-21T17:47:05.728304Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:05.728403Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:588:2213], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.728435Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:526:2178], Recipient 
[1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2024-11-21T17:47:05.728440Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:05.728452Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18446744073709551615 Name: "slot-0" } } 2024-11-21T17:47:05.728484Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:590:2215], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.728494Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2024-11-21T17:47:05.728497Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:05.728503Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1024 2024-11-21T17:47:05.728507Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:05.728509Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) is now active 2024-11-21T17:47:05.728511Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) enqueue tx 2024-11-21T17:47:05.728513Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) starts new tx 2024-11-21T17:47:05.728522Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 2024-11-21T17:47:05.728542Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:05.728556Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 18446744073709551615 Epoch { Id: 1 Version: 2 Start: 25000 End: 3600025000 NextEnd: 7200025000 } } 2024-11-21T17:47:05.728561Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) completed tx 2024-11-21T17:47:05.728564Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) unlink from parent 2024-11-21T17:47:05.728567Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1024 2024-11-21T17:47:05.728570Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T17:47:05.728619Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:592:2217], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.728637Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:05.728640Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:05.728645Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:05.728705Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: 
false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:05.728713Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:05.728722Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:593:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.728724Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.728726Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:05.728728Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:05.728731Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:05.728733Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:05.728743Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:05.728751Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18446744073709551615 N ... t2:1001 to epoch cache 2024-11-21T17:47:05.739892Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200025000 Name: "slot-1" } 2024-11-21T17:47:05.739899Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:05.740003Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:604:2227], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.740031Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1" } 2024-11-21T17:47:05.740037Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:05.740044Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1" 2024-11-21T17:47:05.740118Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { 
Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:05.740128Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: true Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:05.740139Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:605:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.740143Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.740147Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:05.740149Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:05.740159Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:05.740163Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (fixed) tenant: dc-1 2024-11-21T17:47:05.740172Z node 1 :NODE_BROKER DEBUG: Fix ID for node: #1025 host2:1001 2024-11-21T17:47:05.750955Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:05.750975Z node 1 :NODE_BROKER DEBUG: Fix ID for node #1025 host2:1001 2024-11-21T17:47:05.751012Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } 2024-11-21T17:47:05.751021Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:05.751113Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:610:2232], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.751132Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T17:47:05.751137Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:05.751149Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2024-11-21T17:47:05.751193Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:612:2234], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.751204Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 2024-11-21T17:47:05.751208Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:05.751213Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1025 2024-11-21T17:47:05.751218Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:05.751221Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) is now active 2024-11-21T17:47:05.751225Z node 1 :NODE_BROKER TRACE: 
TTxProcessor(1025) enqueue tx 2024-11-21T17:47:05.751229Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) starts new tx 2024-11-21T17:47:05.751237Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1025 2024-11-21T17:47:05.751248Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:05.751260Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 3 Start: 25000 End: 3600025000 NextEnd: 7200025000 } } 2024-11-21T17:47:05.751265Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) completed tx 2024-11-21T17:47:05.751268Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) unlink from parent 2024-11-21T17:47:05.751271Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1025 2024-11-21T17:47:05.751274Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T17:47:05.751327Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:614:2236], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.751341Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2024-11-21T17:47:05.751344Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:05.751356Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } } 2024-11-21T17:47:05.751394Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:616:2238], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.751413Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:05.751418Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:05.751428Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:05.751499Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:05.751507Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: 
"5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:05.751516Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:617:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.751519Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:05.751521Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:05.751523Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:05.751526Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:05.751529Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:05.751559Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:05.751571Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 18446744073709551615 Name: "slot-1" } 2024-11-21T17:47:05.751575Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:05.751621Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:620:2241], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.751637Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1025 } 2024-11-21T17:47:05.751640Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:05.751643Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1025 2024-11-21T17:47:05.751647Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:05.751650Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) is now active 2024-11-21T17:47:05.751653Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) enqueue tx 2024-11-21T17:47:05.751656Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) starts new tx 2024-11-21T17:47:05.751660Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1025 2024-11-21T17:47:05.751664Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:05.751676Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1025 Expire: 18446744073709551615 Epoch { Id: 1 Version: 3 Start: 25000 End: 3600025000 NextEnd: 7200025000 } } 2024-11-21T17:47:05.751680Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) completed tx 2024-11-21T17:47:05.751683Z node 1 :NODE_BROKER TRACE: TTxProcessor(1025) unlink from parent 2024-11-21T17:47:05.751687Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1025 2024-11-21T17:47:05.751690Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2024-11-21T17:47:04.124867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:04.124886Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:04.128814Z node 1 :NODE_BROKER DEBUG: StateInit 
event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:04.129187Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:04.129254Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.129260Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.129267Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:04.129419Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:04.129846Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:04.129853Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.129857Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.129861Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.129874Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:04.129923Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:04.129938Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.129945Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.161824Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:04.161866Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.023000Z 2024-11-21T17:47:04.161873Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:04.161882Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:04.172169Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:571:2203], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.172431Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:04.172438Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.172447Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.254631Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:593:2204], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.254761Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T17:47:04.254770Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.254784Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.255182Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:597:2205], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.255194Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:598:2206], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.255236Z node 1 :NODE_BROKER TRACE: StateWork, 
received event# 269877761, Sender [1:599:2207], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.255264Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:600:2208], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.255293Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:597:2205] 2024-11-21T17:47:04.255296Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.255302Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.255333Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:601:2209], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.255374Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:602:2210], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.255385Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:598:2206] 2024-11-21T17:47:04.255388Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.255394Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.255418Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:603:2211], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:04.255435Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:599:2207] 2024-11-21T17:47:04.255438Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.255441Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.255446Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:600:2208] 2024-11-21T17:47:04.255448Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.255451Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.255457Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:601:2209] 2024-11-21T17:47:04.255459Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.255462Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.255474Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:602:2210] 2024-11-21T17:47:04.255479Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.255484Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.255504Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:603:2211] 2024-11-21T17:47:04.255507Z node 1 
:NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.255512Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.023000Z - 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.306539Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:537:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:04.306562Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:04.306569Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:04.306573Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:04.306590Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:04.306599Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #2.2 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:04.337196Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2024-11-21T17:47:04.337222Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337229Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.337283Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:599:2207] 2024-11-21T17:47:04.337286Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337289Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.337326Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:597:2205] 2024-11-21T17:47:04.337328Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337330Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.337335Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:598:2206] 2024-11-21T17:47:04.337337Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337339Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.337342Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:600:2208] 2024-11-21T17:47:04.337344Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337346Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.337349Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:601:2209] 2024-11-21T17:47:04.337351Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337352Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.337356Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:602:2210] 2024-11-21T17:47:04.337358Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337359Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.337362Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, 
Sender [5:133:2072], Recipient [1:603:2211] 2024-11-21T17:47:04.337364Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:04.337368Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:04.348044Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:04.348060Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.2 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:04.348072Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.023000Z 2024-11-21T17:47:04.348075Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=0 expired=0 2024-11-21T17:47:04.348085Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:04.348094Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.2 1970-01-01T01:00:00.023000Z - 1970-01-01T02:00:00.023000Z - 1970-01-01T03:00:00.023000Z 2024-11-21T17:47:04.348100Z ... ROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:05.521345Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.551982Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 7 } 2024-11-21T17:47:05.552004Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552010Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.552089Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:597:2205] 2024-11-21T17:47:05.552093Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552095Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.552099Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:598:2206] 2024-11-21T17:47:05.552101Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552103Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.552106Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:599:2207] 2024-11-21T17:47:05.552108Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552110Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.552114Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:600:2208] 2024-11-21T17:47:05.552116Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552118Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.552121Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:601:2209] 2024-11-21T17:47:05.552123Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552125Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.552128Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], 
Recipient [1:602:2210] 2024-11-21T17:47:05.552129Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552131Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.552134Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:603:2211] 2024-11-21T17:47:05.552136Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.552138Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #7 2024-11-21T17:47:05.562881Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:05.562915Z node 1 :NODE_BROKER DEBUG: Move to new epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.562942Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T07:00:00.023000Z 2024-11-21T17:47:05.562946Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #7 nodes=0 expired=0 2024-11-21T17:47:05.562959Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.562972Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.562979Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.562984Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.562988Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.562993Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.562997Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.563001Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.563006Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:05.563219Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:793:2294], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.563247Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:05.563253Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.563258Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.563307Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:795:2296], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:05.563315Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:05.563318Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.563320Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #7.8 1970-01-01T06:00:00.023000Z - 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.717147Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:537:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:05.717170Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:05.717177Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:05.717182Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:05.717202Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:05.717211Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.747905Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 8 } 2024-11-21T17:47:05.747931Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.747956Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T17:47:05.748059Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:597:2205] 2024-11-21T17:47:05.748065Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.748070Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T17:47:05.748083Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:598:2206] 2024-11-21T17:47:05.748086Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.748089Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T17:47:05.748095Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:599:2207] 2024-11-21T17:47:05.748098Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.748101Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T17:47:05.748106Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:601:2209] 2024-11-21T17:47:05.748109Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.748112Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T17:47:05.748118Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:603:2211] 2024-11-21T17:47:05.748121Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.748124Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T17:47:05.748130Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:600:2208] 2024-11-21T17:47:05.748133Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.748136Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 
2024-11-21T17:47:05.748141Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:602:2210] 2024-11-21T17:47:05.748144Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:05.748147Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #8 2024-11-21T17:47:05.760635Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:05.760664Z node 1 :NODE_BROKER DEBUG: Move to new epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760689Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T08:00:00.023000Z 2024-11-21T17:47:05.760695Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #8 nodes=0 expired=0 2024-11-21T17:47:05.760711Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760724Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760729Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760737Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760743Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760749Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760755Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760762Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760768Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #8.9 1970-01-01T07:00:00.023000Z - 1970-01-01T08:00:00.023000Z - 1970-01-01T09:00:00.023000Z 2024-11-21T17:47:05.760775Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> TLocalTests::TestAddTenantWhileResolving |77.2%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.2%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> TSlotIndexesPoolTest::Expansion [GOOD] |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2024-11-21T17:47:06.352774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:06.352791Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:06.359692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T17:47:06.363125Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:06.363538Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:06.363582Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.363586Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.363591Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:47:06.363756Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:06.364381Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:06.364392Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.364397Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.364400Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.364602Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:06.364626Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T17:47:06.364642Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T17:47:06.364649Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T17:47:06.397774Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:06.397811Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.028000Z 2024-11-21T17:47:06.397817Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:06.397826Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.397885Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:593:2227], Recipient [1:545:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.398216Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:590:2225], Recipient [1:545:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:06.398225Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:06.398236Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.028000Z - 1970-01-01T01:00:00.028000Z - 1970-01-01T02:00:00.028000Z 2024-11-21T17:47:06.398310Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:606:2232], Recipient [1:545:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.398355Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:590:2225], Recipient [1:545:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:06.398360Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.398370Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:06.398987Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.399005Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:06.399022Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:607:2187], Recipient [1:545:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.399027Z node 1 :NODE_BROKER TRACE: StateWork, processing event 
TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.399032Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.399036Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.399048Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.399053Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:06.399148Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:06.399183Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:06.409994Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.410015Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T17:47:06.410021Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:06.410024Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T17:47:06.410065Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T17:47:06.410071Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.410174Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:619:2238], Recipient [1:545:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.410206Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:590:2225], Recipient [1:545:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:06.410210Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.410217Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:06.410279Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.410291Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:06.410302Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:620:2187], Recipient [1:545:2187]: 
NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.410307Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.410311Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.410314Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.410324Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.410328Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:06.410345Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:06.410371Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:06.421433Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.421463Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T17:47:06.421474Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:06.421480Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T17:47:06.421536Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-1" } 2024-11-21T17:47:06.421547Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.421695Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:625:2243], Recipient [1:545:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.421727Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:590:2225], Recipient [1:545:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:06.421733Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.421742Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:06.421841Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.421861Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 
2024-11-21T17:47:06.421883Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:626:2187], Recipient [1:545:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.421890Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.421896Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.421902Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.421917Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.421924Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:06.421948Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.421961Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T17:47:06.421965Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.422023Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:629:2246], Recipient [1:545:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.422041Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:590:2225], Recipient [1:545:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:06.422045Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.422051Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:06.422097Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.422106Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:06.422115Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:630:2187], Recipient [1:545:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.422118Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.422121Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.422124Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 
2024-11-21T17:47:06.422129Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.422132Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:06.422142Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.422152Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-1" } 2024-11-21T17:47:06.422155Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.422210Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:633:2249], Recipient [1:545:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.422227Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:590:2225], Recipient [1:545:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:06.422231Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.422255Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:06.422292Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.422301Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:06.422312Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:634:2187], Recipient [1:545:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.422315Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.422318Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.422321Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.422325Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.422328Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:06.422337Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.422347Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } 
Expire: 7200028000 Name: "slot-1" } 2024-11-21T17:47:06.422350Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:06.422390Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:637:2252], Recipient [1:545:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:06.422405Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:590:2225], Recipient [1:545:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:06.422408Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:06.422415Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:06.422434Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:06.422439Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:06.422445Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:638:2187], Recipient [1:545:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.422447Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:06.422449Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:06.422450Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:06.422452Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:06.422454Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:06.422459Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:06.422467Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200028000 Name: "slot-0" } 2024-11-21T17:47:06.422469Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx >> TLocalTests::TestAddTenantWhileResolving [GOOD] |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] Test command err: 2024-11-21T17:47:07.962559Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Bootstrap 
2024-11-21T17:47:07.981588Z node 1 :TX_PROXY DEBUG: actor# [1:97:2132] Become StateWork (SchemeCache [1:103:2137]) 2024-11-21T17:47:07.990333Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:07.991439Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:07.991891Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:07.992105Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:07.992561Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:07.992578Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:07.992617Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:07.994357Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:07.994390Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:07.994404Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:07.994417Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:07.994426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:07.994434Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:08.017402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:08.017445Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:08.028201Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:08.028262Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:08.028276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:08.028285Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:08.028305Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:08.028310Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:08.028315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:08.028320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:08.039108Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:08.039156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:08.039329Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:08.039338Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:08.040641Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:08.040782Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:08.040952Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001cc4/r3tmp/tmpaSkWnR/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:47:08.041020Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/001cc4/r3tmp/tmpaSkWnR/pdisk_1.dat 2024-11-21T17:47:08.041153Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:08.041170Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:08.041180Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:08.041206Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:08.041233Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:08.041499Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T17:47:08.041529Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:08.052215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:08.052436Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:08.052482Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:08.052488Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:08.052518Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:08.052534Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:08.052622Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:08.052630Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:08.052633Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:08.052645Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:312:2281] 2024-11-21T17:47:08.052900Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:08.052905Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:307:2278] 2024-11-21T17:47:08.052995Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:312:2281] 2024-11-21T17:47:08.053008Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:08.053014Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:08.053016Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:08.069079Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046578944 2024-11-21T17:47:08.069103Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-1 to schemeshard 72057594046578944 2024-11-21T17:47:08.069140Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/users/tenant-2 to schemeshard 72057594046578944 2024-11-21T17:47:08.072707Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "/dc-1/users/tenant-1" PathId: 100 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 100 DomainKey { SchemeShard: 72057594046578944 PathId: 100 } } } 2024-11-21T17:47:08.072744Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-1 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:08.072838Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:08.072844Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:08.072858Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:396:2337] 2024-11-21T17:47:08.072895Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046578944: Status: StatusSuccess Path: "/dc-1/users/tenant-2" PathDescription { Self { Name: "/dc-1/users/tenant-2" PathId: 101 SchemeshardId: 72057594046578944 PathType: EPathTypeSubDomain } DomainDescription { SchemeShardId_Depricated: 
72057594046578944 PathId_Depricated: 101 DomainKey { SchemeShard: 72057594046578944 PathId: 101 } } } 2024-11-21T17:47:08.072900Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/users/tenant-2 to hive 72057594046578946 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:08.072932Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:08.072935Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:08.072939Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:402:2339] 2024-11-21T17:47:08.073091Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:396:2337] 2024-11-21T17:47:08.073115Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:08.073122Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:08.073124Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:08.073137Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:402:2339] 2024-11-21T17:47:08.073151Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:08.073153Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:08.073155Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk >> KqpScripting::ScanQuery >> KqpYql::EvaluateExpr2 >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> TSchemeShardSplitBySizeTest::Merge1KShards [GOOD] >> KqpYql::BinaryJsonOffsetNormal >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpYql::PgIntPrimaryKey+EnableKqpDataQueryStreamLookup >> KqpRm::ManyTasks >> KqpScripting::UnsafeTimestampCast >> KqpRm::NodesMembershipByExchanger >> KqpScripting::SelectNullType >> KqpYql::TableRange >> KqpRm::ResourceBrokerNotEnoughResources >> KqpRm::DisonnectNodes >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] |77.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> KqpRm::SingleSnapshotByExchanger >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> KqpRm::ManyTasks [GOOD] >> KqpYql::EvaluateExpr2 [GOOD] >> KqpYql::EvaluateExpr3 >> KqpRm::NotEnoughMemory >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2024-11-21T17:47:07.021276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:07.021292Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:07.027460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T17:47:07.030398Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:07.030756Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:07.030798Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.030803Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.030807Z node 1 :NODE_BROKER DEBUG: 
TTxInitScheme Execute 2024-11-21T17:47:07.030928Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:47:07.031596Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:07.031603Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.031607Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.031609Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.031627Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:07.031643Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:07.031655Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.027000Z - 1970-01-01T01:00:00.027000Z - 1970-01-01T02:00:00.027000Z 2024-11-21T17:47:07.031659Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.027000Z - 1970-01-01T01:00:00.027000Z - 1970-01-01T02:00:00.027000Z 2024-11-21T17:47:07.063493Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:07.063521Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.027000Z 2024-11-21T17:47:07.063525Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:07.063531Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.063560Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:587:2225], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.063797Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:584:2223], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.063803Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.063811Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.027000Z - 1970-01-01T01:00:00.027000Z - 1970-01-01T02:00:00.027000Z 2024-11-21T17:47:07.063862Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:600:2230], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.063899Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:584:2223], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.063903Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.063910Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.064467Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } 
ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.064485Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:07.064500Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:601:2187], Recipient [1:541:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.064504Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.064507Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.064509Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.064517Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.064520Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.064584Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:07.064606Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:07.075419Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.075439Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T17:47:07.075448Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:07.075452Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T17:47:07.075504Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200027000 Name: "slot-0" } 2024-11-21T17:47:07.075510Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.075610Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:613:2236], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.075641Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:584:2223], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.075648Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.075657Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.075744Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] 
ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.075758Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:07.075770Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:614:2187], Recipient [1:541:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.075773Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.075777Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.075780Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.075793Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.075796Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.075819Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:07.075845Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:07.086650Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.086668Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T17:47:07.086673Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:07.086676Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T17:47:07.086724Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200027000 Name: "slot-1" } 2024-11-21T17:47:07.086730Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.086805Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:619:2241], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.086825Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:584:2223], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.086829Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.086835Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.086904Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false 
SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: ( ... processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.650221Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.650227Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:659:2255] 2024-11-21T17:47:07.650231Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.650234Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.650240Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:660:2256] 2024-11-21T17:47:07.650243Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.650247Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.650253Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:661:2257] 2024-11-21T17:47:07.650256Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.650260Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.650266Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:662:2258] 2024-11-21T17:47:07.650269Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.650273Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.650278Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:663:2259] 2024-11-21T17:47:07.650282Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.650286Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.661101Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:07.661123Z node 1 :NODE_BROKER DEBUG: Remove node #1024 host1:19001 2024-11-21T17:47:07.661128Z node 1 :NODE_BROKER DEBUG: Remove node #1025 host2:19001 2024-11-21T17:47:07.661134Z node 1 :NODE_BROKER DEBUG: Move to new epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661147Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.027000Z 2024-11-21T17:47:07.661151Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=2 expired=0 2024-11-21T17:47:07.661178Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661185Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661191Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661195Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661198Z node 1 :NODE_BROKER TRACE: Send 
TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661202Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661206Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661211Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661215Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.661435Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:719:2295], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.661453Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:584:2223], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.661458Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.661461Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.027000Z - 1970-01-01T04:00:00.027000Z - 1970-01-01T05:00:00.027000Z 2024-11-21T17:47:07.661495Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:721:2297], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.661524Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:584:2223], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.661527Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.661533Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.661623Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.661632Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:07.661644Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:722:2187], Recipient [1:541:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.661646Z node 1 :NODE_BROKER TRACE: StateWork, processing event 
TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.661649Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.661651Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.661663Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.661666Z node 1 :NODE_BROKER DEBUG: Registration request from host5:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.661683Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host5:19001 to database resolvehost=host5 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:07.661711Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=9 2024-11-21T17:47:07.672508Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.672527Z node 1 :NODE_BROKER DEBUG: Added node #1024 host5:19001 2024-11-21T17:47:07.672533Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 8 to 9 2024-11-21T17:47:07.672536Z node 1 :NODE_BROKER DEBUG: Add node #1024 host5:19001 to epoch cache 2024-11-21T17:47:07.672600Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 18000027000 Name: "slot-0" } 2024-11-21T17:47:07.672606Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.672703Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:727:2302], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.672724Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:584:2223], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.672729Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.672735Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.672811Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.672822Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:07.672835Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:728:2187], Recipient [1:541:2187]: 
NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.672841Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.672845Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.672846Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.672857Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.672860Z node 1 :NODE_BROKER DEBUG: Registration request from host6:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.672877Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host6:19001 to database resolvehost=host6 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:07.672904Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=10 2024-11-21T17:47:07.683788Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.683813Z node 1 :NODE_BROKER DEBUG: Added node #1025 host6:19001 2024-11-21T17:47:07.683822Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 9 to 10 2024-11-21T17:47:07.683826Z node 1 :NODE_BROKER DEBUG: Add node #1025 host6:19001 to epoch cache 2024-11-21T17:47:07.683886Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host6" Port: 19001 ResolveHost: "host6" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 18000027000 Name: "slot-1" } 2024-11-21T17:47:07.683894Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2024-11-21T17:47:07.123344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:07.123364Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:07.131812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2024-11-21T17:47:07.136878Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:07.137694Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:07.137762Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.137766Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.137772Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:47:07.137914Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:07.138692Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:07.138700Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.138704Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.138706Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.138888Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:07.138913Z node 1 
:NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:07.138931Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.029000Z - 1970-01-01T01:00:00.029000Z - 1970-01-01T02:00:00.029000Z 2024-11-21T17:47:07.138938Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.029000Z - 1970-01-01T01:00:00.029000Z - 1970-01-01T02:00:00.029000Z 2024-11-21T17:47:07.171927Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:07.171970Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.029000Z 2024-11-21T17:47:07.171976Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:07.171983Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.172035Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:590:2228], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.172333Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:587:2226], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.172345Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.172357Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.029000Z - 1970-01-01T01:00:00.029000Z - 1970-01-01T02:00:00.029000Z 2024-11-21T17:47:07.172433Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:603:2233], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.172477Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:587:2226], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.172481Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.172490Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.173125Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.173149Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:07.173167Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:604:2187], Recipient [1:541:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.173172Z node 1 :NODE_BROKER TRACE: 
StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.173177Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.173180Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.173194Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.173198Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.173290Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:19001 to database resolvehost=host1 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:07.173325Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:07.184247Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.184268Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:19001 2024-11-21T17:47:07.184275Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:07.184279Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:19001 to epoch cache 2024-11-21T17:47:07.184324Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200029000 Name: "slot-0" } 2024-11-21T17:47:07.184331Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.184445Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:616:2239], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.184476Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:587:2226], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.184481Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.184489Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.184552Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.184560Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:07.184569Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:617:2187], Recipient [1:541:2187]: 
NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.184571Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.184574Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.184576Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.184584Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.184586Z node 1 :NODE_BROKER DEBUG: Registration request from host2:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.184603Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host2:19001 to database resolvehost=host2 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:07.184627Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:07.195461Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.195486Z node 1 :NODE_BROKER DEBUG: Added node #1025 host2:19001 2024-11-21T17:47:07.195494Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:07.195499Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:19001 to epoch cache 2024-11-21T17:47:07.195567Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 19001 ResolveHost: "host2" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 7200029000 Name: "slot-1" } 2024-11-21T17:47:07.195578Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.195705Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:622:2244], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.195733Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:587:2226], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.195738Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.195747Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host3" Port: 19001 ResolveHost: "host3" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.195828Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: ( ... 
tx 2024-11-21T17:47:07.612149Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.612153Z node 1 :NODE_BROKER DEBUG: Registration request from host4:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.612173Z node 1 :NODE_BROKER DEBUG: Adding node #1027 host4:19001 to database resolvehost=host4 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 04:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=3 authorizedbycertificate=false 2024-11-21T17:47:07.612207Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=7 2024-11-21T17:47:07.622971Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.622993Z node 1 :NODE_BROKER DEBUG: Added node #1027 host4:19001 2024-11-21T17:47:07.622999Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 6 to 7 2024-11-21T17:47:07.623004Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:19001 to epoch cache 2024-11-21T17:47:07.623054Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 14400029000 Name: "slot-3" } 2024-11-21T17:47:07.623061Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.623164Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:708:2293], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.623176Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:587:2226], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.623182Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.623191Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.029000Z - 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z 2024-11-21T17:47:07.746514Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:541:2187], Recipient [1:541:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:07.746532Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:07.746551Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.746555Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.746569Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:07.746574Z node 1 :NODE_BROKER DEBUG: Removing node #1025 from database 2024-11-21T17:47:07.746593Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.777175Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2024-11-21T17:47:07.777192Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777196Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.777262Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:654:2251] 2024-11-21T17:47:07.777264Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777267Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 
2024-11-21T17:47:07.777284Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:655:2252] 2024-11-21T17:47:07.777286Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777288Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.777291Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:656:2253] 2024-11-21T17:47:07.777293Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777294Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.777298Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:52:2072], Recipient [1:657:2254] 2024-11-21T17:47:07.777300Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777302Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.777305Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:658:2255] 2024-11-21T17:47:07.777306Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777308Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.777311Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:659:2256] 2024-11-21T17:47:07.777313Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777315Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.777318Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:660:2257] 2024-11-21T17:47:07.777319Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.777321Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2024-11-21T17:47:07.788004Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:07.788028Z node 1 :NODE_BROKER DEBUG: Remove node #1025 host2:19001 2024-11-21T17:47:07.788037Z node 1 :NODE_BROKER DEBUG: Move to new epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788050Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.029000Z 2024-11-21T17:47:07.788055Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=3 expired=0 2024-11-21T17:47:07.788090Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788099Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788107Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788112Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788133Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788140Z node 1 :NODE_BROKER TRACE: Send 
TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788147Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788153Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788160Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.788452Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:722:2298], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.788482Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:587:2226], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.788487Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.788494Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.029000Z - 1970-01-01T04:00:00.029000Z - 1970-01-01T05:00:00.029000Z 2024-11-21T17:47:07.788547Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:724:2300], Recipient [1:541:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.788577Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:587:2226], Recipient [1:541:2187]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" } 2024-11-21T17:47:07.788581Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.788589Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database" 2024-11-21T17:47:07.788673Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.788686Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2024-11-21T17:47:07.788697Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:725:2187], Recipient [1:541:2187]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.788701Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.788705Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.788709Z node 1 :NODE_BROKER TRACE: 
TTxProcessor(root) starts new tx 2024-11-21T17:47:07.788721Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.788725Z node 1 :NODE_BROKER DEBUG: Registration request from host5:19001 (not fixed) tenant: /dc-1/my-database 2024-11-21T17:47:07.788748Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host5:19001 to database resolvehost=host5 address= dc=0 location=DC=0/M=0/R=0/U=0/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:07.788795Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=9 2024-11-21T17:47:07.799452Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.799470Z node 1 :NODE_BROKER DEBUG: Added node #1025 host5:19001 2024-11-21T17:47:07.799475Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 8 to 9 2024-11-21T17:47:07.799478Z node 1 :NODE_BROKER DEBUG: Add node #1025 host5:19001 to epoch cache 2024-11-21T17:47:07.799520Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { DataCenter: "0" Module: "0" Rack: "0" Unit: "0" } Expire: 18000029000 Name: "slot-1" } 2024-11-21T17:47:07.799525Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2024-11-21T17:47:08.822927Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:08.842656Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:08.842769Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:08.843547Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:08.853404Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:08.855485Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:08.855920Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:08.856195Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:08.856320Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:08.856327Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:08.856356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:08.858382Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:08.858424Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:08.858435Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:08.858478Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:08.858488Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:08.858605Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:08.880896Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:08.880943Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:08.891643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:08.891678Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:08.891688Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:08.891696Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:08.891711Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:08.891717Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:08.891720Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:08.891726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:08.902552Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:08.902609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:08.902808Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:08.902817Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:08.904513Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:08.904740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:08.905058Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0019df/r3tmp/tmpSdbzuA/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 
Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:08.905132Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0019df/r3tmp/tmpSdbzuA/pdisk_1.dat 2024-11-21T17:47:08.905140Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0019df/r3tmp/tmpSdbzuA/pdisk_1.dat 2024-11-21T17:47:08.905305Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:08.905388Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:08.905411Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:08.905424Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:08.905462Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:08.905499Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:08.905953Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:08.906011Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:08.916886Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:08.917104Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:08.919248Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:08.919386Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0019df/r3tmp/tmpSdbzuA/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:08.919514Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0019df/r3tmp/tmpSdbzuA/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0019df/r3tmp/tmpSdbzuA/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 98747660362959804 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:08.919683Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:08.919739Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:08.919745Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:08.919773Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:08.919781Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:08.919805Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:08.919823Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:08.919829Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:08.919835Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:08.919857Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:08.919952Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:08.919959Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:08.919964Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:08.919985Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:08.920054Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:08.920060Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:08.920064Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:08.920072Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:08.920800Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:08.920814Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:08.920834Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T17:47:08.920839Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:08.921012Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:08.921034Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:08.921042Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:08.921046Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:08.921160Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:08.921215Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:08.921220Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:08.921224Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:08.938641Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:08.938662Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:08.938666Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:08.938672Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> KqpYql::PgIntPrimaryKey+EnableKqpDataQueryStreamLookup [GOOD] >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> KqpYql::TableRange [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2024-11-21T17:47:09.082553Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:09.101405Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:09.101582Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:09.102792Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:09.113674Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:09.115783Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:09.116353Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:09.116792Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:09.116955Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.116964Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.117001Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:09.119426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:09.119481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:09.119496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:09.119551Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.119564Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.119697Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.142548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.142594Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.153387Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.153425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.153439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.153450Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.153474Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.153482Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.153488Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.153497Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.164175Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.164221Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:09.164445Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:09.164453Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:09.165956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything 
InitQueue processed 2024-11-21T17:47:09.166138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:09.166387Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0018c7/r3tmp/tmpkm7hqd/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:09.166439Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0018c7/r3tmp/tmpkm7hqd/pdisk_1.dat 2024-11-21T17:47:09.166444Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0018c7/r3tmp/tmpkm7hqd/pdisk_1.dat 2024-11-21T17:47:09.166570Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:09.166631Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:09.166649Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.166658Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:09.166686Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.166711Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.167073Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:09.167106Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.178080Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:09.178275Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:09.179748Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:09.179879Z node 2 
:BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018c7/r3tmp/tmpkm7hqd/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:09.179979Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018c7/r3tmp/tmpkm7hqd/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0018c7/r3tmp/tmpkm7hqd/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5488481906159308649 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:09.180115Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.180160Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.180164Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.180182Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.180187Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.180204Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.180217Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.180220Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.180240Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.180261Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.180314Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.180319Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:09.180322Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.180336Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:09.180369Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.180373Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 
2024-11-21T17:47:09.180375Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.180380Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:09.180973Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:09.180977Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:09.180986Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:09.180989Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:09.181096Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:09.181109Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.181114Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.181116Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.181193Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:09.181227Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.181230Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.181232Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.197453Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.197473Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:09.197477Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.197482Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> KqpOlapSysView::StatsSysViewEnumStringBytes [GOOD] >> KqpRm::NotEnoughMemory [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2024-11-21T17:47:07.058439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:07.058460Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:07.062006Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:07.062307Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:07.062365Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.062371Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.062376Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:07.062480Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:07.062958Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:07.062966Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.062969Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.062971Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.063003Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:07.063031Z node 1 :NODE_BROKER DEBUG: Using default config. 
2024-11-21T17:47:07.063046Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.063050Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.094642Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:07.094670Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T17:47:07.094675Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:07.094682Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.104996Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:573:2205], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.105345Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.105354Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.105368Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.105433Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:575:2207], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.105484Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:07.105490Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.105500Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:07.106192Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.106219Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:07.106239Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:576:2184], Recipient [1:537:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.106243Z node 1 :NODE_BROKER TRACE: StateWork, 
processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.106249Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.106252Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.106272Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.106277Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2024-11-21T17:47:07.106351Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:07.106390Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:07.117136Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.117155Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:07.117163Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:07.117167Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2024-11-21T17:47:07.117218Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2024-11-21T17:47:07.117225Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.117330Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:589:2214], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.117350Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272040960, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvCompactTables 2024-11-21T17:47:07.117355Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvCompactTables 2024-11-21T17:47:07.119453Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268828683, Sender [1:530:2180], Recipient [1:537:2184]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T17:47:07.119627Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268828683, Sender [1:530:2180], Recipient [1:537:2184]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T17:47:07.119952Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268828683, Sender [1:530:2180], Recipient [1:537:2184]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T17:47:07.561273Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:675:2237], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561334Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2024-11-21T17:47:07.561341Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.561357Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.561706Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:676:2238], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561720Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:677:2239], 
Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561747Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:678:2240], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561769Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:679:2241], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561792Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:79:2072], Recipient [1:676:2238] 2024-11-21T17:47:07.561794Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.561799Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.561805Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:680:2242], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561841Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:681:2243], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561868Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:682:2244], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.561873Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:106:2072], Recipient [1:677:2239] 2024-11-21T17:47:07.561875Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.561878Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.561895Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:133:2072], Recipient [1:678:2240] 2024-11-21T17:47:07.561897Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.561900Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.561907Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:160:2072], Recipient [1:679:2241] 2024-11-21T17:47:07.561910Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.561914Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.561920Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:187:2072], Recipient [1:680:2242] 2024-11-21T17:47:07.561923Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.561928Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.561940Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:214:2072], Recipient [1:681:2243] 2024-11-21T17:47:07.561943Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.561946Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.2 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.561956Z node 1 
:NODE_BROKER TRACE: StateWork, received event# ... 7:47:07.756632Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2024-11-21T17:47:07.767428Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:07.767450Z node 1 :NODE_BROKER DEBUG: Move to new epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767464Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.767469Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=1 expired=0 2024-11-21T17:47:07.767496Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767505Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767514Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767521Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767527Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767535Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767542Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767549Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767557Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.767812Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:701:2252], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.767831Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.767836Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.767842Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.767883Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:703:2254], Recipient [1:537:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.767895Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:537:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.767899Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.767903Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z ... 
rebooting node broker 2024-11-21T17:47:07.768024Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:530:2180], Recipient [1:537:2184]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:47:07.768050Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2024-11-21T17:47:07.768055Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2024-11-21T17:47:07.769733Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:07.770337Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:07.770398Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete ... OnActivateExecutor tabletId# 72057594037936129 2024-11-21T17:47:07.771125Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.771132Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.771145Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:07.771197Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:07.771201Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.771205Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.771208Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.771258Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute ... captured cache request ... sending extend lease request ... captured cache request ... waiting for response 2024-11-21T17:47:07.823291Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:07.823414Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:07.823427Z node 1 :NODE_BROKER DEBUG: Loaded current epoch: #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.823445Z node 1 :NODE_BROKER DEBUG: Added node #1024 host1:1001 2024-11-21T17:47:07.823455Z node 1 :NODE_BROKER DEBUG: Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2024-11-21T17:47:07.823496Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:07.823515Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.823524Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=1 expired=0 2024-11-21T17:47:07.823539Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.823612Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:750:2289], Recipient [1:711:2257]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.823645Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:526:2178], Recipient [1:711:2257]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2024-11-21T17:47:07.823651Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:07.823655Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1024 2024-11-21T17:47:07.823660Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:07.823664Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) is now active 2024-11-21T17:47:07.823668Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) enqueue tx 2024-11-21T17:47:07.823671Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) starts new tx 2024-11-21T17:47:07.823678Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 
2024-11-21T17:47:07.823683Z node 1 :NODE_BROKER DEBUG: Update node #1024 host1:1001 lease in database lease=2 expire=1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.834413Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:07.834451Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800024000 Epoch { Id: 2 Version: 3 Start: 3600024000 End: 7200024000 NextEnd: 10800024000 } } 2024-11-21T17:47:07.834462Z node 1 :NODE_BROKER DEBUG: Extended lease of #1024 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2024-11-21T17:47:07.834467Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) completed tx 2024-11-21T17:47:07.834469Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) unlink from parent 2024-11-21T17:47:07.834475Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1024 2024-11-21T17:47:07.834477Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active ... waiting for epoch update 2024-11-21T17:47:07.834556Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:769:2303], Recipient [1:711:2257]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.834573Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:711:2257]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.834577Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.834586Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.3 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:07.977869Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:711:2257], Recipient [1:711:2257]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:07.977890Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2024-11-21T17:47:07.977897Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.977901Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.977915Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2024-11-21T17:47:07.977924Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T17:47:08.028628Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:790:2305], Recipient [1:711:2257]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:08.028714Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:711:2257]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 3 } 2024-11-21T17:47:08.028719Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:08.028724Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #3 2024-11-21T17:47:08.039651Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2024-11-21T17:47:08.039669Z node 1 :NODE_BROKER DEBUG: Move to new epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T17:47:08.039685Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T03:00:00.024000Z 2024-11-21T17:47:08.039691Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #3 nodes=1 expired=0 2024-11-21T17:47:08.039721Z node 1 :NODE_BROKER TRACE: Send 
TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T17:47:08.039731Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:08.039839Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:802:2310], Recipient [1:711:2257]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:08.039870Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:711:2257]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:08.039876Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:08.039884Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z 2024-11-21T17:47:08.039933Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:804:2312], Recipient [1:711:2257]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:08.039948Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:526:2178], Recipient [1:711:2257]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:08.039952Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:08.039958Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.024000Z - 1970-01-01T03:00:00.024000Z - 1970-01-01T04:00:00.024000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 1403, MsgBus: 27782 2024-11-21T17:47:08.864964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790576287720719:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.865028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00174b/r3tmp/tmpEOUoul/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1403, node 1 2024-11-21T17:47:08.915835Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:08.924506Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.924516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.924517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.924547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27782 TClient is connected to server localhost:27782 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:08.964680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.964707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:08.965829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.992709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.000352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.061256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.076076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.086026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.120595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580582689346:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.120628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.153723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.159554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.169241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.223698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.232043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.239527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.247936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580582689850:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.247956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.247965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580582689855:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.248658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.252455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790580582689857:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. >> KqpYql::EvaluateExpr3 [GOOD] >> KqpRm::Reduce >> KqpScripting::ScanQueryDisable [GOOD] >> KqpYql::Closure [GOOD] >> KqpScripting::StreamDdlAndDml [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2024-11-21T17:47:09.470784Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:09.490776Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:09.490884Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:09.491623Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:09.499931Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:09.501372Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:09.501708Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:09.501919Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:09.502007Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.502011Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.502027Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:09.503532Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:09.503565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:09.503577Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:09.503613Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.503621Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.503712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.526054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.526090Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.536892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.536937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.536951Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.536962Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.536982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.536991Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.536996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.537004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.547711Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.547762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:09.547912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:09.547918Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:09.549305Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:09.549468Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:09.549732Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0018b3/r3tmp/tmpUkC7Hk/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:09.549795Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0018b3/r3tmp/tmpUkC7Hk/pdisk_1.dat 2024-11-21T17:47:09.549801Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0018b3/r3tmp/tmpUkC7Hk/pdisk_1.dat 2024-11-21T17:47:09.549935Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:09.549997Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:09.550015Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 
1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.550026Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:09.550055Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.550084Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.550419Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:09.550449Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.561447Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:09.561723Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:09.563281Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:09.563392Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018b3/r3tmp/tmpUkC7Hk/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:09.563484Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018b3/r3tmp/tmpUkC7Hk/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0018b3/r3tmp/tmpUkC7Hk/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16656923268014703438 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:09.563594Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.563630Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.563633Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.563651Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.563656Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.563672Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.563684Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.563687Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.563691Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.563706Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.563751Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.563756Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:09.563758Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.563772Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:09.563801Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.563805Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:09.563806Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.563811Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:09.564210Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:09.564216Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:09.564246Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T17:47:09.564250Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:09.564365Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:09.564377Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.564381Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.564383Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.564464Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:09.564501Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.564504Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.564506Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.580462Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.580481Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:09.580485Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.580490Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD] Test command err: Trying to start YDB, gRPC: 25081, MsgBus: 25219 2024-11-21T17:47:08.605607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790575560912111:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.605677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175d/r3tmp/tmpc3qmsF/pdisk_1.dat 2024-11-21T17:47:08.647027Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25081, node 1 2024-11-21T17:47:08.663944Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.663958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.663960Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.663997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25219 TClient is connected to server localhost:25219 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:08.706137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.706170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:08.707243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.728046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.741209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.757390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.772857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.784770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.879770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790575560913645:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.879798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.909049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.915719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.924603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.979717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.987004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.993973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.002919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790579855881456:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.002946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.002959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790579855881461:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.003549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.007564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790579855881463:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 23866, MsgBus: 14118 2024-11-21T17:47:09.256644Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790579125362019:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:09.256908Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175d/r3tmp/tmpBjzIwd/pdisk_1.dat 2024-11-21T17:47:09.264372Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23866, node 2 2024-11-21T17:47:09.273076Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:09.273099Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:09.273104Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:09.273149Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14118 TClient is connected to server localhost:14118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.356784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:09.356825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:09.357981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:09.358580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.368429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.378212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:09.394704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.403691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.523551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790579125363550:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.523577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.528532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.535199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.547576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.554132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.561443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.568290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.576301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790579125364056:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.576351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.576385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790579125364061:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.577157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.581835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790579125364063:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpRm::NodesMembershipByExchanger [GOOD] >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup [GOOD] >> KqpRm::SnapshotSharingByExchanger ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSysView::StatsSysViewEnumStringBytes [GOOD] Test command err: Trying to start YDB, gRPC: 18892, MsgBus: 28235 2024-11-21T17:46:08.923207Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790318535221699:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:08.923356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e8d/r3tmp/tmpXTtL9i/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18892, node 1 2024-11-21T17:46:08.980514Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:08.980780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:08.980793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:08.980795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:08.980829Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28235 TClient is connected to server localhost:28235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:09.024728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:09.024756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:09.025810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:09.051165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:09.073556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:09.083649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.083710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.083757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:09.083788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:09.083816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:09.083841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:09.083866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:09.083892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:09.083920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:09.083944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.083980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:09.084004Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790322830189654:2288];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:09.084485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:09.084507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:09.084524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:09.084529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:09.084544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:09.084548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:09.084558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:09.084566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:09.084576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:09.084579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:09.084591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:09.084597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:09.084650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:09.084661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:09.084678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:09.084683Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:09.084695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:09.084703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:09.084719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:09.084728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:09.084739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:09.084746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:09.087690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322830189655:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:09.087715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322830189655:2289];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:09.087747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790322830189655:2289];tablet_id=7207518622 ... 
1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Wait changes: 18421472/51200000 2024-11-21T17:46:36.940064Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211196922, txId: 281474976715667] shutting down 2024-11-21T17:46:39.655511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:46:39.655529Z node 2 :IMPORT WARN: Table profiles were not loaded ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: 2024-11-21T17:46:41.979747Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211201966, txId: 281474976715669] shutting down Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 18421472/51200000 2024-11-21T17:46:41.992315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715671:0, at schemeshard: 72057594046644480 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=51200216;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=51200216;columns=2; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: 2024-11-21T17:46:52.515950Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211212493, txId: 281474976715673] shutting down Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 
72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266592 RawBytes: 14929152 BlobRangeOffset: 1069880 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 398104 PathId: 3 Rows: 266592 RawBytes: 2132736 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 1069880 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 266834 RawBytes: 14942704 BlobRangeOffset: 1070888 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 400080 PathId: 3 Rows: 266834 RawBytes: 2134672 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 1070888 PathId: 3 Rows: 266065 RawBytes: 14899640 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266574 RawBytes: 14928144 BlobRangeOffset: 1069840 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 400064 PathId: 3 Rows: 266574 RawBytes: 2132592 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 1069840 PathId: 3 Wait changes: 22830328/102400000 ==================================== QUERY: SELECT * FROM `/Root/olapStore/olapTable/.sys/primary_index_stats` WHERE Activity == 1 RESULT: Rows: 266065 RawBytes: 14899640 
BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 5065184 PathId: 3 Rows: 266065 RawBytes: 2128520 BlobRangeOffset: 5065184 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:1:3:0:6132976:0] EntityType: COL BlobRangeSize: 1067792 PathId: 3 Rows: 266613 RawBytes: 14930328 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 5071136 PathId: 3 Rows: 266613 RawBytes: 2132904 BlobRangeOffset: 5071136 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:1:3:0:6141144:0] EntityType: COL BlobRangeSize: 1070008 PathId: 3 Rows: 267322 RawBytes: 14970032 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 1 InternalEntityId: 2024-11-21T17:46:57.555543Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217540, txId: 281474976715675] shutting down 1 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 5074568 PathId: 3 Rows: 267322 RawBytes: 2138576 BlobRangeOffset: 5074568 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 1 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:1:3:0:6147352:0] EntityType: COL BlobRangeSize: 1072784 PathId: 3 Rows: 266574 RawBytes: 14928144 BlobRangeOffset: 1069840 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 400064 PathId: 3 Rows: 266574 RawBytes: 2132592 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037890 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037890:1:2:4:0:1469904:0] EntityType: COL BlobRangeSize: 1069840 PathId: 3 Rows: 266834 RawBytes: 14942704 BlobRangeOffset: 1070888 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 400080 PathId: 3 Rows: 266834 RawBytes: 2134672 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037888 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037888:1:2:4:0:1470968:0] EntityType: COL BlobRangeSize: 1070888 PathId: 3 Rows: 266592 RawBytes: 14929152 BlobRangeOffset: 1069880 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: field PortionId: 2 InternalEntityId: 1 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL BlobRangeSize: 398104 PathId: 3 Rows: 266592 RawBytes: 2132736 BlobRangeOffset: 0 TierName: __DEFAULT Activity:  TabletId: 72075186224037889 Kind: INSERTED EntityName: pk_int PortionId: 2 InternalEntityId: 2 ChunkIdx: 0 BlobId: [72075186224037889:1:2:4:0:1467984:0] EntityType: COL 
BlobRangeSize: 1069880 PathId: 3 22830328/102400000 ==================================== QUERY: SELECT COUNT(*), MAX(pk_int), MIN(pk_int) FROM `/Root/olapStore/olapTable` RESULT: 2024-11-21T17:46:58.051759Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211217597, txId: 18446744073709551615] shutting down column2: 0 column0: 1600000 column1: 1599999 column2: int64_value: 0 column0: uint64_value: 1600000 column1: int64_value: 1599999 2024-11-21T17:46:59.651812Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211218000, txId: 18446744073709551615] shutting down count=1600000;min_count=3124;max_count=3126;groups_count=512; 2024-11-21T17:46:59.657579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnStore, opId: 281474976715682:0, at schemeshard: 72057594046644480 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 unpacked data: 44800000 / 15210888 packed data: 44800000 / 1198272 frq_diff: 0.07877725482 frq_compression: 0.02674714286 pk_size : 6400000 / 3210584 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 24462, MsgBus: 5174 2024-11-21T17:47:08.609826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790576652579949:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.609943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001770/r3tmp/tmp7B7cUe/pdisk_1.dat 2024-11-21T17:47:08.661661Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24462, node 1 2024-11-21T17:47:08.672486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.672501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.672503Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.672540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5174 TClient is connected to server localhost:5174 2024-11-21T17:47:08.709713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.709741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:47:08.710762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.722657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.731450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.795355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.810148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.820205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.884253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790576652581485:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.884280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.915236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.921469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.931088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.938112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.992650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.001070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.009517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580947549297:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.009540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580947549302:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.009542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.010042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.014204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790580947549304:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:09.193624Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229238, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 8902, MsgBus: 16878 2024-11-21T17:47:09.459126Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790579602053230:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:09.459246Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001770/r3tmp/tmpvVf43M/pdisk_1.dat 2024-11-21T17:47:09.472828Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8902, node 2 2024-11-21T17:47:09.478597Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:09.478610Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:09.478612Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:09.478647Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16878 TClient is connected to server localhost:16878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.559605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:09.559633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:09.560814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:09.562129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.568177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.576875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:09.591934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.604418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.729887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790579602054757:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.729914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.734257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.739929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.750366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.757043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.763710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.771051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.780162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790579602055261:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.780182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.780191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790579602055266:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.780854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.784083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790579602055268:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:09.996995Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230043, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 25635, MsgBus: 7638 2024-11-21T17:47:08.640530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790574668919455:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.640686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001769/r3tmp/tmpXAsfue/pdisk_1.dat 2024-11-21T17:47:08.702959Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25635, node 1 2024-11-21T17:47:08.715308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.715323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.715326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.715361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7638 2024-11-21T17:47:08.740598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.740624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:08.741804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.780536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.791146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:08.854202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.869710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.877948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.911990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790574668920993:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.912015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.951665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.959607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.966017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.020496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.028885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.036150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.045164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790578963888795:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.045187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.045196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790578963888800:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.045825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.048946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790578963888802:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 10524, MsgBus: 21061 2024-11-21T17:47:09.504068Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790582116267468:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:09.504252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001769/r3tmp/tmpRtTkWZ/pdisk_1.dat 2024-11-21T17:47:09.514961Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10524, node 2 2024-11-21T17:47:09.522919Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:09.522932Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:09.522933Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:09.522964Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21061 TClient is connected to server localhost:21061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.604356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:09.604386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:09.605476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:09.606672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.611419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.619932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:09.637356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.647188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.781165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790582116269002:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.781198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.784885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.839871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.847712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.855035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.861823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.869259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.877749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790582116269518:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.877768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790582116269523:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.877785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.878346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.882317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790582116269525:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpRm::SingleTask >> KqpScripting::SystemTables [GOOD] >> KqpRm::Reduce [GOOD] >> KqpRm::NotEnoughExecutionUnits ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 26748, MsgBus: 12789 2024-11-21T17:47:08.893236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790577979279540:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.893431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001759/r3tmp/tmpSY2HeW/pdisk_1.dat 2024-11-21T17:47:08.958092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26748, node 1 2024-11-21T17:47:08.970783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.970795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.970797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.970824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12789 2024-11-21T17:47:08.993076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.993105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:08.994163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.035424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.039200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:09.099108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.113801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.121957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.147202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790582274248373:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.147232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.172864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.177771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.182600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.189952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.196559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.203769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.212458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790582274248864:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.212485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790582274248869:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.212487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.213035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.217227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790582274248871:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:09.384311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.421420Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229469, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 8170, MsgBus: 19093 2024-11-21T17:47:09.589504Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790581144302665:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:09.589727Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001759/r3tmp/tmpjUpI3G/pdisk_1.dat 2024-11-21T17:47:09.596167Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8170, node 2 2024-11-21T17:47:09.604639Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:09.604652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:09.604654Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:09.604693Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19093 TClient is connected to server localhost:19093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.690004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:09.690050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:09.691154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:09.691838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.701773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:09.710476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.730486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.740437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.853920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581144304198:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.853945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.859522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.865380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.876299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.883673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.889809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.896734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.905661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581144304701:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.905690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.905694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581144304706:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.906221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.910373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790581144304708:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:10.079673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:10.131907Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230176, txId: 281474976715673] shutting down ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2024-11-21T17:47:08.823101Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:08.851556Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:08.851690Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:08.852923Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:08.865249Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:08.870483Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:08.870945Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:08.871256Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:08.871369Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:08.871374Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:08.871397Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:08.873426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:08.873471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:08.873486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:08.873527Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:08.873537Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:08.873683Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:08.898665Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:08.898738Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:08.909517Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:08.909551Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:08.909562Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:08.909573Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:08.909605Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:08.909615Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:08.909621Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:08.909629Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:08.920483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:08.920558Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:08.920763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:08.920774Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:08.922655Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:08.922846Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:08.923127Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0019d0/r3tmp/tmpP3B67l/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:08.923196Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0019d0/r3tmp/tmpP3B67l/pdisk_1.dat 2024-11-21T17:47:08.923203Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0019d0/r3tmp/tmpP3B67l/pdisk_1.dat 2024-11-21T17:47:08.923368Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:08.923449Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 
VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:08.923470Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:08.923482Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:08.923518Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:08.923552Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:08.924079Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:08.924181Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:08.935321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:08.935536Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:08.937108Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:08.937263Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0019d0/r3tmp/tmpP3B67l/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:08.937393Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0019d0/r3tmp/tmpP3B67l/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0019d0/r3tmp/tmpP3B67l/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17460445049295967100 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:08.937546Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:08.937608Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:08.937611Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:08.937635Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:08.937641Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:08.937665Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:08.937679Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:08.937683Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:08.937687Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:08.937702Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:08.937763Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:08.937769Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:08.937771Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:08.937790Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:08.937824Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:08.937828Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:08.937830Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:08.937835Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:08.938318Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:08.938322Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:08.938333Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T17:47:08.938335Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:08.938487Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:08.938502Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:08.938508Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:08.938510Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:08.938589Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:08.938630Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:08.938634Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:08.938635Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:08.956248Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:08.956272Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:08.956275Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:08.956280Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:08.978989Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:08.979887Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T17:47:08.979971Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:08.980564Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:47:09.013414Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T17:47:09.065017Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T17:47:09.106161Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T17:47:09.362115Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:09.362758Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T17:47:09.362797Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> KqpRm::DisonnectNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Merge1KShards [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:45:45.917439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:45.917460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:45.917464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:45.917467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:45.917476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:45.917478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:45.917485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:45.917557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:45.924560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:45.924579Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:45.926771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:45.927298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:45.927325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:45:45.928419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:45.928570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:45.928643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.928697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:45:45.929408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.929614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:45.929622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.929647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:45.929651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable 
to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:45.929656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:45.929664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.930602Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:45:45.944459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:45:45.944539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.944595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:45:45.944653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:45:45.944657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.945662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.945681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:45:45.945728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.945736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:45:45.945740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:45.945744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:45.946128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.946135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:45:45.946137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:45.946401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.946406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.946410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.946414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T17:45:45.946911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:45.947323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:45.947375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:45:45.947554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:45:45.947585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:45:45.947595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.947646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:45.947652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:45:45.947683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:45.947695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:45:45.948083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:45:45.948088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:45:45.948122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:45:45.948126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:45:45.948209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:45:45.948215Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:45.948222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:45.948243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:45.948250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:45.948256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T17:45:45.948259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:45.948261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:45.948269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:45:45.948274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:45:45.948277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:45:45.948509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:45.948519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:45:45.948522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:45:45.948525Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:45:45.948527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:45:45.948538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 72075186233410325 Deleted tabletId 72075186233410326 Deleted tabletId 72075186233410327 Deleted tabletId 72075186233410328 Deleted tabletId 72075186233410330 Deleted tabletId 72075186233410331 Deleted tabletId 72075186233410335 Deleted tabletId 72075186233410343 Deleted tabletId 72075186233410344 Deleted tabletId 72075186233410345 Deleted tabletId 72075186233410464 Deleted tabletId 72075186233410465 Deleted tabletId 72075186233410466 Deleted tabletId 72075186233410467 Deleted tabletId 72075186233410468 Deleted tabletId 72075186233410469 Deleted tabletId 72075186233410470 Deleted tabletId 72075186233410471 Deleted tabletId 72075186233410472 Deleted tabletId 72075186233410473 Deleted tabletId 72075186233410474 Deleted tabletId 72075186233410476 Deleted tabletId 72075186233410477 Deleted tabletId 72075186233410475 Deleted tabletId 72075186233410478 Deleted tabletId 72075186233410479 Deleted tabletId 72075186233410480 Deleted tabletId 72075186233410481 Deleted tabletId 72075186233410482 Deleted tabletId 72075186233410483 Deleted tabletId 72075186233410484 Deleted tabletId 72075186233410485 Deleted tabletId 72075186233410486 Deleted tabletId 72075186233410487 Deleted tabletId 72075186233410488 Deleted tabletId 72075186233410489 Deleted tabletId 72075186233410490 Deleted tabletId 72075186233410491 Deleted tabletId 72075186233410492 Deleted tabletId 72075186233410493 Deleted tabletId 72075186233410494 Deleted tabletId 72075186233410495 Deleted tabletId 72075186233410496 Deleted tabletId 72075186233410497 Deleted tabletId 72075186233410498 Deleted tabletId 72075186233410499 Deleted tabletId 72075186233410500 Deleted tabletId 72075186233410501 Deleted tabletId 72075186233410502 Deleted tabletId 72075186233410503 Deleted tabletId 72075186233410504 Deleted tabletId 72075186233410505 Deleted tabletId 
72075186233410506 Deleted tabletId 72075186233410507 Deleted tabletId 72075186233410508 Deleted tabletId 72075186233410509 Deleted tabletId 72075186233410510 Deleted tabletId 72075186233410511 Deleted tabletId 72075186233410512 Deleted tabletId 72075186233410513 Deleted tabletId 72075186233410514 Deleted tabletId 72075186233410515 Deleted tabletId 72075186233410516 Deleted tabletId 72075186233410517 Deleted tabletId 72075186233410518 Deleted tabletId 72075186233410519 Deleted tabletId 72075186233410520 Deleted tabletId 72075186233410521 Deleted tabletId 72075186233410522 Deleted tabletId 72075186233410523 Deleted tabletId 72075186233410524 Deleted tabletId 72075186233410525 Deleted tabletId 72075186233410526 Deleted tabletId 72075186233410527 Deleted tabletId 72075186233410528 Deleted tabletId 72075186233410529 Deleted tabletId 72075186233410530 Deleted tabletId 72075186233409598 Deleted tabletId 72075186233409599 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409600 Deleted tabletId 72075186233409601 Deleted tabletId 72075186233409602 Deleted tabletId 72075186233409603 Deleted tabletId 72075186233409604 Deleted tabletId 72075186233409605 Deleted tabletId 72075186233409606 Deleted tabletId 72075186233410346 Deleted tabletId 72075186233410347 Deleted tabletId 72075186233410348 Deleted tabletId 72075186233410349 Deleted tabletId 72075186233410350 Deleted tabletId 72075186233410351 Deleted tabletId 72075186233410352 Deleted tabletId 72075186233410353 Deleted tabletId 72075186233410354 Deleted tabletId 72075186233410355 Deleted tabletId 72075186233410356 Deleted tabletId 72075186233410357 Deleted tabletId 72075186233410358 Deleted tabletId 72075186233410359 Deleted tabletId 72075186233410360 Deleted tabletId 72075186233410361 Deleted tabletId 72075186233410362 Deleted tabletId 72075186233410363 Deleted tabletId 72075186233410364 Deleted tabletId 72075186233410365 Deleted tabletId 72075186233410366 Deleted tabletId 72075186233410367 Deleted tabletId 72075186233410368 Deleted tabletId 72075186233410369 Deleted tabletId 72075186233410370 Deleted tabletId 72075186233410371 Deleted tabletId 72075186233410372 Deleted tabletId 72075186233410373 Deleted tabletId 72075186233410374 Deleted tabletId 72075186233410375 Deleted tabletId 72075186233410376 Deleted tabletId 72075186233410377 Deleted tabletId 72075186233410378 Deleted tabletId 72075186233410379 Deleted tabletId 72075186233410380 Deleted tabletId 72075186233410381 Deleted tabletId 72075186233410382 Deleted tabletId 72075186233410383 Deleted tabletId 72075186233410384 Deleted tabletId 72075186233410385 Deleted tabletId 72075186233410386 Deleted tabletId 72075186233410387 Deleted tabletId 72075186233410388 Deleted tabletId 72075186233410389 Deleted tabletId 72075186233410390 Deleted tabletId 72075186233410391 Deleted tabletId 72075186233410392 Deleted tabletId 72075186233410393 Deleted tabletId 72075186233410394 Deleted tabletId 72075186233410395 Deleted tabletId 72075186233410396 Deleted tabletId 72075186233410397 Deleted tabletId 72075186233410398 Deleted tabletId 72075186233410399 Deleted tabletId 72075186233410400 Deleted tabletId 72075186233410401 Deleted tabletId 72075186233410402 Deleted tabletId 72075186233410403 Deleted tabletId 72075186233410404 Deleted tabletId 72075186233410405 Deleted tabletId 72075186233410406 Deleted tabletId 72075186233410407 Deleted tabletId 72075186233410408 Deleted tabletId 72075186233410409 Deleted tabletId 72075186233410410 Deleted tabletId 72075186233410411 Deleted tabletId 72075186233410412 
Deleted tabletId 72075186233410413 Deleted tabletId 72075186233410414 Deleted tabletId 72075186233410415 Deleted tabletId 72075186233410416 Deleted tabletId 72075186233410417 Deleted tabletId 72075186233410418 Deleted tabletId 72075186233410419 Deleted tabletId 72075186233410420 Deleted tabletId 72075186233410421 Deleted tabletId 72075186233410422 Deleted tabletId 72075186233410423 Deleted tabletId 72075186233410424 Deleted tabletId 72075186233410425 Deleted tabletId 72075186233410426 Deleted tabletId 72075186233410427 Deleted tabletId 72075186233410428 Deleted tabletId 72075186233410429 Deleted tabletId 72075186233410430 Deleted tabletId 72075186233410431 Deleted tabletId 72075186233410432 Deleted tabletId 72075186233410433 Deleted tabletId 72075186233410434 Deleted tabletId 72075186233410435 Deleted tabletId 72075186233410436 Deleted tabletId 72075186233410437 Deleted tabletId 72075186233410438 Deleted tabletId 72075186233410439 Deleted tabletId 72075186233410440 Deleted tabletId 72075186233410441 Deleted tabletId 72075186233410442 Deleted tabletId 72075186233410443 Deleted tabletId 72075186233410444 Deleted tabletId 72075186233410445 Deleted tabletId 72075186233410446 Deleted tabletId 72075186233410447 Deleted tabletId 72075186233410448 Deleted tabletId 72075186233410449 Deleted tabletId 72075186233410450 Deleted tabletId 72075186233410451 Deleted tabletId 72075186233410452 Deleted tabletId 72075186233410453 Deleted tabletId 72075186233410454 Deleted tabletId 72075186233410455 Deleted tabletId 72075186233410456 Deleted tabletId 72075186233410457 Deleted tabletId 72075186233410458 Deleted tabletId 72075186233410459 Deleted tabletId 72075186233410460 Deleted tabletId 72075186233410461 Deleted tabletId 72075186233410462 Deleted tabletId 72075186233410463 2024-11-21T17:47:08.337900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:08.337991Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 131us result status StatusSuccess 2024-11-21T17:47:08.338244Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233410546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2000 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2024-11-21T17:47:10.175248Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:10.196953Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:10.197107Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:10.197874Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] 
Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:10.206145Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:10.207846Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:10.208261Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:10.208546Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:10.208680Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.208687Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.208713Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:10.210741Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:10.210783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:10.210797Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:10.210836Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.210845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.210974Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.233402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.233438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.244204Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.244267Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:10.244283Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:10.244295Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.244318Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.244326Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.244331Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.244339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.255104Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.255152Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:10.255290Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:10.255295Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:10.256203Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:10.256379Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:10.256568Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0018ae/r3tmp/tmpHUQQQh/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:10.256614Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0018ae/r3tmp/tmpHUQQQh/pdisk_1.dat 2024-11-21T17:47:10.256618Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0018ae/r3tmp/tmpHUQQQh/pdisk_1.dat 2024-11-21T17:47:10.256726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:10.256776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:10.256788Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.256797Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:10.256819Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.256843Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 
VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.257195Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:10.257226Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.268071Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:10.268296Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:10.270031Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:10.270186Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018ae/r3tmp/tmpHUQQQh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:10.270286Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018ae/r3tmp/tmpHUQQQh/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0018ae/r3tmp/tmpHUQQQh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11195746722291943877 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:10.270440Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.270494Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.270499Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.270522Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.270532Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.270551Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.270567Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.270574Z node 1 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.270580Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.270597Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.270662Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.270667Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.270670Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.270686Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:10.270732Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.270736Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.270738Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.270743Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:10.271274Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:10.271281Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:10.271293Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:10.271295Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:10.271415Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:10.271432Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.271438Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.271442Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.271516Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:10.271551Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.271555Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.271556Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.287754Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.287772Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:10.287776Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.287781Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
>> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> KqpYql::UpdateBadType >> KqpRm::SingleSnapshotByExchanger [GOOD] >> KqpRm::NotEnoughExecutionUnits [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey-EnableKqpDataQueryStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15031, MsgBus: 6951 2024-11-21T17:47:08.759894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790574613287481:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.760165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001749/r3tmp/tmpgLYFjS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15031, node 1 2024-11-21T17:47:08.816450Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:08.824708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.824726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.824728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.824766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6951 2024-11-21T17:47:08.860286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.860314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:6951 2024-11-21T17:47:08.861434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.887281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.032595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790578908255376:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.032627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.055804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.115233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790578908255477:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.115266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790578908255482:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.115289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.115845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.119248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790578908255484:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 20067, MsgBus: 23541 2024-11-21T17:47:09.633040Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790581094140323:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:09.633324Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001749/r3tmp/tmpwx4ln2/pdisk_1.dat 2024-11-21T17:47:09.642246Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20067, node 2 2024-11-21T17:47:09.658041Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:09.658055Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:09.658057Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:09.658121Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23541 TClient is connected to server localhost:23541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.733245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:09.733274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:09.734372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:09.734963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.922929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581094140920:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.922955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.925613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.934669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581094141018:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.934697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.934731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581094141023:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.935513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.938276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790581094141025:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 3808, MsgBus: 17458 2024-11-21T17:47:08.826379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790575908408701:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.826603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001748/r3tmp/tmpo2Ce52/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3808, node 1 2024-11-21T17:47:08.880138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:08.887126Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.887139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.887141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.887185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17458 TClient is connected to server localhost:17458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:08.926370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.926394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:08.927541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.931259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.941866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.955882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:08.971776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.980701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.077039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580203377530:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.077060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.099795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.105131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.113086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.167354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.221773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.232093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.240591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580203378048:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.240610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.240638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580203378053:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.241188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.244964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790580203378055:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:09.490317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19735, MsgBus: 28809 2024-11-21T17:47:09.700352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790581180848827:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:09.700399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001748/r3tmp/tmpkX3Fqq/pdisk_1.dat 2024-11-21T17:47:09.708658Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19735, node 2 2024-11-21T17:47:09.717464Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:09.717478Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:09.717479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:09.717510Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28809 TClient is connected to server localhost:28809 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.800962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:09.800997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:09.802010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:09.802719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.813868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:09.821203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.836951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.849300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:10.001294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790585475817659:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:10.001325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:10.006689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:10.013332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:10.068597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:10.124499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:10.136005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:10.149656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:10.158076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790585475818173:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:10.158105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:10.158107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790585475818178:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:10.158689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:10.162132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790585475818180:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:10.337801Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230332, txId: 281474976715671] shutting down 2024-11-21T17:47:10.356291Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230352, txId: 281474976715673] shutting down 2024-11-21T17:47:10.456082Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230498, txId: 281474976715675] shutting down >> KqpScripting::StreamScanQuery >> KqpRm::SingleTask [GOOD] >> KqpYql::InsertIgnore ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2024-11-21T17:47:09.132334Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:09.151986Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:09.152092Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:09.153190Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:09.163573Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:09.165683Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:09.166067Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:09.166331Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:09.166433Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.166438Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.166458Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:09.168223Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:09.168283Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:09.168292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:09.168325Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.168334Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.168467Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.190308Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.190346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.200944Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.200976Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.200989Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.200996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.201009Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.201014Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.201017Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.201022Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.211804Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.211853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:09.212012Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:09.212032Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:09.213565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:09.213796Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:09.214037Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0018c3/r3tmp/tmptVhzOo/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:09.214090Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0018c3/r3tmp/tmptVhzOo/pdisk_1.dat 2024-11-21T17:47:09.214095Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0018c3/r3tmp/tmptVhzOo/pdisk_1.dat 2024-11-21T17:47:09.214209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 
2024-11-21T17:47:09.214270Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:09.214288Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.214298Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:09.214324Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.214354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.214744Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:09.214774Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.225536Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:09.225733Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:09.227247Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:09.227353Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018c3/r3tmp/tmptVhzOo/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:09.227442Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018c3/r3tmp/tmptVhzOo/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0018c3/r3tmp/tmptVhzOo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8631692491073015039 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:09.227553Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.227589Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.227592Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.227609Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.227614Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.227630Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.227642Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.227645Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.227649Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.227665Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.227709Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.227714Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:09.227717Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.227730Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:09.227770Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.227774Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:09.227776Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.227781Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:09.228399Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:09.228409Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:09.228428Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T17:47:09.228432Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:09.228603Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:09.228623Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.228630Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.228633Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.228735Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:09.228781Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.228787Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.228790Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.247087Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.247111Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:09.247115Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.247121Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:09.269811Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:09.270682Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T17:47:09.270772Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:09.271398Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:47:09.304021Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T17:47:09.355466Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T17:47:09.396594Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T17:47:09.652533Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:09.653256Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T17:47:09.653348Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.126398Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2024-11-21T17:47:10.126477Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2024-11-21T17:47:10.126670Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2024-11-21T17:47:10.126862Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:54:2071] ServerId# [1:348:2268] TabletId# 72057594037932033 PipeClientId# [2:54:2071] 2024-11-21T17:47:10.126925Z node 2 :TX_PROXY WARN: actor# [2:142:2085] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2024-11-21T17:47:10.126946Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientDestroyed {TabletId=72057594046578946 ClientId=[2:371:2096] 2024-11-21T17:47:10.126952Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar HandlePipeDestroyed - DISCONNECTED 2024-11-21T17:47:10.126958Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.126976Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:508:2096] 2024-11-21T17:47:10.127812Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:508:2096] 2024-11-21T17:47:10.127879Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:10.128008Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T17:47:10.128036Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.128049Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.128053Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.139143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2024-11-21T17:47:09.298411Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:09.325343Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:09.325500Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:09.326557Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:09.337006Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:09.339027Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:09.339464Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:09.339729Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:09.339847Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event 
Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.339853Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:09.339875Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:09.341789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:09.341837Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:09.341849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:09.341898Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.341909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:09.342044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.363974Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:09.364013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.374859Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:09.374901Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.374917Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:09.374927Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.374948Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:09.374956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.374962Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:09.374969Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.385753Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:09.385808Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:09.386000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} 
TTxLoadEverything Complete 2024-11-21T17:47:09.386008Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:09.387588Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:09.387788Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:09.388074Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0018c0/r3tmp/tmpVSOYJu/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:09.388140Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0018c0/r3tmp/tmpVSOYJu/pdisk_1.dat 2024-11-21T17:47:09.388146Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0018c0/r3tmp/tmpVSOYJu/pdisk_1.dat 2024-11-21T17:47:09.388318Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:09.388391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:09.388412Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.388425Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:09.388460Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.388495Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:09.388871Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:09.388916Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:09.399873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:09.400093Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 
Devices# [] 2024-11-21T17:47:09.402111Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:09.402290Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018c0/r3tmp/tmpVSOYJu/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:09.402417Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018c0/r3tmp/tmpVSOYJu/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0018c0/r3tmp/tmpVSOYJu/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9146374507026775713 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:09.402573Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.402630Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.402636Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:09.402665Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:09.402672Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.402695Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.402711Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.402717Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:09.402722Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:09.402746Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:09.402822Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.402830Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:09.402834Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.402853Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:09.402905Z node 1 :LOCAL 
DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:09.402911Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:09.402914Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:09.402922Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:09.403561Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:09.403573Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:09.403592Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:09.403596Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:09.403765Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:09.403784Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.403791Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.403794Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.403901Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:09.403952Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:09.403957Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:09.403960Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:09.421625Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.421646Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:09.421651Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:09.421656Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T17:47:09.445013Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:09.445748Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T17:47:09.445819Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:09.446282Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:47:09.478684Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T17:47:09.529977Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T17:47:09.571038Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T17:47:09.826027Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:09.826512Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:09.826586Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2024-11-21T17:47:10.605296Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:10.627764Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:10.627914Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:10.629029Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:10.640183Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:10.643074Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:10.643605Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:10.643880Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:10.643975Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# 
NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.643979Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.644001Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:10.646201Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:10.646264Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:10.646277Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:10.646336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.646351Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.646464Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.668622Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.668682Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.679556Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.679600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:10.679616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:10.679627Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.679652Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.679659Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.679664Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.679669Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.690417Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.690477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:10.690683Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything 
Complete 2024-11-21T17:47:10.690692Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:10.692369Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:10.692628Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:10.692952Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0018a1/r3tmp/tmpD83SYP/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:10.693038Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0018a1/r3tmp/tmpD83SYP/pdisk_1.dat 2024-11-21T17:47:10.693046Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0018a1/r3tmp/tmpD83SYP/pdisk_1.dat 2024-11-21T17:47:10.693268Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:10.693360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:10.693384Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.693400Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:10.693439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.693481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.694014Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:10.694121Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.705258Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:10.705537Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 
2024-11-21T17:47:10.707524Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:10.707691Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018a1/r3tmp/tmpD83SYP/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:10.707856Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018a1/r3tmp/tmpD83SYP/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0018a1/r3tmp/tmpD83SYP/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12299276205767986109 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:10.708064Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.708132Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.708136Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.708165Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.708172Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.708200Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.708219Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.708225Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.708246Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.708268Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.708350Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.708357Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.708361Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.708384Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:10.708439Z node 1 :LOCAL DEBUG: 
TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.708445Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.708448Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.708455Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:10.709130Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:10.709139Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:10.709158Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:10.709161Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:10.709355Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:10.709376Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.709384Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.709387Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.709489Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:10.709544Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.709550Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.709552Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.727803Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.727828Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:10.727833Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.727839Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
>> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2024-11-21T17:47:10.631646Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Bootstrap 2024-11-21T17:47:10.654425Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:10.654542Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:10.655197Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:10.663757Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:10.665252Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:10.665661Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:10.665909Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:10.666002Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.666006Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.666023Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:10.667539Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:10.667573Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:10.667580Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:10.667611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.667619Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.667698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.689412Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.689452Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.700171Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.700213Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 
2024-11-21T17:47:10.700225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:10.700258Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.700279Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.700287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.700292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.700300Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.711040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.711090Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:10.711261Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:10.711271Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:10.712741Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:10.712932Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:10.713191Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/0018a6/r3tmp/tmpAjzqVM/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:10.713243Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/0018a6/r3tmp/tmpAjzqVM/pdisk_1.dat 2024-11-21T17:47:10.713248Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/0018a6/r3tmp/tmpAjzqVM/pdisk_1.dat 2024-11-21T17:47:10.713364Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:10.713417Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:10.713433Z node 1 
:BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.713441Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:10.713464Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.713486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.713814Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:10.713867Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.724760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:10.724961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:10.726965Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:10.727098Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018a6/r3tmp/tmpAjzqVM/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:10.727217Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/0018a6/r3tmp/tmpAjzqVM/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/0018a6/r3tmp/tmpAjzqVM/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7931503990042830699 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:10.727381Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.727433Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.727439Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.727464Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.727470Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.727489Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.727502Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.727506Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.727510Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.727535Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.727588Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.727592Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.727595Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.727610Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:10.727662Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.727668Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.727671Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.727679Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:10.728211Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:10.728219Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:10.728252Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T17:47:10.728257Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:10.728407Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:10.728423Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.728430Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.728433Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.728536Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:10.728573Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.728577Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.728579Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.744404Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.744424Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:10.744429Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.744433Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpScripting::StreamExecuteYqlScriptScan >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> KqpScripting::LimitOnShard >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast >> KqpYql::NonStrictDml >> KqpYql::UpdateBadType [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> KqpYql::EvaluateExpr1 >> KqpYql::UpdatePk >> KqpYql::FlexibleTypes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 19243, MsgBus: 61577 2024-11-21T17:47:10.840518Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790583076802624:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:10.840765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001743/r3tmp/tmptHwcAs/pdisk_1.dat 2024-11-21T17:47:10.876136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19243, node 1 2024-11-21T17:47:10.891494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:10.891511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:10.891512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:10.891541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61577 TClient is connected to server localhost:61577 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:10.940566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:10.940608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:10.941718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:10.962167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:10.969582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.031067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.051652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.062592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.126202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587371771472:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.126232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.167988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.176089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.187009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.199248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.206767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.221126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.228948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587371771975:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.228962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587371771980:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.228977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.229683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:11.233414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790587371771982:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> KqpScripting::ScriptValidate >> KqpYql::InsertCV >> KqpPragma::Auth ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2024-11-21T17:46:56.680929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:56.680956Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:56.680982Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:56.684177Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:56.684378Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:56.684447Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:56.685578Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:56.694655Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:56.694821Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:56.694957Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:56.694966Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:56.694974Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:56.695012Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:56.698815Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:56.698893Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:56.698934Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:46:56.698939Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:56.698944Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:56.698950Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:56.699050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.699066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.699102Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:56.699126Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:56.699184Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:56.699192Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:56.699199Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:56.699204Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 
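For context on the KqpYql::UpdateBadType failure above: the type-annotation error (code 1030) means the UPDATE tried to assign a String value to the Uint64 column `Amount` of `/Root/Test`, so KiUpdateTable rejects the resulting row type. A minimal YQL sketch of that error class, assuming only the column type reported in the error text (this is not the literal query from the test):

    -- Amount is Uint64? in /Root/Test; assigning a String literal fails type
    -- annotation with "Failed to convert type: Struct<'Amount':String?> to
    -- Struct<'Amount':Uint64?>" and "Row type mismatch for table: db.[/Root/Test]".
    UPDATE `/Root/Test`
    SET Amount = "100";

Casting the literal explicitly (for example SET Amount = CAST("100" AS Uint64)) would satisfy the expected row type.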
2024-11-21T17:46:56.699208Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:56.699213Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:56.699219Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:56.708106Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.708134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.708156Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:56.708649Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:56.708667Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:56.708692Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:56.708726Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:56.708737Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:56.708745Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:56.708755Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:56.708760Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:56.708765Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:56.708769Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:56.708842Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:56.708848Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:56.708852Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:56.708856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:56.708867Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:56.708870Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:56.708873Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:56.708877Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:56.708881Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:56.730150Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:56.730180Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] 
at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:56.730187Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:56.730201Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:56.730216Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:56.730354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.730362Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.730370Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:56.730391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:56.730396Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:56.730446Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:56.730456Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.730461Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:56.730466Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:56.731205Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:56.731225Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:56.731291Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.731297Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.731306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:56.731314Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:56.731319Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:56.731327Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:46:56.731332Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:56.731338Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.731342Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:56.731347Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:56.731351Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:46:56.731399Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 
0 2024-11-21T17:46:56.731404Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.731407Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:56.731411Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:56.731414Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:56.731427Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:56.731431Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:46:56.731434Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:56.731437Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:56.731448Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:46:56.731451Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:46:56.731455Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:46:56.731460Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.731463Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:56.731467Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... llReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2024-11-21T17:47:11.593327Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:47:11.593355Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:11.593478Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2024-11-21T17:47:11.593494Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2024-11-21T17:47:11.593498Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2024-11-21T17:47:11.593504Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:47:11.593508Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:47:11.593518Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:11.593532Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2024-11-21T17:47:11.593536Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 
is Executed 2024-11-21T17:47:11.593539Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:47:11.593543Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:47:11.593547Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:11.593554Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:11.593647Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2024-11-21T17:47:11.593657Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:11.593670Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:47:11.593674Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:47:11.593677Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2024-11-21T17:47:11.593681Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2024-11-21T17:47:11.593688Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is DelayComplete 2024-11-21T17:47:11.593691Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2024-11-21T17:47:11.593695Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:11.593700Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2024-11-21T17:47:11.593708Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2024-11-21T17:47:11.593711Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:11.593715Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2024-11-21T17:47:11.607189Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:47:11.607228Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2024-11-21T17:47:11.607241Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T17:47:11.607275Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2024-11-21T17:47:11.615043Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T17:47:11.615075Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T17:47:11.615258Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4518:6448], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:11.615264Z node 3 
:TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:11.615273Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4517:6447], serverId# [3:4518:6448], sessionId# [0:0:0] 2024-11-21T17:47:11.615345Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:226:2221]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2024-11-21T17:47:11.615350Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:47:11.615378Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:11.615528Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2024-11-21T17:47:11.615545Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2024-11-21T17:47:11.615550Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2024-11-21T17:47:11.615554Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:47:11.615558Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:47:11.615568Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:11.615582Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2024-11-21T17:47:11.615587Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2024-11-21T17:47:11.615590Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:47:11.615593Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:47:11.615597Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:11.615604Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:11.615697Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2024-11-21T17:47:11.615707Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, 
SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:11.615719Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:47:11.615722Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:47:11.615725Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2024-11-21T17:47:11.615729Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2024-11-21T17:47:11.615736Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2024-11-21T17:47:11.615739Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2024-11-21T17:47:11.615742Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:11.615745Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2024-11-21T17:47:11.615754Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2024-11-21T17:47:11.615757Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:11.615760Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2024-11-21T17:47:11.619291Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2024-11-21T17:47:11.619313Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2024-11-21T17:47:11.619637Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:47:11.619649Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2024-11-21T17:47:11.619659Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2024-11-21T17:47:11.619685Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:47:11.620192Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:224:2220], Recipient [3:226:2221]: NKikimr::TEvTablet::TEvFollowerGcApplied .2024-11-21T17:47:11.621802Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4532:6461], Recipient [3:226:2221]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:11.621814Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:11.621822Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4531:6460], serverId# [3:4532:6461], sessionId# [0:0:0] 2024-11-21T17:47:11.621897Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4530:6459], Recipient [3:226:2221]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1713 LastUpdateTime: 1713 } >> KqpYql::JsonCast [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> KqpYql::NonStrictDml [GOOD] >> KqpYql::JsonNumberPrecision >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpYql::DdlDmlMix 
>> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::NoAstSizeLimit >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 16305, MsgBus: 11260 2024-11-21T17:47:11.018615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790588293413271:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.018714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173f/r3tmp/tmpQ8yDQk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16305, node 1 2024-11-21T17:47:11.073860Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:11.076973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.076984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.076985Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.077016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11260 TClient is connected to server localhost:11260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:11.118662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.118690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.119786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.145269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.154473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.170283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.186163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.196524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.295374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588293414810:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.295403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.327834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.334354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.346409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.353263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.360105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.367217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.375983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588293415313:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.376014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.376017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588293415318:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.376743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:11.380356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790588293415320:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. Trying to start YDB, gRPC: 25217, MsgBus: 17368 2024-11-21T17:47:11.703335Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790590130473377:2057];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173f/r3tmp/tmp1N6dS3/pdisk_1.dat 2024-11-21T17:47:11.710633Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:11.717473Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25217, node 2 2024-11-21T17:47:11.726516Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.726530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.726532Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.726575Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17368 TClient is connected to server localhost:17368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.803809Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.803838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.804892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:11.805035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.813759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.822427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.843370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:11.853431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.999039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790590130474901:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.999064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.005469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.014115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.025381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.032080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.039448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.053667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.072031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790594425442712:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.072064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.072109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790594425442717:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.072882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.080988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790594425442719:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } [[#]] >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::Discard >> KqpYql::UpdatePk [GOOD] >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 7192, MsgBus: 25507 2024-11-21T17:47:11.844018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790590593149063:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.844054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001723/r3tmp/tmpWs99Zv/pdisk_1.dat 2024-11-21T17:47:11.886138Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7192, node 1 2024-11-21T17:47:11.902845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.902856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.902858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.902888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25507 TClient is connected to server localhost:25507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:11.944393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.944413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.945612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.948319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.134382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790594888116954:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.134416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.169838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.232399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790594888117061:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.232425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.232485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790594888117066:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.233237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.235207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790594888117068:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 16218, MsgBus: 10912 2024-11-21T17:47:11.105441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790587944150390:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.105623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001737/r3tmp/tmplO4wvs/pdisk_1.dat 2024-11-21T17:47:11.148292Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16218, node 1 2024-11-21T17:47:11.165537Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.165555Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.165557Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.165588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10912 TClient is connected to server localhost:10912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:11.205662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.205683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.206729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.211935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.215181Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:11.220961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.235063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.249769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.258155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.396063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587944151916:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.396117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.421190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.426994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.437100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.444690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.458253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.465057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.481961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587944152430:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.482000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.482068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587944152435:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.482667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:11.485996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790587944152437:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:11.694149Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211231737, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 16051, MsgBus: 17786 2024-11-21T17:47:11.970652Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790587398067661:2193];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001737/r3tmp/tmpbpgFAZ/pdisk_1.dat 2024-11-21T17:47:11.982506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:11.986550Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16051, node 2 2024-11-21T17:47:11.992879Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.992892Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.992896Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.992929Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17786 TClient is connected to server localhost:17786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.070356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.070395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.071636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:12.072880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.074395Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:12.079369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.095855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.114267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.124024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.264791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790591693036339:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.264816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.269786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.276753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.284392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.299238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.313028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.326766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.344296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790591693036851:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.344328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.344349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790591693036856:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.345070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.353717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790591693036858:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::FromBytes >> KqpYql::InsertCV [GOOD] >> KqpYql::InsertCVList ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 30995, MsgBus: 29658 2024-11-21T17:47:11.974252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790587138640467:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.974532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00171f/r3tmp/tmpu9RKEu/pdisk_1.dat 2024-11-21T17:47:12.030887Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30995, node 1 2024-11-21T17:47:12.045490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.045505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.045507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.045532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29658 2024-11-21T17:47:12.074186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.074216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.075077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.089653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.101127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.161939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.180999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.190931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.262021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790591433609296:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.262051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.306858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.312565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.326241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.340878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.354577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.361793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.379489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790591433609811:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.379509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.379519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790591433609817:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.380158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.388405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790591433609819:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:20: Warning: At function: AsStruct
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:130:2153] sender: [1:132:2057] recipient: [1:106:2138] 2024-11-21T17:46:58.177791Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:58.178847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:58.178863Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:58.181647Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:58.181845Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:58.181935Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:58.193645Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:58.204342Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:58.204533Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:58.204692Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:58.204702Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:58.204710Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:58.204753Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:58.208744Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:58.208824Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:58.208870Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:46:58.208877Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:58.208882Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:58.208888Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.208972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.208990Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.209050Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:58.209071Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:58.209079Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.209085Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:58.209091Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 
2024-11-21T17:46:58.209096Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:58.209101Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:58.209105Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:58.209111Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 Leader for TabletID 9437184 is [1:130:2153] sender: [1:205:2057] recipient: [1:14:2061] 2024-11-21T17:46:58.218669Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.218688Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.218711Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:58.219222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:58.219236Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:58.219258Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:58.219295Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:58.219306Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:58.219316Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:58.219326Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.219331Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:58.219337Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:58.219341Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.219406Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:58.219411Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:58.219415Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:58.219419Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.219433Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:58.219437Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:58.219441Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:58.219444Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.219449Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit 
WaitForPlan 2024-11-21T17:46:58.245215Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:58.245261Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.245270Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.245285Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:58.245304Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:58.245447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.245456Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.245464Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:58.245485Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:58.245490Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:58.245555Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.245565Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.245570Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:58.245575Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:58.246345Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:58.246367Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.246437Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.246444Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.246453Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.246461Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:58.246465Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:58.246473Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2024-11-21T17:46:58.246479Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:58.246486Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.246490Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:58.246496Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:58.246500Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 
2024-11-21T17:46:58.246545Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T17:46:58.246550Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.246553Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:58.246558Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:58.246561Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:58.246576Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.246580Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:46:58.246583Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:58.246587Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:58.246599Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2024-11-21T17:46:58.246602Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2024-11-21T17:46:58.246605Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2024-11-21T17:46:58.246611Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.246614Z node 1 :TX_DATASHARD TRACE: Adv ... ard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 228 RawX2: 94489282735 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2024-11-21T17:47:12.268891Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [22:270:2258], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:47:12.268895Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T17:47:12.268907Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [22:119:2145], Recipient [22:228:2223]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2024-11-21T17:47:12.268910Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T17:47:12.268915Z node 22 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2024-11-21T17:47:12.268921Z node 22 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2024-11-21T17:47:12.300223Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [22:270:2258], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:47:12.300269Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:47:12.341516Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [22:281:2267], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:12.341547Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:12.341558Z node 22 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [22:279:2266], serverId# [22:281:2267], sessionId# [0:0:0] 2024-11-21T17:47:12.341593Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [22:278:2265], Recipient [22:228:2223]: NKikimrTabletBase.TEvGetCounters 
2024-11-21T17:47:12.343339Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [22:97:2132], Recipient [22:228:2223]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 94489282644 } 2024-11-21T17:47:12.343359Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T17:47:12.343435Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [22:283:2269], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:12.343439Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:12.343445Z node 22 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [22:282:2268], serverId# [22:283:2269], sessionId# [0:0:0] 2024-11-21T17:47:12.343481Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [22:97:2132], Recipient [22:228:2223]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 94489282644 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2024-11-21T17:47:12.343485Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:47:12.343513Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:12.343705Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2024-11-21T17:47:12.343722Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:47:12.343726Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2024-11-21T17:47:12.343729Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:47:12.343731Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:47:12.343739Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:47:12.343750Z node 22 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2024-11-21T17:47:12.343754Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:47:12.343756Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:47:12.343759Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:47:12.343762Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 
2024-11-21T17:47:12.343771Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:47:12.343776Z node 22 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2024-11-21T17:47:12.343779Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T17:47:12.343835Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:12.343837Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:12.343840Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:47:12.344223Z node 22 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2024-11-21T17:47:12.344270Z node 22 :TX_DATASHARD DEBUG: tx 2 released its data 2024-11-21T17:47:12.344276Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T17:47:12.344312Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:12.344317Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:12.344400Z node 22 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2024-11-21T17:47:12.344407Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:47:12.344521Z node 22 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2024-11-21T17:47:12.344532Z node 22 :TX_DATASHARD DEBUG: tx 2 released its data 2024-11-21T17:47:12.344537Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T17:47:12.344558Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:12.344561Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:12.344611Z node 22 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2024-11-21T17:47:12.344618Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:47:12.344718Z node 22 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2024-11-21T17:47:12.344725Z node 22 :TX_DATASHARD DEBUG: tx 2 released its data 2024-11-21T17:47:12.344728Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2024-11-21T17:47:12.344744Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:12.344747Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:12.344793Z node 22 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2024-11-21T17:47:12.344798Z node 22 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 
2024-11-21T17:47:12.417826Z node 22 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2024-11-21T17:47:12.417871Z node 22 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:12.417901Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:47:12.417910Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:47:12.417916Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2024-11-21T17:47:12.417923Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2024-11-21T17:47:12.417964Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:47:12.417968Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2024-11-21T17:47:12.417972Z node 22 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:12.417976Z node 22 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2024-11-21T17:47:12.417993Z node 22 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2024-11-21T17:47:12.417997Z node 22 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:12.418001Z node 22 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2024-11-21T17:47:12.429066Z node 22 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:47:12.429104Z node 22 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2024-11-21T17:47:12.429115Z node 22 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T17:47:12.429167Z node 22 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:47:12.429391Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [22:288:2274], Recipient [22:228:2223]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:12.429398Z node 22 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:12.429403Z node 22 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [22:287:2273], serverId# [22:288:2274], sessionId# [0:0:0] 2024-11-21T17:47:12.429428Z node 22 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [22:286:2272], Recipient [22:228:2223]: NKikimrTabletBase.TEvGetCounters >> KqpRm::SnapshotSharingByExchanger [GOOD] >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> KqpYql::JsonNumberPrecision [GOOD] >> KqpYql::RefSelect >> KqpScripting::NoAstSizeLimit [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpYql::UuidPrimaryKeyDisabled ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2024-11-21T17:47:10.566366Z node 1 :TX_PROXY DEBUG: 
actor# [1:141:2133] Bootstrap 2024-11-21T17:47:10.588224Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] Become StateWork (SchemeCache [1:149:2138]) 2024-11-21T17:47:10.588440Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Bootstrap 2024-11-21T17:47:10.589555Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] Become StateWork (SchemeCache [2:153:2088]) 2024-11-21T17:47:10.602694Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:10.606251Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:10.606736Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:10.606994Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:47:10.607108Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.607112Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:47:10.607138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:47:10.610375Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:47:10.610450Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:47:10.610465Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:47:10.610526Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.610540Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:47:10.610936Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.634643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:47:10.634696Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.645554Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:47:10.645610Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:10.645623Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:47:10.645632Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.645648Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:47:10.645654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.645658Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:47:10.645663Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.656615Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:47:10.656671Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:47:10.656835Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:47:10.656840Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:47:10.657896Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:47:10.658076Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2024-11-21T17:47:10.658291Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001891/r3tmp/tmpZBVEkU/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2024-11-21T17:47:10.658353Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/6fwh/001891/r3tmp/tmpZBVEkU/pdisk_1.dat 2024-11-21T17:47:10.658358Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/6fwh/001891/r3tmp/tmpZBVEkU/pdisk_1.dat 2024-11-21T17:47:10.658485Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2024-11-21T17:47:10.658545Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2024-11-21T17:47:10.658562Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.658572Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:10.658595Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 
GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.658621Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2024-11-21T17:47:10.658968Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2024-11-21T17:47:10.658994Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2024-11-21T17:47:10.669774Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2024-11-21T17:47:10.670021Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2024-11-21T17:47:10.671786Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T17:47:10.671930Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/6fwh/001891/r3tmp/tmpZBVEkU/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T17:47:10.672066Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/6fwh/001891/r3tmp/tmpZBVEkU/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001891/r3tmp/tmpZBVEkU/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 398994074205009955 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T17:47:10.672253Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.672320Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.672327Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:47:10.672352Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:47:10.672360Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.672383Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.672398Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.672402Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:47:10.672406Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:47:10.672426Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:47:10.672501Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.672507Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.672510Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.672528Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:371:2096] 2024-11-21T17:47:10.672576Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:47:10.672580Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:47:10.672582Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:47:10.672588Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:373:2281] 2024-11-21T17:47:10.673069Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:47:10.673074Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:363:2093] 2024-11-21T17:47:10.673086Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant 
/dc-1 2024-11-21T17:47:10.673089Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:362:2278] 2024-11-21T17:47:10.673245Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:373:2281] 2024-11-21T17:47:10.673261Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.673267Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.673269Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.673361Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:371:2096] 2024-11-21T17:47:10.673404Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:47:10.673407Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:47:10.673409Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2024-11-21T17:47:10.691252Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.691271Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:10.691276Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T17:47:10.691282Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T17:47:10.714313Z node 1 :TX_PROXY DEBUG: actor# [1:141:2133] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:10.715171Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2024-11-21T17:47:10.715288Z node 2 :TX_PROXY DEBUG: actor# [2:142:2085] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:10.715864Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:47:10.748432Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2024-11-21T17:47:10.800037Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2024-11-21T17:47:10.841169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:65} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2024-11-21T17:47:11.097505Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2024-11-21T17:47:11.098226Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2024-11-21T17:47:11.098331Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> KqpYql::Discard [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 24181, MsgBus: 17096 2024-11-21T17:47:11.760022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790588669294214:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.760261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001727/r3tmp/tmpvyJKXF/pdisk_1.dat 2024-11-21T17:47:11.818531Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24181, node 1 2024-11-21T17:47:11.842504Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.842522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.842524Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.842559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17096 2024-11-21T17:47:11.860182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.860210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.861322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.892202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.904134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.918123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:11.935298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.944786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.121690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790592964263054:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.121727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.155725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.210882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.221762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.235309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.242525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.256635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.271792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790592964263568:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.271821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790592964263573:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.271822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.272457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.276196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790592964263575:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 14236, MsgBus: 29735 2024-11-21T17:47:12.621119Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790594413899171:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.621345Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001727/r3tmp/tmp7O4Pww/pdisk_1.dat 2024-11-21T17:47:12.628707Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14236, node 2 2024-11-21T17:47:12.638476Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.638488Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.638489Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.638527Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29735 TClient is connected to server localhost:29735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.721346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.721374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.722465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:12.723610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.734744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.743091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.759339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.768423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.920528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790594413900705:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.920554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.925293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.932209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.942346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.949341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.956077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.963289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.972155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790594413901216:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.972176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.972203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790594413901221:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.972803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.976266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790594413901223:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 11203, MsgBus: 4237 2024-11-21T17:47:11.595319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790587618570387:2126];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.595405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00172c/r3tmp/tmpJqdLPg/pdisk_1.dat 2024-11-21T17:47:11.650293Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11203, node 1 2024-11-21T17:47:11.664870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.664884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.664885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.664914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4237 2024-11-21T17:47:11.695082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.695112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.696191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.724513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.732307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.793617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.812023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.827256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.894788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587618571850:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.894810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.921710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.928213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.942281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.953453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.962563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.976900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.985246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587618572352:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.985277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.985540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790587618572357:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.986340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:11.989585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790587618572359:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:12.225543Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232262, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 13301, MsgBus: 21800 2024-11-21T17:47:12.482149Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790592744347602:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.482175Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00172c/r3tmp/tmpEsppoG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13301, node 2 2024-11-21T17:47:12.498951Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:12.500063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.500075Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.500077Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.500109Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21800 TClient is connected to server localhost:21800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.582298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.582323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.583375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:12.585164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.596747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.604451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.621637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.629443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.771799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790592744349133:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.771838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.776880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.782653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.794967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.802453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.817727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.835591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.848890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790592744349644:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.848908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.848929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790592744349649:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.849660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.858189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790592744349651:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:13.043735Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233088, txId: 281474976715671] shutting down 2024-11-21T17:47:13.067812Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233109, txId: 281474976715673] shutting down 2024-11-21T17:47:13.091570Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233137, txId: 281474976715675] shutting down >> KqpYql::FromBytes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 8522, MsgBus: 3500 2024-11-21T17:47:11.807079Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790588259383147:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.807110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001725/r3tmp/tmpAMNy9E/pdisk_1.dat 2024-11-21T17:47:11.860547Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8522, node 1 2024-11-21T17:47:11.873321Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.873338Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.873340Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.873370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3500 TClient is connected to server localhost:3500 2024-11-21T17:47:11.907286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.907329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:11.908383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:11.921729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.929269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.947745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.968667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.026548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.102017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790592554351977:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.102046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.130522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.138190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.151877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.165172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.172199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.179508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.194920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790592554352482:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.194944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790592554352487:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.194950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.195652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.199862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790592554352489:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:12.391198Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790592554352814:2459] TxId: 281474976710672. Ctx: { TraceId: 01jd7xbkks2dz4v3xzcagxjsad, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI1YzRhNWMtYzZmMmY3NGMtMzIwNTM1ZDEtN2ZhNDc5NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T17:47:12.393531Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232437, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 9582, MsgBus: 3407 2024-11-21T17:47:12.677853Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790594127360273:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.677870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001725/r3tmp/tmpvmjbvi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9582, node 2 2024-11-21T17:47:12.694325Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:12.694441Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.694449Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.694451Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.694479Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3407 TClient is connected to server localhost:3407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.778200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.778236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.779302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:12.781053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.943495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790594127360874:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.943548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.944926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.001155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790598422328273:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.001175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.005207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790598422328284:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.005235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790598422328289:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.005235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.005781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.011616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790598422328291:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] >> KqpYql::RefSelect [GOOD] >> KqpYql::ScriptUdf >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 6503, MsgBus: 11323 2024-11-21T17:47:11.705841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790589273012507:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.705859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00172a/r3tmp/tmpAn6hbl/pdisk_1.dat 2024-11-21T17:47:11.770301Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6503, node 1 2024-11-21T17:47:11.784705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.784717Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.784719Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.784749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11323 2024-11-21T17:47:11.805696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.805716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.806769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.846474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.851535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.913821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.973395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.989521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.012951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593567981351:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.012972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.040547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.046443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.053126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.059966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.066950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.074539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.092652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593567981843:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.092662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593567981848:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.092675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.093298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.101723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790593567981850:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 22758, MsgBus: 19659 2024-11-21T17:47:12.595510Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790591397030550:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00172a/r3tmp/tmp91E7YP/pdisk_1.dat 2024-11-21T17:47:12.598100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:12.609489Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22758, node 2 2024-11-21T17:47:12.621011Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.621028Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.621029Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.621072Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19659 TClient is connected to server localhost:19659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.697659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.697691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.698056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.698711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:12.699260Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:12.706513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.715500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:47:12.735621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.749855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.909906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790591397031952:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.909938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.915493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.922339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.935743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.942077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.949361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.956781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.964800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790591397032455:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.964826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.964884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790591397032460:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.965571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.969644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790591397032462:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 16894, MsgBus: 12661 2024-11-21T17:47:11.967266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790589044213647:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.967322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001722/r3tmp/tmphw6GiK/pdisk_1.dat 2024-11-21T17:47:12.026840Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16894, node 1 2024-11-21T17:47:12.041049Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.041064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.041066Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.041113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12661 2024-11-21T17:47:12.066516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.066545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.067657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.101277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.104051Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:12.125403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.186240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.202075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.211741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.256849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593339182340:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.256877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.293150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.299684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.312084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.367094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.375360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.382266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.390853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593339182845:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.390893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.390911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593339182850:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.391605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.395677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790593339182852:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 9396, MsgBus: 11958 2024-11-21T17:47:12.852826Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790595180578848:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.852967Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001722/r3tmp/tmpRi9iyo/pdisk_1.dat 2024-11-21T17:47:12.863388Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9396, node 2 2024-11-21T17:47:12.869594Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.869606Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.869617Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.869651Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11958 TClient is connected to server localhost:11958 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.953333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.953366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.954495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:12.955649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.967040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.975597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.990736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.001271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.120127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790599475547675:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.120157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.124304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.130409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.138564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.144881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.152098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.159392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.167833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790599475548178:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.167856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.167904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790599475548183:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.168647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.172947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790599475548185:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:13.327748Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790599475548461:2458], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2024-11-21T17:47:13.327844Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWQ4YTE4MzMtNTQwZTYxNWYtNjFmMmIyMmQtYjg5YTQ0NmY=, ActorId: [2:7439790599475548454:2454], ActorState: ExecuteState, TraceId: 01jd7xbmh7b1hz6djv8msv1t7m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |77.3%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::ScriptStats [GOOD] >> KqpYql::ColumnNameConflict >> KqpYql::InsertCVList [GOOD] >> KqpScripting::StreamExecuteYqlScriptData |77.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |77.3%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 61295, MsgBus: 25809 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00171e/r3tmp/tmpwq4usc/pdisk_1.dat 2024-11-21T17:47:12.275130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:12.280878Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61295, node 1 2024-11-21T17:47:12.296352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.296366Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.296367Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.296396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25809 TClient is connected to server localhost:25809 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:12.333028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.333050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.334194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.360665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.371778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.387319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.403182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.412186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.514831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595229999127:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.514858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.544916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.550628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.556431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.563714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.570998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.577643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.587111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595229999632:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.587147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.587155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595229999637:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.587816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.591747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790595229999639:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 20302, MsgBus: 8658 2024-11-21T17:47:13.119539Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790596192258778:2056];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00171e/r3tmp/tmp4ZvvkY/pdisk_1.dat 2024-11-21T17:47:13.124777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:13.127254Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20302, node 2 2024-11-21T17:47:13.136171Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.136186Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.136188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.136248Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8658 TClient is connected to server localhost:8658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:13.219402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:13.219432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:13.220531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:13.221712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.232792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.240979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:13.258113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.267891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.406135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596192260305:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.406173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.411758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.417921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.425751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.439987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.446279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.453606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.461189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596192260818:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.461213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.461225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596192260823:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.461775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.466368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790596192260825:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 9312, MsgBus: 29379 2024-11-21T17:47:11.408117Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790588555572679:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.408132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001731/r3tmp/tmpwUprLY/pdisk_1.dat 2024-11-21T17:47:11.463861Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9312, node 1 2024-11-21T17:47:11.491119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.491136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.491138Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.491179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:11.508512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.508544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:29379 2024-11-21T17:47:11.509532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.556992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.559945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:11.565587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.626461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.648498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.658332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.734243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588555574216:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.734275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.774472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.781568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.794848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.801635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.808252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.825813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.883024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588555574733:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.883045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588555574738:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.883048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.883678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:11.891157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790588555574740:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:12.132642Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232157, txId: 281474976710672] shutting down 2024-11-21T17:47:12.133486Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232157, txId: 281474976710671] shutting down 2024-11-21T17:47:12.149059Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232178, txId: 281474976710675] shutting down 2024-11-21T17:47:12.149421Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232178, txId: 281474976710676] shutting down 2024-11-21T17:47:12.166603Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232206, txId: 281474976710679] shutting down 2024-11-21T17:47:12.182337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232220, txId: 281474976710682] shutting down 2024-11-21T17:47:12.183103Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232220, txId: 281474976710683] shutting down 2024-11-21T17:47:12.184570Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232220, txId: 281474976710681] shutting down 2024-11-21T17:47:12.189384Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232234, txId: 281474976710687] shutting down 2024-11-21T17:47:12.207707Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232248, txId: 281474976710690] shutting down 2024-11-21T17:47:12.208250Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232248, txId: 281474976710689] shutting down 2024-11-21T17:47:12.233597Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232276, txId: 281474976710693] shutting down 2024-11-21T17:47:12.249320Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232290, txId: 281474976710695] shutting down 2024-11-21T17:47:12.265404Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232304, txId: 281474976710697] shutting down 2024-11-21T17:47:12.280781Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232325, txId: 281474976710699] shutting down 2024-11-21T17:47:12.303393Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232346, txId: 281474976710701] shutting down 2024-11-21T17:47:12.322423Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232367, txId: 281474976710703] shutting down 2024-11-21T17:47:12.340816Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232381, txId: 281474976710705] shutting down 2024-11-21T17:47:12.364174Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232409, txId: 281474976710707] shutting down 2024-11-21T17:47:12.392673Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732211232437, txId: 281474976710709] shutting down 2024-11-21T17:47:12.417979Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232458, txId: 281474976710711] shutting down 2024-11-21T17:47:12.439489Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232479, txId: 281474976710713] shutting down 2024-11-21T17:47:12.465459Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232507, txId: 281474976710715] shutting down 2024-11-21T17:47:12.491095Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232535, txId: 281474976710717] shutting down 2024-11-21T17:47:12.515003Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232556, txId: 281474976710719] shutting down 2024-11-21T17:47:12.539511Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232584, txId: 281474976710721] shutting down Trying to ... atus: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.818041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.818065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.819230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:12.820906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.831557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.843773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.862029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.873013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.020276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790597539776703:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.020299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.025585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.032356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.040295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.046733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.054134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.061648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.069814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790597539777216:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.069834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.069841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790597539777221:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.070318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.074343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790597539777223:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:13.252209Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2I3NTU2MmItZTgwOWUyMzEtMzljNzAzY2YtMjhkZjY3NWY=, ActorId: [2:7439790597539777514:2457], ActorState: ExecuteState, TraceId: 01jd7xbmey0kc3ej0b64xddv61, Create QueryResponse for error on request, msg: 2024-11-21T17:47:13.256663Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmI3YWU0NmQtYjljMzJmMTMtYzhiZGQ5NTQtZjk5NTM1Zg==, ActorId: [2:7439790597539777528:2463], ActorState: ExecuteState, TraceId: 01jd7xbmf19psgs8k1gww4qf6k, Create QueryResponse for error on request, msg: 2024-11-21T17:47:13.271496Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDE2ZjZmODAtYThjZTJmNjAtMWQ5NzEzMjAtMTFmYjNlODc=, ActorId: [2:7439790597539777569:2483], ActorState: ExecuteState, TraceId: 01jd7xbmfgbwmf1acathtymnz2, Create QueryResponse for error on request, msg: 2024-11-21T17:47:13.283801Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233312, txId: 281474976715671] shutting down 2024-11-21T17:47:13.284473Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233312, txId: 281474976715673] shutting down 2024-11-21T17:47:13.287204Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233312, txId: 281474976715672] shutting down 2024-11-21T17:47:13.298930Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGFlODMzY2EtYmY4ZmIzOTctYWRhZjRlNmYtNzUxNDg3MGE=, ActorId: [2:7439790597539777850:2530], ActorState: ExecuteState, TraceId: 01jd7xbmg87dsfpaxazpz9na4p, Create QueryResponse for error on request, msg: 2024-11-21T17:47:13.306652Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233340, txId: 281474976715678] shutting down 2024-11-21T17:47:13.307342Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233340, txId: 281474976715677] shutting down 2024-11-21T17:47:13.320499Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233354, txId: 281474976715681] shutting down 2024-11-21T17:47:13.320675Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233354, txId: 281474976715682] shutting down 2024-11-21T17:47:13.333422Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233368, txId: 281474976715685] shutting down 2024-11-21T17:47:13.333597Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233368, txId: 281474976715686] shutting down 2024-11-21T17:47:13.347015Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233389, txId: 281474976715689] shutting down 2024-11-21T17:47:13.360671Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233403, txId: 281474976715691] shutting down 2024-11-21T17:47:13.371885Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233417, txId: 281474976715693] shutting down 2024-11-21T17:47:13.390938Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233431, txId: 281474976715695] shutting down 
2024-11-21T17:47:13.409355Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233452, txId: 281474976715697] shutting down 2024-11-21T17:47:13.431582Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233473, txId: 281474976715699] shutting down 2024-11-21T17:47:13.445089Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233487, txId: 281474976715701] shutting down 2024-11-21T17:47:13.464992Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233508, txId: 281474976715703] shutting down 2024-11-21T17:47:13.482751Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233529, txId: 281474976715705] shutting down 2024-11-21T17:47:13.502341Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWEyNDcwODUtMjQ4NmQ2YTQtMzJhYmQ2ZTQtY2E0MzcwNTc=, ActorId: [2:7439790597539779268:2763], ActorState: ExecuteState, TraceId: 01jd7xbmp8ergehmhwka3scrcr, Create QueryResponse for error on request, msg: 2024-11-21T17:47:13.503964Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233550, txId: 281474976715707] shutting down 2024-11-21T17:47:13.529739Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233571, txId: 281474976715709] shutting down 2024-11-21T17:47:13.550308Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233592, txId: 281474976715711] shutting down 2024-11-21T17:47:13.573466Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233613, txId: 281474976715713] shutting down 2024-11-21T17:47:13.600739Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233641, txId: 281474976715715] shutting down 2024-11-21T17:47:13.628984Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233669, txId: 281474976715717] shutting down 2024-11-21T17:47:13.653663Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233697, txId: 281474976715719] shutting down 2024-11-21T17:47:13.677896Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233718, txId: 281474976715721] shutting down >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] >> KqpPragma::ResetPerQuery >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpYql::CreateUseTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 18497, MsgBus: 16428 2024-11-21T17:47:12.197589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790593930111483:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.198210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00171b/r3tmp/tmp0HxiDY/pdisk_1.dat 2024-11-21T17:47:12.248097Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 18497, node 1 2024-11-21T17:47:12.265630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.265645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.265647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.265687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16428 2024-11-21T17:47:12.297149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.297174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.298071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.325050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.334446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.395551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.412024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.421875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.491895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593930112878:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.491918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.527321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.535631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.543431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.550552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.557104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.564685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.572539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593930113373:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.572570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.572602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790593930113378:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.573360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.577442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790593930113380:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 62230, MsgBus: 27290 2024-11-21T17:47:12.884477Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790591378319166:2062];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00171b/r3tmp/tmp6nD0eJ/pdisk_1.dat 2024-11-21T17:47:12.887021Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:12.897490Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62230, node 2 2024-11-21T17:47:12.904098Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.904112Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.904113Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.904157Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27290 TClient is connected to server localhost:27290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.984572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.984599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.985665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:12.986850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.991557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.999223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:13.015413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.027393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.150267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790595673288002:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.150297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.153366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.159081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.166139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.173197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.227747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.236578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.244429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790595673288507:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.244451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.244478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790595673288512:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.245177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.249966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790595673288514:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:13.497173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.558061Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233599, txId: 281474976715674] shutting down 2024-11-21T17:47:13.629622Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233669, txId: 281474976715678] shutting down 2024-11-21T17:47:13.676665Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233675, txId: 281474976715682] shutting down >> KqpYql::UuidPrimaryKey >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList [GOOD] Test command err: Trying to start YDB, gRPC: 26834, MsgBus: 32426 2024-11-21T17:47:12.313623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790595330658701:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.313805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001717/r3tmp/tmp7PlJ5K/pdisk_1.dat 2024-11-21T17:47:12.382975Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26834, node 1 2024-11-21T17:47:12.396592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.396604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.396606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.396638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32426 2024-11-21T17:47:12.414018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.414040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.414867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32426 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.442237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.453020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.515899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.532320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.544659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.621841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595330660233:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.621873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.667338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.676922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.691435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.704552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.718578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.725138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.733870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595330660746:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.733902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.733949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595330660751:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.734630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.740286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790595330660753:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:12.953660Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790595330661074:2468], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jd7xbm4rdx3z8bveq0hygnkx. SessionId : ydb://session/3?node_id=1&id=NTEyNGIwNWYtYjc4ZTQ3MTktYmNiOTVjMzUtZDYwOWIyNjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T17:47:12.953806Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790595330661075:2469], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTEyNGIwNWYtYjc4ZTQ3MTktYmNiOTVjMzUtZDYwOWIyNjM=. TraceId : 01jd7xbm4rdx3z8bveq0hygnkx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790595330661071:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:12.954946Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTEyNGIwNWYtYjc4ZTQ3MTktYmNiOTVjMzUtZDYwOWIyNjM=, ActorId: [1:7439790595330661048:2459], ActorState: ExecuteState, TraceId: 01jd7xbm4rdx3z8bveq0hygnkx, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 Trying to start YDB, gRPC: 63377, MsgBus: 3128 2024-11-21T17:47:13.184602Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790596229350478:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:13.184645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001717/r3tmp/tmp0s7Jy6/pdisk_1.dat 2024-11-21T17:47:13.195496Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63377, node 2 2024-11-21T17:47:13.205927Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.205940Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.205941Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.205970Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3128 TClient is connected to server localhost:3128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:13.285197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:13.285227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:13.286302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:13.287446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.293350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.303179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.320449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:13.330675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.461499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596229352012:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.461545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.464420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.470495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.481575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.536654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.544389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.551210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.559761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596229352530:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.559776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596229352535:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.559784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.560584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.568360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790596229352537:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:13.778505Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790596229352860:2468], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd7xbmyj7dyt2azwfnye1a2p. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YTQ1Y2M5MjctZWM5MTQ1NzktYjNkMWI5MTItZjkzZTBmZWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2024-11-21T17:47:13.778571Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790596229352861:2469], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=YTQ1Y2M5MjctZWM5MTQ1NzktYjNkMWI5MTItZjkzZTBmZWQ=. CustomerSuppliedId : . TraceId : 01jd7xbmyj7dyt2azwfnye1a2p. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439790596229352857:2459], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:13.778839Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTQ1Y2M5MjctZWM5MTQ1NzktYjNkMWI5MTItZjkzZTBmZWQ=, ActorId: [2:7439790596229352834:2459], ActorState: ExecuteState, TraceId: 01jd7xbmyj7dyt2azwfnye1a2p, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> KqpScripting::EndOfQueryCommit >> KqpYql::ScriptUdf [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 24389, MsgBus: 27037 2024-11-21T17:47:13.544272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790596133405715:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:13.544638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001709/r3tmp/tmptfhMR8/pdisk_1.dat 2024-11-21T17:47:13.594803Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24389, node 1 2024-11-21T17:47:13.612497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.612514Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.612516Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.612566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27037 2024-11-21T17:47:13.644333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:13.644364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:13.645460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:13.673534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.821100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790596133406311:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.821131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.858662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790596133406341:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.858687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.862901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790596133406356:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.862925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.866483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790596133406371:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.866504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.868769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.930720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790596133406456:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.930781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 15097, MsgBus: 12678 2024-11-21T17:47:12.660067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790591548842445:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.660081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001710/r3tmp/tmpDMby5d/pdisk_1.dat 2024-11-21T17:47:12.715715Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15097, node 1 2024-11-21T17:47:12.731632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.731644Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.731646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.731690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12678 2024-11-21T17:47:12.760312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.760338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.761421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12678 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.779039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.788094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.854553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.869865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.880161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.944981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790591548844001:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.945010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.984786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.991044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.045428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.054170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.060862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.067977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.076801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595843811803:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.076824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.076853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595843811808:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.077454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.081138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790595843811810:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 19579, MsgBus: 11060 2024-11-21T17:47:13.540255Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790598988939605:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:13.540297Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001710/r3tmp/tmpcXdIYB/pdisk_1.dat 2024-11-21T17:47:13.551598Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19579, node 2 2024-11-21T17:47:13.561623Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.561636Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.561637Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.561664Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11060 TClient is connected to server localhost:11060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:13.643999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:13.644026Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:13.644268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.645103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:13.647780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.657662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:13.677365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.687710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.839358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790598988941143:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.839381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.845260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.852431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.859270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.867230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.880633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.887646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.903198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790598988941654:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.903234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.903244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790598988941659:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.904088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.907756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790598988941661:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:14.081471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.113490Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234159, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 22843, MsgBus: 29595 2024-11-21T17:47:12.394850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790595155243973:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:12.395045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001713/r3tmp/tmpyQMZKo/pdisk_1.dat 2024-11-21T17:47:12.450435Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22843, node 1 2024-11-21T17:47:12.462702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:12.462720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:12.462722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:12.462763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29595 2024-11-21T17:47:12.495392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:12.495421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:12.496497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:12.511865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:12.516679Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:12.522012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.536861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.555623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.566352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:12.673978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595155245513:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.674023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.705713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.711431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.770491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.781353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.788315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.794988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:12.812281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595155246028:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.812327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.812339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790595155246033:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:12.815224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.823566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790595155246035:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:12.998945Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790595155246326:2459], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2024-11-21T17:47:12.999017Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTFmMzQ2NzItYTQ0YzlkMjYtZDRkNWI2N2YtZGNhN2YyYTM=, ActorId: [1:7439790595155246318:2454], ActorState: ExecuteState, TraceId: 01jd7xbm7418ywcvtsx8gjtpqj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 16744, MsgBus: 20492 2024-11-21T17:47:13.267182Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790596069150306:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:13.267409Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001713/r3tmp/tmpnThIOn/pdisk_1.dat 2024-11-21T17:47:13.277971Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16744, node 2 2024-11-21T17:47:13.289475Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.289488Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.289490Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.289530Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20492 TClient is connected to server localhost:20492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:13.367818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:13.367839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:13.369002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:13.369651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.375856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:13.384407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.404593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.415836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.575144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596069151850:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.575183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.580844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.587070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.592993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.600588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.614456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.628531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.645539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596069152353:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.645571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.645576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790596069152358:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.646391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.648005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790596069152360:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:13.845562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.958369Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234005, txId: 281474976715675] shutting down >> KqpPragma::OrderedColumns >> KqpScripting::ScriptingCreateAndAlterTableTest >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> KqpScripting::ScanQueryInvalid >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> KqpYql::BinaryJsonOffsetBound ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] Test command err: Trying to start YDB, gRPC: 13557, MsgBus: 8023 2024-11-21T17:47:13.263007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790599510041759:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:13.263249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00170d/r3tmp/tmplMr8EC/pdisk_1.dat 2024-11-21T17:47:13.307095Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13557, node 1 2024-11-21T17:47:13.324756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.324768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.324770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.324803Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8023 TClient is connected to server localhost:8023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:13.362811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:13.362834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:13.363985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:13.386912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.391816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.410708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.427748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.440546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:13.573058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790599510043291:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.573088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.605222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.611678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.622049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.636074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.649794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.656513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:13.664193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790599510043804:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.664216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790599510043809:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.664223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:13.664966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:13.669950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790599510043811:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 22123, MsgBus: 17615 2024-11-21T17:47:13.944623Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790598296292388:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:13.944949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00170d/r3tmp/tmpQZ1voE/pdisk_1.dat 2024-11-21T17:47:13.954767Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22123, node 2 2024-11-21T17:47:13.964529Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.964551Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.964554Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.964584Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17615 TClient is connected to server localhost:17615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.045402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.045436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.046488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:14.048165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.050645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.063143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:14.079925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.091622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.233058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790602591261218:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.233096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.237249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.242853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.251305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.257959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.315050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.329800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.344482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790602591261732:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.344508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.344510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790602591261737:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.345353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:14.348493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790602591261739:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> KqpYql::TableConcat >> KqpYql::UuidPrimaryKey [GOOD] >> KqpOlapBlobsSharing::HugeSchemeHistory [FAIL] >> KqpScripting::QueryStats >> KqpYql::EvaluateIf >> KqpYql::EvaluateExprPgNull >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 17751, MsgBus: 63461 2024-11-21T17:47:14.356702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790602217193510:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:14.356969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ff/r3tmp/tmp60drAz/pdisk_1.dat 2024-11-21T17:47:14.399930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17751, node 1 2024-11-21T17:47:14.415924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.415936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.415938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.415976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63461 2024-11-21T17:47:14.457400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.457427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.459272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.476151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:14.482502Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:14.657790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790602217194102:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.657813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.678900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.740130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790602217194202:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.740144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790602217194207:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.740149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.740758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:47:14.742426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790602217194209:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:47:14.936003Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790602217194425:2363], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2024-11-21T17:47:14.936074Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGI1ZDc5OGYtNjhkM2UxYTktY2QzOTBiMmQtYTI4ZTQzZWI=, ActorId: [1:7439790602217194084:2297], ActorState: ExecuteState, TraceId: 01jd7xbp3q6jk0a26eybxa2qfg, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpYql::TableNameConflict >> KqpScripting::ScriptExplainCreatedTable >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> KqpYql::ColumnTypeMismatch [GOOD] >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 21902, MsgBus: 5102 2024-11-21T17:47:11.547428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790588790042972:2060];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.548366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00172e/r3tmp/tmpxVebkd/pdisk_1.dat 2024-11-21T17:47:11.605629Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21902, node 1 2024-11-21T17:47:11.623362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.623373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.623374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.623401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5102 2024-11-21T17:47:11.645676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.645707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:11.646653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
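The KqpYql::UuidPrimaryKey report above ends with a compile-time failure, ':3:25: Error: Invalid value "invalid-uuid" for type Uuid': the malformed literal is rejected before execution. A minimal illustrative YQL sketch that would produce this kind of diagnostic, assuming a table keyed by a Uuid column (the table name and values are not taken from the test):

CREATE TABLE `/Root/UuidTable` (
    Key Uuid,
    Value String,
    PRIMARY KEY (Key)
);
-- "invalid-uuid" is not a well-formed UUID literal, so the UPSERT fails at
-- compilation with GENERIC_ERROR instead of reaching the data shards.
UPSERT INTO `/Root/UuidTable` (Key, Value)
VALUES (Uuid("invalid-uuid"), "x");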
2024-11-21T17:47:11.675249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.681561Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:11.692043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.707801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.727359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.739900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.843887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588790044504:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.843918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.883471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.891510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.899190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.955952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.962276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.969943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.981189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588790045019:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.981224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.981345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588790045024:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.982070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:12.003595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790588790045026:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:12.234057Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790593085012666:2459] TxId: 281474976710672. Ctx: { TraceId: 01jd7xbkeh8ea9tjvxqs79wbbm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database /Root 2024-11-21T17:47:13.209645Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7439790593085012608:2453] 2024-11-21T17:47:13.209906Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790593085012666:2459] TxId: 281474976710672. Ctx: { TraceId: 01jd7xbkeh8ea9tjvxqs79wbbm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T17:47:13.210854Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790593085012678:2469], TxId: 281474976710672, task: 7. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xbkeh8ea9tjvxqs79wbbm. SessionId : ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790593085012666:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:13.210942Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=, ActorId: [1:7439790593085012621:2459], ActorState: ExecuteState, TraceId: 01jd7xbkeh8ea9tjvxqs79wbbm, Create QueryResponse for error on request, msg: 2024-11-21T17:47:13.211015Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790593085012679:2470], TxId: 281474976710672, task: 8. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=. TraceId : 01jd7xbkeh8ea9tjvxqs79wbbm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790593085012666:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:13.211035Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232276, txId: 281474976710671] shutting down 2024-11-21T17:47:13.211084Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790593085012680:2471], TxId: 281474976710672, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=. TraceId : 01jd7xbkeh8ea9tjvxqs79wbbm. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790593085012666:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:13.211101Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790593085012671:2463], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=. TraceId : 01jd7xbkeh8ea9tjvxqs79wbbm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790593085012666:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:13.211318Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790593085012673:2464], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xbkeh8ea9tjvxqs79wbbm. SessionId : ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439790593085012666:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:13.211380Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790593085012674:2465], TxId: 281474976710672, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJkY2ItOTA0OWRkYzQtM2Q4YjlkZmI=. CustomerSuppliedId : . TraceId : 01jd7xbkeh8ea9tjvxqs79wbbm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790593085012666:2459], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:13.211396Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790593085012675:2466], TxId: 281474976710672, task: 4. Ctx: { TraceId : 01jd7xbkeh8ea9tjvxqs79wbbm. SessionId : ydb://session/3?node_id=1&id=NmU0YjdkZjUtYWNiMDJ ... cessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00172e/r3tmp/tmpb9EUBG/pdisk_1.dat 2024-11-21T17:47:14.469912Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4008, node 2 2024-11-21T17:47:14.477983Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.477998Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.478000Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.478041Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13776 TClient is connected to server localhost:13776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.556317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.556351Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.557395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:14.559156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.565906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.574560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.590375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:14.601936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.786466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790603447937335:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.786547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.791028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.798365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.811554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.818029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.825222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.833382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.848702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790603447937830:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.848734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.848840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790603447937835:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.849569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:14.852603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790603447937837:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:15.043808Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDBlMWRkODgtMjkyYTIwZmItZTQ5ZTI1NmQtNDVkYzRlMDc=, ActorId: [2:7439790607742905418:2454], ActorState: ExecuteState, TraceId: 01jd7xbp71aqtq7pz5jf6k7pa4, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.059263Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905442:2464] 2024-11-21T17:47:15.062927Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905452:2469] 2024-11-21T17:47:15.070449Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDgxOGViYS0xZmRkMDMzYy02YjVmOWZjYy1jNDhiNTM0ZQ==, ActorId: [2:7439790607742905467:2475], ActorState: ExecuteState, TraceId: 01jd7xbp7qfsa378z1v3me2s8d, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.089093Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905507:2491] 2024-11-21T17:47:15.098650Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905518:2496] 2024-11-21T17:47:15.114608Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905550:2506] 2024-11-21T17:47:15.126322Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjA2YzdkMS00ZTE1NmM2My1kNzIzZTc2Zi1lZTY5MmZkMA==, ActorId: [2:7439790607742905576:2516], ActorState: ExecuteState, TraceId: 01jd7xbp9b5wrbdbze7zr5z0wh, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.139588Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDUwOWE1NmItOTU3ODM5NmUtMTVlYTUzNDItYWY1MjRlZjQ=, ActorId: [2:7439790607742905607:2525], ActorState: ExecuteState, TraceId: 01jd7xbp9qdx9zsw0q9tshrjt8, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.203000Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mjg4YjFhZmQtMjk1ZTMzNWMtMzZiY2JmMDgtNDNmMGQ1MA==, ActorId: [2:7439790607742905699:2561], ActorState: ExecuteState, TraceId: 01jd7xbpbj68e7777zfhfhbry2, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.221558Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmJmZDc5YmMtMzAxYzU0MTQtOTMxMDMzNmItNTRlMzI3YjY=, ActorId: [2:7439790607742905721:2570], ActorState: ExecuteState, TraceId: 01jd7xbpc32x1kt0rcspdjfq0f, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.240940Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905772:2578] 2024-11-21T17:47:15.261022Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905818:2587] 2024-11-21T17:47:15.282186Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2YzOGU1NDMtZTczN2RjYzAtOTk3NzA5MzMtZDNmZGJjODE=, ActorId: [2:7439790607742905872:2597], ActorState: ExecuteState, TraceId: 01jd7xbpdx58748pjmf3c7thnz, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.309506Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWJmNzkxOTctNDRiYTU1NjUtMmU3ZDQwZGUtYjAyMDFiZmY=, ActorId: [2:7439790607742905944:2606], ActorState: ExecuteState, TraceId: 01jd7xbper7xjmh6jzpdmqb7tz, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.331726Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7439790607742905964:2614] 2024-11-21T17:47:15.332053Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790607742906010:2619] TxId: 281474976715675. 
Ctx: { TraceId: 01jd7xbpfd37hyb7ctzvaxpckx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODNmN2VmODktMjg4NGE5NzQtNTI5Y2QyMWMtN2QwNGFjMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T17:47:15.332166Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODNmN2VmODktMjg4NGE5NzQtNTI5Y2QyMWMtN2QwNGFjMQ==, ActorId: [2:7439790607742905975:2619], ActorState: ExecuteState, TraceId: 01jd7xbpfd37hyb7ctzvaxpckx, Create QueryResponse for error on request, msg: 2024-11-21T17:47:15.332270Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235377, txId: 281474976715674] shutting down 2024-11-21T17:47:15.332329Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790607742906020:2627], TxId: 281474976715675, task: 5. Ctx: { TraceId : 01jd7xbpfd37hyb7ctzvaxpckx. SessionId : ydb://session/3?node_id=2&id=ODNmN2VmODktMjg4NGE5NzQtNTI5Y2QyMWMtN2QwNGFjMQ==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439790607742906010:2619], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:15.353619Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235398, txId: 281474976715677] shutting down 2024-11-21T17:47:15.377896Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235419, txId: 281474976715679] shutting down 2024-11-21T17:47:15.400609Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235447, txId: 281474976715681] shutting down >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes >> KqpPragma::Warning [GOOD] >> KqpYql::TableConcat [GOOD] >> KqpYql::SelectNoAsciiValue >> KqpYql::TableUseBeforeCreate >> KqpYql::EvaluateIf [GOOD] >> KqpYql::EvaluateFor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 27395, MsgBus: 28789 2024-11-21T17:47:14.022626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790602048482684:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:14.022841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001706/r3tmp/tmpmmRs3C/pdisk_1.dat 2024-11-21T17:47:14.074173Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27395, node 1 2024-11-21T17:47:14.089737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.089751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.089752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.089789Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28789 2024-11-21T17:47:14.122638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.122667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.123748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28789 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.151805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.155077Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:14.160726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.176157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.192675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.202601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.325497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790602048484218:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.325551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.358287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.414381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.427678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.440466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.447682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.463116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.479473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790602048484734:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.479500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.479542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790602048484739:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.480388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:14.491266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790602048484741:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 16837, MsgBus: 19772 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001706/r3tmp/tmpp6Brxv/pdisk_1.dat 2024-11-21T17:47:14.890528Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:14.891615Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16837, node 2 2024-11-21T17:47:14.900162Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.900180Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.900183Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.900218Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19772 TClient is connected to server localhost:19772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:47:14.980836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.980866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.981243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.981816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:14.983676Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:14.986591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.999380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.025478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
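The ':7:30: Error: At function: KiCreateTable! ... Duplicate column: Value.' diagnostic above is reported during type annotation (code 1030), before any schema operation is proposed. A minimal illustrative YQL sketch of a DDL statement that would trigger it (the table and column names are assumptions, not the test's actual statement):

-- The second `Value` entry repeats an existing column name, so type
-- annotation fails at KiCreateTable with code 1030.
CREATE TABLE `/Root/DuplicateColumn` (
    Key Uint64,
    Value String,
    Value String,
    PRIMARY KEY (Key)
);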
2024-11-21T17:47:15.041281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.213470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790607179723915:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.213496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.218598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.225651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.239409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.253341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.259861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.273990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.290863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790607179724418:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.290889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.290936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790607179724423:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.291545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.293336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790607179724425:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:15.471007Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790607179724716:2459], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional<String>
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T17:47:15.471094Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2U4NDhlOWMtZjgxMjEwZTgtNTBjNzRmNmMtZTlhZWVmODk=, ActorId: [2:7439790607179724708:2454], ActorState: ExecuteState, TraceId: 01jd7xbpmb1rb5p0y4jz0kc4x5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional<String>
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 15496, MsgBus: 65396 2024-11-21T17:47:14.131687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790600965406000:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:14.131708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001703/r3tmp/tmpwJyvLh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15496, node 1 2024-11-21T17:47:14.185844Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:14.195441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.195455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.195457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.195492Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65396 TClient is connected to server localhost:65396 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:14.231949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.231973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:47:14.233008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.260560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.271755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
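The KqpYql::ColumnTypeMismatch report above ends with the conversion diagnostic ':6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>': the written row's Value is a Uint64 while the column expects a String. A minimal illustrative YQL sketch that reproduces this shape of error (table name and literals are assumptions):

CREATE TABLE `/Root/TypeMismatch` (
    Key Uint64,
    Value String,
    PRIMARY KEY (Key)
);
-- Value is declared as String, but 2ul is a Uint64 literal, so KiWriteTable
-- cannot convert the input row type and compilation fails with code 2031.
UPSERT INTO `/Root/TypeMismatch` (Key, Value)
VALUES (1ul, 2ul);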
2024-11-21T17:47:14.335470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.356683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.367630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.418229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600965407539:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.418255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.457994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.464796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.475444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.482451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.540847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.553217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.569340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600965408054:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.569370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.569392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600965408059:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.570072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:14.572043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790600965408061:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:14.774833Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234817, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 11613, MsgBus: 6516 2024-11-21T17:47:15.024867Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790606876591592:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.026984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001703/r3tmp/tmpM9Jc9P/pdisk_1.dat 2024-11-21T17:47:15.039370Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11613, node 2 2024-11-21T17:47:15.063133Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.063148Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.063150Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.063195Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6516 TClient is connected to server localhost:6516 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:15.125810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.125835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.126927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.129283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.131073Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.148680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.158908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.175687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.231805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.352170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790606876593004:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.352206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.358016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.365199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.380475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.392702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.406643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.413213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.422175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790606876593498:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.422192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.422196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790606876593504:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.422754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.426309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790606876593506:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:15.620085Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235664, txId: 281474976715671] shutting down 2024-11-21T17:47:15.639167Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235685, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 2328, MsgBus: 16791 2024-11-21T17:47:13.901376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790596122900795:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:13.901592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001708/r3tmp/tmpoK3IsB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2328, node 1 2024-11-21T17:47:13.963359Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:13.967606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:13.967619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:13.967621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:13.967649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16791 TClient is connected to server localhost:16791 2024-11-21T17:47:14.001453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.001488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:14.002674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.013843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:14.023146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.085564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.104556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.115704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.193110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600417869630:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.193140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.229685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.236551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.244176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.251170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.265561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.279591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.287935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600417870136:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.287961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600417870141:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.287971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.288659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:14.292255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790600417870143:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:14.448529Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODJkNjc2ODktNWNiNjdiZGQtMzkxODdmMjMtNjFkMDJhMg==, ActorId: [1:7439790600417870422:2454], ActorState: ExecuteState, TraceId: 01jd7xbnmee1qhga4n8t3kkdkz, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.456747Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZiMWMxNGQtNGY1N2JlOTgtMzU4YTI1ZmQtMWM2OTE0ODk=, ActorId: [1:7439790600417870435:2459], ActorState: ExecuteState, TraceId: 01jd7xbnmj5c4kc89r96pe43pf, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.468165Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTU2MWRlMTktMjE1YzFmNmEtNDI1YWMwNy01YjM1MmM3, ActorId: [1:7439790600417870453:2467], ActorState: ExecuteState, TraceId: 01jd7xbnmtdhypsx44d0zkymcw, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.476994Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTEyYWZlNy1iOTkzNTFkNS1hYzBmMTY1MC1iNGRhMjY1Nw==, ActorId: [1:7439790600417870470:2472], ActorState: ExecuteState, TraceId: 01jd7xbnn54q7bvpv64p7fqref, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.490127Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmE0NzNkNzktZTUzODQwOWMtMzhhM2FhZS1iMGE1ODgxMQ==, ActorId: [1:7439790600417870555:2490], ActorState: ExecuteState, TraceId: 01jd7xbnne16s5y63atm6npsma, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.492372Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234523, txId: 281474976710671] shutting down 2024-11-21T17:47:14.503197Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDIwOGJkYzctZWFiNjA2NWEtMTQ3ZTMxMmItNTlmY2JhOTY=, ActorId: [1:7439790600417870610:2499], ActorState: ExecuteState, TraceId: 01jd7xbnnxbab5psdv4cwbqbac, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.506324Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234544, txId: 281474976710673] shutting down 2024-11-21T17:47:14.516945Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234558, txId: 281474976710674] shutting down 2024-11-21T17:47:14.517863Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjAwNTg5YjUtZGI3ZGM0ZTUtMmEyMDk5ZGItYjVjNTFkZWY=, ActorId: [1:7439790600417870720:2517], ActorState: ExecuteState, TraceId: 01jd7xbnpcayedevjy4e9325kv, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.529046Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234572, txId: 281474976710678] shutting down 2024-11-21T17:47:14.529304Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234572, txId: 281474976710677] shutting down 2024-11-21T17:47:14.530379Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDdmNmQzYzAtOWU4NWVmNjUtZTQ3NjlhODAtZmU1YjdiMDM=, ActorId: [1:7439790600417870827:2532], ActorState: ExecuteState, TraceId: 01jd7xbnps9n8wbje3cmp8wpap, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.542598Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDk4N2ExOTktNmE0ZWYxMGMtNzQzYzBiMmMtOTM3OThmOWI=, ActorId: [1:7439790600417870976:2559], ActorState: ExecuteState, TraceId: 01jd7xbnq5ds4w45pxrsqqzs5g, Create 
QueryResponse for error on request, msg: 2024-11-21T17:47:14.549217Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234593, txId: 281474976710682] shutting down 2024-11-21T17:47:14.549446Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234593, txId: 281474976710681] shutting down 2024-11-21T17:47:14.559650Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjRhMDJjYjUtYTg4OWM0ODMtZGQ5NTMxOGQtNDUzZDg4ZjU=, ActorId: [1:7439790600417871011:2565], ActorState: ExecuteState, TraceId: 01jd7xbnqf36ek55b46r9y6797, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.572761Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJjZTkzN2QtOWU2ZDk0ZTMtYzgwNjljYmQtNjRiMDBhZjE=, ActorId: [1:7439790600417871159:2592], ActorState: ExecuteState, TraceId: 01jd7xbnr13v72645q95nep2ks, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.576022Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234614, txId: 281474976710686] shutting down 2024-11-21T17:47:14.576178Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234614, txId: 281474976710685] shutting down 2024-11-21T17:47:14.587065Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY2Mjk2NWMtZGZhODVkYS00OTkxOWY2Zi1iMzM1YTU2OQ==, ActorId: [1:7439790600417871304:2616], ActorState: ExecuteState, TraceId: 01jd7xbnre6wbw2pjsdzf0m75z, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.600123Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234642, txId: 281474976710689] shutting down 2024-11-21T17:47:14.600179Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234642, txId: 281474976710690] shutting down 2024-11-21T17:47:14.616760Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ZlODE4MTMtNjAxNGQwNjAtOWUyZjM1NzQtYjY3Yjk0Njg=, ActorId: [1:7439790600417871512:2649], ActorState: ExecuteState, TraceId: 01jd7xbnsa5kyx5zeygvp7w846, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.623600Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234663, txId: 281474976710693] shutting down 2024-11-21T17:47:14.634352Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDI4NTQwMWMtZTA0ZTMzNDktODg2N2UzZDYtODVkYjExNTA=, ActorId: [1:7439790600417871544:2658], ActorState: ExecuteState, TraceId: 01jd7xbnstdrxh8fqgzq8qrj59, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.643248Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234684, txId: 281474976710695] shutting down 2024-11-21T17:47:14.653178Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDkzZmVkNmQtOGM5YmEyOGItYTYwNzFlZWMtMzA1MDQ1MWI=, ActorId: [1:7439790600417871665:2676], ActorState: ExecuteState, TraceId: 01jd7xbntc2mwgwk85zmztwdnf, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.662605Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234705, txId: 281474976710697] shutting down 2024-11-21T17:47:14.672082Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjcwMjcyZTctNGEzMWVjYTEtMWZmYTkwMjQtZjY0MTRjMzU=, ActorId: 
[1:7439790600417871764:2694], ActorState: ExecuteState, TraceId: 01jd7xbnty03z3gq31w9bhy7a4, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.680027Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234719, txId: 281474976710699] shutting down 2024-11-21T17:47:14.692063Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzEyYjY1Y2QtYWJjZWVmNDYtODBkZDJmOWQtOGI2YjE0, ActorId: [1:7439790600417871872:2712], ActorState: ExecuteState, TraceId: 01jd7xbnvh0wcq392ejw52tvv4, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.694791Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234740, txId: 281474976710701] shutting down 2024-11-21T17:47:14.712676Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234754, txId: 281474976710703] shutting down 2024-11-21T17:47:14.735083Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmEzYTNiNTctOTdkYmI1ZjEtN2ZmMzU1ZmItM2U1OTI4ZTY=, ActorId: [1:7439790600417872177:2757], ActorState: ExecuteState, TraceId: 01jd7xbnwta1xzdek62kckb1wv, Create QueryResponse for error on request, msg: 2024-11-21T17:47:14.736859Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234782, txId: 281474976710705] shutting down 2024-11-21T17:47:14.756324Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211234803, txId: 281474976710707] shutting down Trying to start YDB, gRPC: 21073, MsgBus: 65107 2024-11-21T17:47:15.000343Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790606517069737:2194];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001708/r3tmp/tmpgQEpGG/pdisk_1.dat 2024-11-21T17:47:15.005332Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 21073, node 2 2024-11-21T17:47:15.014796Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:15.018085Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.018113Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.018114Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.018145Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65107 TClient is connected to server localhost:65107 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.099732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.099760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.100838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:15.102054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.104592Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.121038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.177388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.192976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.206983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.354709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790606517071126:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.354730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.358979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.365968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.379548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.392372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.400302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.413661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.421852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790606517071638:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.421880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.421923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790606517071643:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.422444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.426243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790606517071645:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExprYsonAndType >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 24615, MsgBus: 14929 2024-11-21T17:47:14.231701Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790600200836280:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:14.231740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001702/r3tmp/tmpy2fsDc/pdisk_1.dat 2024-11-21T17:47:14.298562Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24615, node 1 2024-11-21T17:47:14.312371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.312384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.312386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.312422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14929 2024-11-21T17:47:14.331477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.331499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.332681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.375449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.420868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:14.484269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.505537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.515818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.546107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600200837811:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.546141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.577227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.633254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.645263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.657389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.663790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.671301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:14.679929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600200838314:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.679951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.679966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600200838319:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.680612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:14.689691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790600200838321:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:14.873415Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790600200838644:2465], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate /lib/yql/aggregate.yql:644:18: Error: At function: AggregationTraits /lib/yql/aggregate.yql:58:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2024-11-21T17:47:14.873563Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ZkZWQ5NzAtYTNiYjcyNjUtOTc5MzNjY2MtOGY1YjViYmQ=, ActorId: [1:7439790600200838592:2454], ActorState: ExecuteState, TraceId: 01jd7xbp1nd7fbbts6yyy8ap9v, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 14397, MsgBus: 2898 2024-11-21T17:47:15.149539Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790605519002469:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.149555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001702/r3tmp/tmpKBwNXZ/pdisk_1.dat 2024-11-21T17:47:15.165917Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14397, node 2 2024-11-21T17:47:15.173220Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.173233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.173235Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.173269Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2898 TClient is connected to server localhost:2898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.247702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.247729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.247971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.248732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:15.249583Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.260055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.269749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.290265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.300112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.451542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790605519004004:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.451569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.456988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.464014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.519096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.533342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.548804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.561258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.576766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790605519004518:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.576793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.576898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790605519004523:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.577622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.580185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790605519004525:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |77.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> KqpYql::AnsiIn [GOOD] >> KqpYql::TableNameConflict [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] >> KqpOlap::BulkUpsertUpdate [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] |77.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 24271, MsgBus: 12971 2024-11-21T17:47:14.627860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790600725688539:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:14.627876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016fc/r3tmp/tmp1kiLQT/pdisk_1.dat 2024-11-21T17:47:14.694556Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24271, node 1 2024-11-21T17:47:14.704579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.704589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.704590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.704614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12971 2024-11-21T17:47:14.727925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:14.727950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.729038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:14.748634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:14.760081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.821555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.839426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.850017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:14.924514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790600725690068:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.924542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:14.951145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.006473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.014892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.020751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.035395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.049190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.059468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605020657880:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.059494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.059544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605020657885:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.060140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.062496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790605020657887:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:15.272544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.331906Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235377, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 61695, MsgBus: 6380 2024-11-21T17:47:15.481870Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790607382080036:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.482050Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016fc/r3tmp/tmpdtEdUc/pdisk_1.dat 2024-11-21T17:47:15.495814Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61695, node 2 2024-11-21T17:47:15.502153Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.502168Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.502170Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.502226Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6380 TClient is connected to server localhost:6380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.582472Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.582495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.583607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:15.586117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.595614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.607003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.624321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.634491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.803259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790607382081581:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.803282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.809520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.816391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.826774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.841103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.854391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.861475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.870056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790607382082082:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.870082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.870090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790607382082087:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.870675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.874505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790607382082089:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 28765, MsgBus: 29112 2024-11-21T17:47:15.597889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790607279675283:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.597919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016e7/r3tmp/tmpfWDajG/pdisk_1.dat 2024-11-21T17:47:15.647784Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28765, node 1 2024-11-21T17:47:15.663596Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.663618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.663620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.663648Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29112 2024-11-21T17:47:15.694222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.694244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.695306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.726379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.868185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607279675879:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.868210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.899767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.963277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607279675981:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.963310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.963325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607279675986:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.963874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.972678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790607279675988:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpYql::SelectNoAsciiValue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 19821, MsgBus: 29642 2024-11-21T17:47:15.702437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790606899206797:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.702452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016e2/r3tmp/tmpom7tZ3/pdisk_1.dat 2024-11-21T17:47:15.746498Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19821, node 1 2024-11-21T17:47:15.763654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.763664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.763665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.763690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29642 TClient is connected to server localhost:29642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:15.802796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.802826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.803875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.808920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.821264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.833365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.849404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.862086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.972626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790606899208326:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.972655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.998181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.004132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.015263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.022166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.028945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.036406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.051721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790611194176137:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.051740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.051747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790611194176142:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.052336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.056950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790611194176144:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. >> KqpYql::EvaluateFor [GOOD] >> KqpScripting::SecondaryIndexes [GOOD] >> KqpScripting::Pure [GOOD] >> KqpYql::TableUseBeforeCreate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 62464, MsgBus: 8730 2024-11-21T17:47:15.007636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790605444491583:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.007665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f6/r3tmp/tmpwixCh8/pdisk_1.dat 2024-11-21T17:47:15.051781Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62464, node 1 2024-11-21T17:47:15.070171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.070182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.070183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.070214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8730 TClient is connected to server localhost:8730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:15.112607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.112644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.113588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.133972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.139699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
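Note on the KiCreateTable failure above: type annotation reports that the single name `/Root/Test` resolves to more than one table object inside one query. The exact YQL the KqpYql::TableNameConflict test submitted is not captured in this log, so the sketch below is only a guess at the shape of such a script (the path is taken from the error text, the column set is assumed); treat it as illustrative, not as the test's actual input.

    -- Hypothetical sketch only; columns are assumed, not the test's real body.
    -- `/Root/Test` is already part of the suite's prepared schema, so declaring
    -- it again under the same path can leave the one name bound to two
    -- different table objects during type annotation.
    CREATE TABLE `/Root/Test` (
        Id Uint32,
        Data String,
        PRIMARY KEY (Id)
    );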
2024-11-21T17:47:15.209942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.268651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:15.279036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.300650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605444493123:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.300684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.332684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.339516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.350481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.357350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.364512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.379998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.394263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605444493615:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.394289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605444493620:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.394296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.394974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.402040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790605444493622:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:15.600601Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=ZDA4N2I0ZDUtM2IzMmI0OWMtZDVmZWNmNDEtNTk5Yzc5NGY=, ActorId: [1:7439790605444493906:2454], ActorState: ExecuteState, TraceId: 01jd7xbpr88p8y9vvsrarj78sp, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2024-11-21T17:47:15.600616Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDA4N2I0ZDUtM2IzMmI0OWMtZDVmZWNmNDEtNTk5Yzc5NGY=, ActorId: [1:7439790605444493906:2454], ActorState: ExecuteState, TraceId: 01jd7xbpr88p8y9vvsrarj78sp, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer Trying to start YDB, gRPC: 15183, MsgBus: 3769 2024-11-21T17:47:15.880158Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790607187925335:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.880217Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f6/r3tmp/tmpF0lLeP/pdisk_1.dat 2024-11-21T17:47:15.889396Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15183, node 2 2024-11-21T17:47:15.901156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.901186Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.901189Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.901232Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3769 TClient is connected to server localhost:3769 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:15.980377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.980420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.981687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:15.983247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.995617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.005219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.025394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.037501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.172390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611482894161:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.172424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.177095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.183438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.190353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.197227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.204359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.211238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.220912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611482894666:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.220942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.221297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611482894673:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.221889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.224366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790611482894677:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 30946, MsgBus: 20382 2024-11-21T17:47:15.021162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790607101823634:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.021218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f0/r3tmp/tmpGBVVT0/pdisk_1.dat 2024-11-21T17:47:15.107346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30946, node 1 2024-11-21T17:47:15.120936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.120978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.122388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:15.124390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.124401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.124402Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.124450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20382 TClient is connected to server localhost:20382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.183936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:15.212280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.227008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.244387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.253258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.327458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607101824955:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.327503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.356380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.361929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.370704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.425242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.480307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.490343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.506681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607101825474:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.506728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.506802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607101825479:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.507587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.510200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790607101825481:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:15.690306Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790607101825782:2462], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2024-11-21T17:47:15.691224Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTM4ZjYwNjEtZGE1Mzc3ODAtYzE1YjZlZmEtNTM2NGU0ZDc=, ActorId: [1:7439790607101825780:2461], ActorState: ExecuteState, TraceId: 01jd7xbpv74a9433rd6r40fw9f, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2024-11-21T17:47:15.707239Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790607101825813:2475], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 2024-11-21T17:47:15.707531Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzA4OWQzOWMtODAyNzEwYTUtYzhlMzQ1MWItYmI5NTU1OTY=, ActorId: [1:7439790607101825811:2474], ActorState: ExecuteState, TraceId: 01jd7xbpvs3cr1fy6we18ern45, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 28895, MsgBus: 7127 2024-11-21T17:47:15.905698Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790604808344347:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f0/r3tmp/tmpfSq4U4/pdisk_1.dat 2024-11-21T17:47:15.908331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:15.914849Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28895, node 2 2024-11-21T17:47:15.923802Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.923813Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.923814Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.923847Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7127 TClient is connected to server localhost:7127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:16.005411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.005439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:16.006489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:16.007604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.010871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.023380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
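The two PRECONDITION_FAILED compile errors above come from the negative cases of KqpScripting::ScanQueryTruncate: a scan query may produce only one result set and may not modify data. The query text itself is not printed in the log; the following is a loose illustration of the two rejected patterns (statement bodies and column names are assumptions, only the table path `/Root/EightShard` appears elsewhere in this log):

    -- Loose illustration only; not the test's actual statements.
    -- Rejected: a scan query that yields two result sets
    SELECT * FROM `/Root/EightShard`;
    SELECT * FROM `/Root/EightShard`;
    -- Rejected: a scan query that attempts a write (column names assumed)
    UPSERT INTO `/Root/EightShard` (Key, Data) VALUES (1u, 1);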
2024-11-21T17:47:16.040391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.051928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.198258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609103313020:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.198286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.202350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.208413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.218393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.225360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.239226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.254457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.269105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609103313531:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.269140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.269209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609103313536:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.270108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.273812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790609103313538:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:16.462356Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7439790609103313917:2057], tablet: [2:7439790609103312225:2300], scanId: 1, table: /Root/EightShard 2024-11-21T17:47:16.463645Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211236504, txId: 281474976715671] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::BulkUpsertUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 22590, MsgBus: 24484 2024-11-21T17:46:15.744620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790347205499373:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:15.744641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e33/r3tmp/tmp1KmGkI/pdisk_1.dat 2024-11-21T17:46:15.788462Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22590, node 1 2024-11-21T17:46:15.801736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:15.801763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:15.801766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:15.801803Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24484 TClient is connected to server localhost:24484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:15.844831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:15.844881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:15.846007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:15.872921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, value Int32 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:16.025632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790351500467265:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
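The CREATE TABLE statement above is the column-table setup for KqpOlap::BulkUpsertUpdate. The rest of the test drives the data through the bulk-upsert API rather than printing SQL into the log, so the sketch below is only an assumed YQL approximation of a bulk-upsert-then-update flow over that table, not the test's code:

    -- Rough sketch only; the test uses the BulkUpsert API, these statements are
    -- illustrative assumptions against the table created above.
    UPSERT INTO `/Root/ColumnTableTest` (id, value) VALUES (1, 10), (2, 20);
    UPSERT INTO `/Root/ColumnTableTest` (id, value) VALUES (1, 110);  -- overwrite by primary key
    SELECT id, value FROM `/Root/ColumnTableTest` ORDER BY id;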
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.025670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.048047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:16.054799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:16.054847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:16.054915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:16.054933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:16.054949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:16.054964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:16.054977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:16.054997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:16.055021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:16.055037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:16.055056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:16.055071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:16.055407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:16.055418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:16.055427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:16.055429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:16.055443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:16.055446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:16.055452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:16.055456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:16.055465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:16.055472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:16.055476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:16.055481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:16.055530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:16.055538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:16.055549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:16.055559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:16.055566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:16.055572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:16.055582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:16.055588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:16.055595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:16.055600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=208;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=208;columns=2; 2024-11-21T17:46:16.115871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790351500467486:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.115890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.115907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790351500467491:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:16.116452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:46:16.117603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790351500467493:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:46:16.244995Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211176171, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=208;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=208;columns=2; 2024-11-21T17:46:16.291235Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211176269, txId: 18446744073709551615] shutting down 2024-11-21T17:46:20.745043Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790347205499373:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:20.745090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:30.787877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:46:30.787889Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:16.059237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790351500467342:2303];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:16.063697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=a5846876-a83011ef-8f823291-b9b3f922;fline=with_appended.cpp:80;portions=3,;task_id=a5846876-a83011ef-8f823291-b9b3f922; 2024-11-21T17:47:16.389116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211236000, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 11390, MsgBus: 9003 2024-11-21T17:47:14.884834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790600058147105:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:14.885040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f3/r3tmp/tmploZsr2/pdisk_1.dat 2024-11-21T17:47:14.925314Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11390, node 1 2024-11-21T17:47:14.945578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.945591Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.945593Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.945649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9003 2024-11-21T17:47:14.986239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T17:47:14.986275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:14.987472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:47:15.007234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.009816Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.014523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.038779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.064815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.085930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.214922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604353115934:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.214955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.253345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.259549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.265820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.273147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.280056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.287318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.295653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604353116445:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.295690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.295701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604353116450:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.296325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.300513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790604353116452:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:15.509969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.562069Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235608, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 10280, MsgBus: 5024 2024-11-21T17:47:15.770107Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790607332888646:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.770334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f3/r3tmp/tmpUT8YUr/pdisk_1.dat 2024-11-21T17:47:15.780247Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10280, node 2 2024-11-21T17:47:15.789423Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.789438Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.789439Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.789483Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5024 TClient is connected to server localhost:5024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.870573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.870608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.871703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:15.872320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
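The sequence that repeats throughout this log, a KQP_WORKLOAD_SERVICE warning that resource pool "default" was NOT_FOUND, followed by an ESchemeOpCreateResourcePool suboperation and a TPoolCreatorActor retry logged as "Transaction ... completed, doublechecking", is the workload service lazily provisioning the default resource pool the first time a query runs against a freshly created test database, so these warnings are expected startup noise rather than test failures. As a rough illustration of what such a pool looks like when declared explicitly, the YQL sketch below creates a named pool; the WITH option names are assumptions for illustration only and are not taken from this log, so consult the YDB documentation for the exact settings your server version supports.

-- Minimal sketch, assuming CREATE RESOURCE POOL DDL is available on the target
-- server; the option names below are illustrative assumptions, not values used
-- by these tests (the tests rely on automatic creation of the "default" pool).
CREATE RESOURCE POOL demo_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- assumed option: cap on queries running at once
    QUEUE_SIZE = 100              -- assumed option: how many queries may wait
);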
2024-11-21T17:47:15.875141Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.877989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.899793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:15.917067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.926923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.071920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611627857478:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.071948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.077595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.132120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.187158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.197697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.203901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.211134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.220727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611627857994:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.220765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.220822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611627857999:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.221489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.225828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790611627858001:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:16.409381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.471127Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211236518, txId: 281474976715675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 14792, MsgBus: 11772 2024-11-21T17:47:15.428841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790604668652820:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.428868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ea/r3tmp/tmpsZwDnr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14792, node 1 2024-11-21T17:47:15.485938Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:15.493192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.493204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.493206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.493240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11772 TClient is connected to server localhost:11772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:15.528852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.528880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.529995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.561528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.570649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.585227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.600343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.611254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.722839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604668654357:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.722868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.754163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.760217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.770348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.777122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.783792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.791234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.805908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604668654870:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.805935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.806018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604668654875:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.806866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.811408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790604668654877:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 17964, MsgBus: 14539 2024-11-21T17:47:16.109367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790608738875915:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:16.109688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ea/r3tmp/tmpfXqsf6/pdisk_1.dat 2024-11-21T17:47:16.118819Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17964, node 2 2024-11-21T17:47:16.128833Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:16.128847Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:16.128849Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:16.128892Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14539 TClient is connected to server localhost:14539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:16.209822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.209859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:16.210936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:16.211991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.216258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.227905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:16.250174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.261384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.413905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790608738877453:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.413964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.418468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.425317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.435283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.442150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.448851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.456000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.465294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790608738877962:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.465327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.465343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790608738877967:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.465861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.469033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790608738877969:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpYql::EvaluateExprYsonAndType [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 23514, MsgBus: 16895 2024-11-21T17:47:15.172386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790607954881625:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.172452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ee/r3tmp/tmpdkLrgV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23514, node 1 2024-11-21T17:47:15.233775Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:15.246031Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.246042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.246044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.246085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16895 2024-11-21T17:47:15.271195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.271225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.272399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.291971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.295337Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.302244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.365891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.383852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.394552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.498149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607954883028:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.498190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.549119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.556086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.567443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.573933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.588914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.604730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.618444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607954883539:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.618477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.618497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607954883544:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.619149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.622210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790607954883546:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. Trying to start YDB, gRPC: 1922, MsgBus: 32386 2024-11-21T17:47:16.053812Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790611714808492:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:16.054103Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ee/r3tmp/tmpeQ9q6m/pdisk_1.dat 2024-11-21T17:47:16.063119Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1922, node 2 2024-11-21T17:47:16.074434Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:16.074446Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:16.074447Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:16.074486Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32386 TClient is connected to server localhost:32386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:16.154425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.154453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:16.155618Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:16.156724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.162095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.170019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
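The unittest output above records two errors just before the second node is started: "Table intent determination, code: 1040" and ":3:27: Error: CONCAT is not supported on Kikimr clusters." The second message most likely refers to YQL's CONCAT table expression, which unions several source tables inside FROM and is accepted by YQL over YT but rejected by the Kikimr (YDB) backend; the test asserts that rejection rather than a successful query. A hypothetical query of the shape that triggers the message is sketched below; the table paths are made up for illustration.

-- Hypothetical illustration only: CONCAT() in FROM unions several tables in YQL,
-- and a YDB (Kikimr) cluster rejects it with
-- "CONCAT is not supported on Kikimr clusters".
SELECT Key, Value
FROM CONCAT(`/Root/Table1`, `/Root/Table2`);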
2024-11-21T17:47:16.187378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.198261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.345810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611714810022:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.345838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.350602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.356569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.365255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.372428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.379715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.393962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.412377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611714810537:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.412413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.415921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790611714810542:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.416673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.420601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790611714810544:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:16.572112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.625991Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211236672, txId: 281474976715675] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 27670, MsgBus: 3341 2024-11-21T17:47:14.910646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790602989554292:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:14.910679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f9/r3tmp/tmpGTA8d8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27670, node 1 2024-11-21T17:47:14.977271Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:14.987575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:14.987585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:14.987587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:14.987628Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3341 2024-11-21T17:47:15.012874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.012907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.015095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.040497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.047251Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.048755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.120525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.143588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.155587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.211678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607284523121:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.211705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.247303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.254047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.266093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.273426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.280185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.287408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.302813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607284523615:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.302850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.302888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790607284523620:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.303484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.307398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790607284523622:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:15.470049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.527974Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235573, txId: 281474976715673] shutting down 2024-11-21T17:47:15.533047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.545567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.554553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.561573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.572576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.581340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.623141Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235664, txId: 281474976715682] shutting down 2024-11-21T17:47:15.632779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.638908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.687487Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211235734, txId: 281474976715687] shutting down Trying to start YDB, gRPC: 28311, MsgBus: 4661 2024-11-21T17:47:15.981278Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790604967393015:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.981297Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016f9/r3tmp/tmp6DWGaZ/pdisk_1.dat 2024-11-21T17:47:15.995179Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28311, node 2 2024-11-21T17:47:16.000606Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:16.000619Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2024-11-21T17:47:16.000620Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:16.000653Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4661 TClient is connected to server localhost:4661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:16.081616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.081654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:16.082694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:16.084373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.085684Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:16.095681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.103655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.119529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.127998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.250974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609262361844:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.251015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.255989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.263430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.274723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.281515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.338695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.351949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.367435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609262362359:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.367459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609262362364:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.367460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.368146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.371306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790609262362366:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:16.523967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.531539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.540706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 1366, MsgBus: 7972 2024-11-21T17:47:16.097482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790610140162940:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:16.097517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016dc/r3tmp/tmpuZMViU/pdisk_1.dat 2024-11-21T17:47:16.143754Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1366, node 1 2024-11-21T17:47:16.161196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:16.161209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:16.161211Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:16.161242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7972 TClient is connected to server localhost:7972 2024-11-21T17:47:16.197868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.197892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:16.199011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
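The KqpScripting::SecondaryIndexes output above exercises scripted queries against tables that carry secondary indexes; its setup includes several ESchemeOpAlterTable suboperations in addition to the usual ESchemeOpCreateTable ones. For orientation, a minimal sketch of how a secondary index is declared in YDB YQL is given below; the table and index names are hypothetical, and this is not the exact DDL the test runs.

-- Minimal sketch, not the test's actual schema: a table with one global
-- secondary index, declared at creation time.
CREATE TABLE `/Root/KvWithIndex` (
    Key Uint64,
    Value Utf8,
    INDEX by_value GLOBAL ON (Value),
    PRIMARY KEY (Key)
);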
2024-11-21T17:47:16.225491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.229388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.290731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.310293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.321271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.392585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790610140164497:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.392612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.425748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.480950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.491363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.498007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.505010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.512250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.520522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790610140165003:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.520546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.520547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790610140165008:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.521166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.525434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790610140165010:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 6889, MsgBus: 63941 2024-11-21T17:47:15.324161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790604766176890:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.324177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ed/r3tmp/tmpjablsK/pdisk_1.dat 2024-11-21T17:47:15.379679Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6889, node 1 2024-11-21T17:47:15.397800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.397814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.397816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.397855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63941 2024-11-21T17:47:15.424326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.424355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.425458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.448268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.451000Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.457281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.519599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.548855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.560686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.650221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604766178428:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.650255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.683663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.690467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.700354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.714919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.728351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.743121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.757872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604766178942:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.757890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790604766178947:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.757908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.758547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.762600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790604766178949:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:16.041289Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211236084, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 2764, MsgBus: 65474 2024-11-21T17:47:16.214700Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790610637568597:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:16.214802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ed/r3tmp/tmpkMbkub/pdisk_1.dat 2024-11-21T17:47:16.227128Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2764, node 2 2024-11-21T17:47:16.235754Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:16.235768Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:16.235770Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:16.235809Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65474 TClient is connected to server localhost:65474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:16.315193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.315219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:16.316336Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:16.318083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.319074Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:16.322366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:16.330941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.349977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.359741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.490211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790610637570131:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.490241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.495150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.501666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.512141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.519158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.526468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.540741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.548834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790610637570645:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.548856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790610637570650:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.548860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.549510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.553328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790610637570652:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 20962, MsgBus: 65182 2024-11-21T17:47:15.464954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790605964555025:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.464969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016e8/r3tmp/tmpZt0RXS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20962, node 1 2024-11-21T17:47:15.527335Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:15.537276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.537289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.537291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.537321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65182 2024-11-21T17:47:15.565132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.565155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.566243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.580950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.582744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:15.593824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.653612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.668217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.679699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.741095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605964556562:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.741131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.777988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.784214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.791184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.802410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.812318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.819413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:15.834906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605964557063:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.834930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605964557068:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.834938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.835541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:15.839529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790605964557070:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 24142, MsgBus: 19078 2024-11-21T17:47:16.320847Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790609545589181:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:16.321035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016e8/r3tmp/tmpoXHOX3/pdisk_1.dat 2024-11-21T17:47:16.335349Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24142, node 2 2024-11-21T17:47:16.343939Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:16.343949Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:16.343951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:16.343983Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19078 TClient is connected to server localhost:19078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:16.422691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.422714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:16.423601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.423904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:16.424654Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:16.430286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.439182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:16.456984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.466383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.628757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609545590732:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.628795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.633469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.640034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.652786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.667247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.680876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.687301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.695813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609545591237:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.695833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.695885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790609545591242:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.696593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.700731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790609545591244:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> KqpScripting::ScriptExplain [GOOD] >> VDiskRestart::Simple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 11041, MsgBus: 29995 2024-11-21T17:47:15.660195Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790605468700835:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:15.660220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016e4/r3tmp/tmpx2XTgQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11041, node 1 2024-11-21T17:47:15.720528Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:15.724471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:15.724487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:15.724489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:15.724529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29995 2024-11-21T17:47:15.760329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:15.760357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:15.761450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:15.788791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.812778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:15.876777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.899939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.911662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:15.955863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790605468702375:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.955889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:15.995901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.050621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.057424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.064116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.071463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.085465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:16.094433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790609763670177:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.094469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790609763670182:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.094474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:16.095327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:16.098875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790609763670184:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:16.287913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28478, MsgBus: 7429 2024-11-21T17:47:16.720178Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790611612208461:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:16.720586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016e4/r3tmp/tmp6DIgWF/pdisk_1.dat 2024-11-21T17:47:16.728367Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28478, node 2 2024-11-21T17:47:16.737471Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:16.737491Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:16.737493Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:16.737535Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7429 TClient is connected to server localhost:7429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:16.820695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:16.820727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:16.821821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:16.822919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.834573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:16.842031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.856724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:16.870300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:17.017775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790615907177291:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:17.017805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:17.020288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:17.026320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:17.037416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:17.044043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:17.051058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:17.058581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:17.074317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790615907177802:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:17.074343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:17.074351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790615907177808:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:17.075031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:17.078770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790615907177810:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:17.300887Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790615907178113:2459], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:17.300998Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmFlODg3NjItNmE2OTQ4NDQtOTQwNTlmMzktYzEzYzU4NzA=, ActorId: [2:7439790615907178111:2458], ActorState: ExecuteState, TraceId: 01jd7xbrdkaaaa8c2ptk5qntjd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TxUsage::WriteToTopic_Demo_1 >> IncrementalRestoreScan::ChangeSenderSimple >> IncrementalRestoreScan::Empty >> LocalPartition::Restarts >> BasicUsage::WriteRead >> TxUsage::WriteToTopic_Demo_4 >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::HugeSchemeHistory [FAIL] Test command err: Trying to start YDB, gRPC: 22713, MsgBus: 4595 2024-11-21T17:46:11.914547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790331312221527:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:11.914564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e5e/r3tmp/tmpGleTuU/pdisk_1.dat 2024-11-21T17:46:11.962934Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22713, node 1 2024-11-21T17:46:11.974921Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:11.974933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:11.974935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:11.974979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4595 TClient is connected to server localhost:4595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:12.016131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:12.016158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:12.017253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:12.040992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:12.131233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:12.683588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.683626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.683651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.683665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.683688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.683703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.683717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.683735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.683751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:12.683768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.683781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:12.683794Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790335607191840:2297];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:12.684624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.684661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.684699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.684721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.684744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.684768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.684794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.684815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.684839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:12.684858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:12.684878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:12.684894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038906;self_id=[1:7439790335607191997:2299];tablet_id=72075186224038906;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:12.685664Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:12.685678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:12.685693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:12.685708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:12.685723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:12.685737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:12.685749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:12.685777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109:2301];tablet_id=72075186224038903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:12.685790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038903;self_id=[1:7439790335607192109: ... 
on_start;last_saved_id=16; 2024-11-21T17:47:12.650811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038397;self_id=[1:7439790593306005265:13989];tablet_id=72075186224038397;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.654010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038279;self_id=[1:7439790593306005418:14015];tablet_id=72075186224038279;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.656361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;self_id=[1:7439790593306005377:14010];tablet_id=72075186224038725;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.658523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038307;self_id=[1:7439790593306005452:14024];tablet_id=72075186224038307;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.661596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038337;self_id=[1:7439790593306005356:14008];tablet_id=72075186224038337;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.663190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[1:7439790593306005461:14025];tablet_id=72075186224038084;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.667422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038342;self_id=[1:7439790593306005812:14094];tablet_id=72075186224038342;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.667903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038316;self_id=[1:7439790593306005500:14034];tablet_id=72075186224038316;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.674008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038306;self_id=[1:7439790593306005538:14035];tablet_id=72075186224038306;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.674178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038665;self_id=[1:7439790593306005590:14045];tablet_id=72075186224038665;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.679254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038907;self_id=[1:7439790593306005727:14078];tablet_id=72075186224038907;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.680062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038150;self_id=[1:7439790593306005635:14055];tablet_id=72075186224038150;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.684152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038375;self_id=[1:7439790593306005668:14063];tablet_id=72075186224038375;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.684949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7439790593306005629:14054];tablet_id=72075186224038012;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.688898Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038211;self_id=[1:7439790593306005780:14085];tablet_id=72075186224038211;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.689376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;self_id=[1:7439790593306005584:14044];tablet_id=72075186224038698;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.692899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038170;self_id=[1:7439790593306006166:14183];tablet_id=72075186224038170;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.693922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038293;self_id=[1:7439790593306005931:14124];tablet_id=72075186224038293;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.695748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038691;self_id=[1:7439790593306006051:14155];tablet_id=72075186224038691;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.698677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038588;self_id=[1:7439790593306005892:14114];tablet_id=72075186224038588;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.699917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038407;self_id=[1:7439790593306005697:14069];tablet_id=72075186224038407;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.703350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038305;self_id=[1:7439790593306005938:14125];tablet_id=72075186224038305;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.703632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038226;self_id=[1:7439790593306005855:14105];tablet_id=72075186224038226;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.706508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038098;self_id=[1:7439790593306005898:14115];tablet_id=72075186224038098;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.708389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038840;self_id=[1:7439790593306006089:14164];tablet_id=72075186224038840;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.710304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038567;self_id=[1:7439790593306005851:14104];tablet_id=72075186224038567;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.713271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7439790593306005970:14134];tablet_id=72075186224038031;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.714980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038313;self_id=[1:7439790593306006225:14198];tablet_id=72075186224038313;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.718139Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038424;self_id=[1:7439790593306005760:14083];tablet_id=72075186224038424;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.718909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7439790593306006124:14173];tablet_id=72075186224038010;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.723006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038438;self_id=[1:7439790593306006009:14144];tablet_id=72075186224038438;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.723155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038510;self_id=[1:7439790593306005728:14079];tablet_id=72075186224038510;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.728128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038829;self_id=[1:7439790593306006090:14165];tablet_id=72075186224038829;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.728154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038320;self_id=[1:7439790593306006145:14178];tablet_id=72075186224038320;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.733301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038669;self_id=[1:7439790593306006206:14193];tablet_id=72075186224038669;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.733367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038473;self_id=[1:7439790593306006012:14145];tablet_id=72075186224038473;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.737993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038459;self_id=[1:7439790593306006185:14188];tablet_id=72075186224038459;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.738812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038341;self_id=[1:7439790593306006049:14154];tablet_id=72075186224038341;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.743066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038705;self_id=[1:7439790593306005975:14135];tablet_id=72075186224038705;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.747839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038668;self_id=[1:7439790593306006289:14214];tablet_id=72075186224038668;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.747847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038246;self_id=[1:7439790593306005814:14095];tablet_id=72075186224038246;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.752020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038332;self_id=[1:7439790593306006245:14203];tablet_id=72075186224038332;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:12.753043Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038882;self_id=[1:7439790593306006266:14208];tablet_id=72075186224038882;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; [[53u]] 2024-11-21T17:47:12.884441Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232000, txId: 18446744073709551615] shutting down strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[53u]]|[[100u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x127C3768 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x2415B5B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x1246EC34 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x124645B7 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:564: Execute_ @ 0x124645B7 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7FDFBBF29D8F 11. ??:0: ?? @ 0x7FDFBBF29E3F 12. ??:0: ?? @ 0x1179D028 |77.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] >> BasicUsage::ConnectToYDB >> TxUsage::WriteToTopic_Two_WriteSession >> TxUsage::WriteToTopic_Demo_21_RestartNo >> Trace::SkipSpaces [GOOD] >> Trace::NextToken [GOOD] >> Trace::TTraceEvent [GOOD] >> Trace::TExpectedTraceEvent [GOOD] >> Trace::TExpectedTrace [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart >> IncrementalRestoreScan::ChangeSenderEmpty >> TxUsage::WriteToTopic_Demo_2 >> TxUsage::WriteToTopic_Demo_3 >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> TSolomonReboots::AdoptDropSolomonWithReboots |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] >> IncrementalRestoreScan::Empty [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2024-11-21T17:46:56.034816Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732211216034810 2024-11-21T17:46:56.133158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790525015973984:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.133221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:56.141288Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790525843822475:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.141548Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d6c/r3tmp/tmpD1pHd9/pdisk_1.dat 2024-11-21T17:46:56.169134Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:56.171074Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:56.204171Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25688, node 1 2024-11-21T17:46:56.221891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000d6c/r3tmp/yandexDFtJVD.tmp 2024-11-21T17:46:56.221908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000d6c/r3tmp/yandexDFtJVD.tmp 2024-11-21T17:46:56.221976Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000d6c/r3tmp/yandexDFtJVD.tmp 2024-11-21T17:46:56.222018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:56.226547Z INFO: TTestServer started on Port 3406 GrpcPort 25688 2024-11-21T17:46:56.232391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.232420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.234194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3406 PQClient connected to localhost:25688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:56.270021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.270046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.270515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:56.271554Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:56.271819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T17:46:56.448346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790525843822766:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.448389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790525843822776:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.448398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.448766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790525015974744:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.448792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790525015974755:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.448802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.449373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790525015974787:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.449390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.449612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:56.454189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790525015974758:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T17:46:56.454190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790525843822780:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T17:46:56.479323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:56.517465Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790525015974929:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:56.517574Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjI2NDA1YjMtZjMyNjljODUtMmQyOWQxZTYtMWYzM2YzODU=, ActorId: [1:7439790525015974741:2299], ActorState: ExecuteState, TraceId: 01jd7xb4207pq6dnwh8bkvqd4q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:56.518103Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:56.543242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:56.548955Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790525843822882:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:56.549245Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTM0YjZmY2UtYmE4N2JlZjUtNThiODk5ODUtNjhjOTQ2MjQ=, ActorId: [2:7439790525843822764:2277], ActorState: ExecuteState, TraceId: 01jd7xb41y9g84ms2vgbnf319n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:56.549366Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:56.607982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:25688", true, true, 1000); 2024-11-21T17:46:56.646390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd7xb47k3ta4wfd9k1yf3e4d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTk1ZjhjNzktODU4MjEyOTMtZDYzNjU1NTUtZGIzY2JmMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790525015975240:2946] 2024-11-21T17:47:01.133085Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790525015973984:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:01.133125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;e ... quest 2024-11-21T17:47:18.152718Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:63795 2024-11-21T17:47:18.153109Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:47:18.153247Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:18.153263Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T17:47:18.153367Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:47:18.153402Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:37274 2024-11-21T17:47:18.153408Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37274 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:47:18.153411Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:18.153812Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:47:18.153858Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:47:18.153865Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:18.153867Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:18.153872Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:47:18.154370Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:47:18.168408Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:47:18.168619Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439790618739548870:2483] connected; active server actors: 1 2024-11-21T17:47:18.169121Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439790618739548870:2483] disconnected; active server actors: 1 2024-11-21T17:47:18.169128Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439790618739548870:2483] disconnected no session 2024-11-21T17:47:18.168942Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 
2024-11-21T17:47:18.168953Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:47:18.185922Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:47:18.185947Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:47:18.185951Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439790618739548840:2483] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:47:18.185960Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:18.186091Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 5, Generation: 1 2024-11-21T17:47:18.186108Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:18.186115Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439790618739548885:2483], now have 1 active actors on pipe 2024-11-21T17:47:18.186122Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:47:18.186128Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:47:18.186162Z node 5 :PERSQUEUE INFO: new Cookie src|565ccee1-de9c689a-767665e1-a2682065_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:47:18.186200Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:47:18.186229Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:47:18.186267Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:47:18.186279Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:47:18.186305Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:47:18.186336Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|565ccee1-de9c689a-767665e1-a2682065_0 2024-11-21T17:47:18.186691Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211238186 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:18.186722Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|565ccee1-de9c689a-767665e1-a2682065_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:47:18.186827Z :INFO: [] MessageGroupId [src] SessionId [src|565ccee1-de9c689a-767665e1-a2682065_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:47:18.186832Z :INFO: [] MessageGroupId [src] SessionId [src|565ccee1-de9c689a-767665e1-a2682065_0] Write session will now close 2024-11-21T17:47:18.186837Z :DEBUG: [] MessageGroupId [src] SessionId [src|565ccee1-de9c689a-767665e1-a2682065_0] Write session: aborting 2024-11-21T17:47:18.186945Z :INFO: [] MessageGroupId [src] SessionId [src|565ccee1-de9c689a-767665e1-a2682065_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:18.186949Z :DEBUG: [] MessageGroupId [src] SessionId [src|565ccee1-de9c689a-767665e1-a2682065_0] Write session: destroy 2024-11-21T17:47:18.187077Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|565ccee1-de9c689a-767665e1-a2682065_0 grpc read done: success: 0 data: 2024-11-21T17:47:18.187088Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|565ccee1-de9c689a-767665e1-a2682065_0 grpc read failed 2024-11-21T17:47:18.187095Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|565ccee1-de9c689a-767665e1-a2682065_0 grpc closed 2024-11-21T17:47:18.187099Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|565ccee1-de9c689a-767665e1-a2682065_0 is DEAD 2024-11-21T17:47:18.187357Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:18.187405Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:18.187422Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439790618739548885:2483] destroyed 2024-11-21T17:47:18.187436Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:47:18.189030Z :INFO: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] Starting read session 2024-11-21T17:47:18.189040Z :DEBUG: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] Starting session to cluster null (localhost:63795) 2024-11-21T17:47:18.189252Z :DEBUG: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:18.189255Z :DEBUG: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:18.189257Z :DEBUG: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T17:47:18.189299Z :ERROR: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T17:47:18.189303Z :DEBUG: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:18.189305Z :DEBUG: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:18.189313Z :INFO: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T17:47:18.189343Z :NOTICE: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:18.189346Z :DEBUG: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T17:47:18.189352Z :INFO: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:18.189356Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:47:18.189359Z :INFO: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:18.189363Z :NOTICE: [/Root] [/Root] [f3eb573a-90fd5b5-dadd87ba-f7c377dc] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2024-11-21T17:47:18.430443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:47:18.430986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:18.431012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f3e/r3tmp/tmpeFqsgD/pdisk_1.dat 2024-11-21T17:47:18.533039Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Exhausted 2024-11-21T17:47:18.533067Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2024-11-21T17:47:18.533072Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Finish 0 >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2024-11-21T17:47:18.438124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:47:18.438561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:18.438585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f49/r3tmp/tmpiYokT2/pdisk_1.dat 2024-11-21T17:47:18.556808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T17:47:18.556916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.556996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:18.557040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:18.557060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.557357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.557379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:18.557449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.557469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:18.557475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:18.557480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:18.557550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.557556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:18.557560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:18.557612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.557617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.557623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:47:18.557630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.558236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:18.558358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:18.558404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:18.558666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.558675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:47:18.558680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.572828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:47:18.572857Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:18.615166Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:18.615507Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:47:18.615583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.615602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.626231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:18.729045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.729120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:18.729134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:47:18.729200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:18.729209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:47:18.729253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:18.729266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:18.729458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:18.729463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 
72057594046644480, LocalPathId: 1] 2024-11-21T17:47:18.729501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:18.729504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2024-11-21T17:47:18.729555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.729561Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T17:47:18.729569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:18.729572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.729577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:18.729580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.729583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:18.729586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:18.729592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:18.729597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:47:18.729600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:47:18.729968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T17:47:18.729981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T17:47:18.729985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T17:47:18.729988Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:18.729992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:18.730004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T17:47:18.730007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T17:47:18.730132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T17:47:18.730243Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:47:18.730252Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:47:18.730288Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[1:607:2516] 2024-11-21T17:47:18.731476Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:47:18.731662Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:47:18.731685Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:47:18.731777Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:47:18.731826Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResoluti ... 024-11-21T17:47:19.197954Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037889 state Ready 2024-11-21T17:47:19.197958Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T17:47:19.197973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:19.197976Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:47:19.197982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:47:19.197984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2024-11-21T17:47:19.197988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2024-11-21T17:47:19.197993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715658 2024-11-21T17:47:19.197997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2024-11-21T17:47:19.198000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:47:19.198003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2024-11-21T17:47:19.198015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T17:47:19.198092Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2024-11-21T17:47:19.198102Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] HANDLE EvNavigateScheme /Root/IncrBackupTable 2024-11-21T17:47:19.198335Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:47:19.198355Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2024-11-21T17:47:19.198547Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] Handle 
TEvDescribeSchemeResult Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T17:47:19.198664Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:789:2644], serverId# [1:790:2645], sessionId# [0:0:0] 2024-11-21T17:47:19.198746Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:47:19.198768Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:47:19.198794Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T17:47:19.198815Z node 1 
:CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2024-11-21T17:47:19.198827Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:47:19.198845Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvGetProxyServicesRequest 2024-11-21T17:47:19.198851Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T17:47:19.198887Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:796:2650], serverId# [1:797:2651], sessionId# [0:0:0] 2024-11-21T17:47:19.240054Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:47:19.240093Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:47:19.240120Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:47:19.240130Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T17:47:19.240158Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2024-11-21T17:47:18.578736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:47:18.579124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:18.579142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f34/r3tmp/tmpQlWzSW/pdisk_1.dat 2024-11-21T17:47:18.694379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T17:47:18.694474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.694543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:18.694584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:18.694607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.694870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.694893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:18.694963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.694982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:18.694987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:18.694992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:18.695085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.695092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:18.695096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:18.695157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.695163Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.695169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:47:18.695175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.695750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:18.695855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:18.695899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:18.696145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.696155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:47:18.696160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.710307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:47:18.710346Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:18.752697Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:47:18.752959Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:47:18.753005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.753022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.763515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:18.866089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:18.866157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:18.866184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:47:18.866237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:18.866244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:47:18.866293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:18.866305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:18.866487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:18.866494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 
72057594046644480, LocalPathId: 1] 2024-11-21T17:47:18.866538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:18.866543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2024-11-21T17:47:18.866620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.866627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T17:47:18.866638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:18.866642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.866648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:18.866653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.866658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:18.866662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:18.866671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:18.866677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:47:18.866681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:47:18.867035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T17:47:18.867047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T17:47:18.867051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T17:47:18.867055Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:18.867060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:18.867074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T17:47:18.867079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T17:47:18.867210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T17:47:18.867307Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:47:18.867315Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:47:18.867354Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[1:607:2516] 2024-11-21T17:47:18.868470Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:47:18.868636Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:47:18.868655Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:47:18.868710Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:47:18.868753Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResoluti ... .TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2024-11-21T17:47:19.344657Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2024-11-21T17:47:19.344666Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] HANDLE EvNavigateScheme /Root/IncrBackupTable 2024-11-21T17:47:19.344724Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:47:19.344731Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2024-11-21T17:47:19.344845Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] Handle TEvDescribeSchemeResult Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T17:47:19.344928Z node 1 :CHANGE_EXCHANGE 
DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:47:19.344959Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:47:19.344995Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T17:47:19.345014Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::Fake [GOOD] |77.5%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::Fake [GOOD] >> TSolomonReboots::CreateDropSolomonWithReboots >> TConsistentOpsWithReboots::DropIndexedTableWithReboots >> KqpOlap::ManyColumnShardsWithRestarts [GOOD] >> IntermediateDirsReboots::CreateTableWithIntermediateDirs >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::ManyColumnShardsWithRestarts [GOOD] Test command err: 2024-11-21T17:46:18.792865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:46:18.795089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:46:18.795175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:46:18.795214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:18.795487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:46:18.795497Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e11/r3tmp/tmp0ovhbS/pdisk_1.dat 2024-11-21T17:46:18.879921Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:18.961076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:46:19.047544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:19.047571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:19.048460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:19.048476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:19.059977Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:19.060111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:19.060199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:19.403757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:19.492241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.492293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.492328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:19.492343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2024-11-21T17:46:19.492361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:19.492381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:19.492400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:19.492420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:19.492444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:19.492465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.492485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:19.492504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:1465:2928];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:19.502313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:19.502339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:19.502354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:19.502361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:19.502380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:19.502388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:19.502403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 
2024-11-21T17:46:19.502413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:19.502424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:19.502431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:19.502439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:19.502446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:19.502504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:19.502512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:19.502530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:19.502537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:46:19.502550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:19.502556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:19.502576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:19.502583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:19.502596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:19.502602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:46:19.514957Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:19.514989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:19.515025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:19.515044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:19.515063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:1488:2935];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGra ... 6224037937;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.472091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:21744:9160];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.477788Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.480006Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:21744:9160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.658673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[2:23243:9526];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.662678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:23273:9534];tablet_id=72075186224037941;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.667137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:23285:9540];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.670013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:23328:9550];tablet_id=72075186224037986;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.672600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.673258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.674716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.677457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:23332:9553];tablet_id=72075186224037984;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 
2024-11-21T17:47:13.680489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[2:23393:9562];tablet_id=72075186224037982;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.683868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.686202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.686847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.687390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[2:23243:9526];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037943;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.687428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:23273:9534];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037941;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.687450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:23285:9540];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037939;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.688145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[2:23328:9550];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037986;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.689799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:23332:9553];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037984;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.689835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[2:23393:9562];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037982;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.693810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:23406:9565];tablet_id=72075186224037980;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.702104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[2:23416:9573];tablet_id=72075186224037976;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.706774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:23422:9578];tablet_id=72075186224037968;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.710636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;self_id=[2:23436:9581];tablet_id=72075186224037960;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.713254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.715363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.718783Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[2:23441:9584];tablet_id=72075186224037962;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.721130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.723987Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.725445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[2:23416:9573];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037976;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.725489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:23422:9578];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.725517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;self_id=[2:23436:9581];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037960;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.726482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[2:23441:9584];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037962;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.731015Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:23445:9586];tablet_id=72075186224037964;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.735536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.737530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:23445:9586];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037964;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.741607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:23447:9588];tablet_id=72075186224037966;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.744571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.746104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:23447:9588];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037966;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.756813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[2:23460:9589];tablet_id=72075186224037970;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.764613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:23468:9595];tablet_id=72075186224037972;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.768990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[2:23471:9598];tablet_id=72075186224037974;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 
2024-11-21T17:47:13.774248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:23478:9604];tablet_id=72075186224037978;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:13.776176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.778528Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.781578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.782538Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:23468:9595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037972;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.783221Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[2:23471:9598];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037974;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:13.784011Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:23478:9604];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:47:20.389974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xbgkx6kbmymtbk278f1hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM2ZTk1MzEtODRjY2Q2ZGItODJjM2JhMWYtMTVmNDcwNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"type_id":4}}],"rows":[{"items":[{"uint64_value":200000}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2024-11-21T17:47:20.394013Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jd7xbgkx6kbmymtbk278f1hq", SessionId: ydb://session/3?node_id=1&id=ZmM2ZTk1MzEtODRjY2Q2ZGItODJjM2JhMWYtMTVmNDcwNDk=, Slow query, duration: 11.084172s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT COUNT(*) FROM `/Root/largeOlapStore/largeOlapTable`;", parameters: 0b 2024-11-21T17:47:20.395185Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 5500, txId: 18446744073709551615] shutting down >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2024-11-21T17:46:55.928012Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.928021Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.928024Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.928165Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:46:55.930431Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.930478Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.930555Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.930675Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.930714Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.930756Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.930766Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:46:55.930950Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.930953Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.930957Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.931033Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.931235Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.931273Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.931304Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.931354Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.931377Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.931403Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.931408Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:46:55.943492Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.943499Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.943502Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.943574Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.943726Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.943778Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.944180Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.944512Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.944586Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.944636Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T17:46:55.944653Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:46:55.944921Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.944925Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.944927Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.944993Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.945113Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.945150Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.945196Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.945907Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.946062Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.946089Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.946098Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:46:55.946333Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.946337Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.946339Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.946400Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.946519Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.946546Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.946579Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.946646Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.946679Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.946713Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.946726Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:46:55.946894Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.946899Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.946901Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.946948Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:46:55.947127Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.947186Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.947235Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.947307Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.947336Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.947361Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.947366Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:46:55.947552Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.947555Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.947558Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.947616Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.947739Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.947763Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.947792Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.947949Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.947982Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.948006Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:46:55.948011Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:46:55.948198Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.948201Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.948203Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:55.948265Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:46:55.948351Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:46:55.948385Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.948419Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:46:55.948945Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:55.948987Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:46:55.948997Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T17:46:55.949002Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:46:55.975711Z :ReadSession INFO: Random seed for debugging is 1732211215975703 2024-11-21T17:46:56.071104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790524910012055:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.071376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:56.073545Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790523061838757:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.073680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existen ... r shared/user session shared/user_1_1_3202117689184880317_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2024-11-21T17:47:08.962100Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T17:47:08.962107Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T17:47:08.962113Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2024-11-21T17:47:08.962206Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:47:08.962219Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:47:08.962253Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_3202117689184880317_v1 2024-11-21T17:47:08.962287Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:47:08.963135Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:47:08.963159Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:47:08.963164Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T17:47:08.963244Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T17:47:08.963261Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2024-11-21T17:47:08.963270Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2024-11-21T17:47:08.963402Z :DEBUG: [/Root] [/Root] [a916acc5-e0d42f86-ea2c5e93-9c281a3e] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2024-11-21T17:47:09.058783Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0] Write session will now close 2024-11-21T17:47:09.058808Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0] Write session: aborting 2024-11-21T17:47:09.059045Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:09.059054Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0] Write session: destroy 2024-11-21T17:47:09.059278Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0 grpc read done: success: 0 data: 2024-11-21T17:47:09.059301Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0 grpc read failed 2024-11-21T17:47:09.059308Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0 grpc closed 2024-11-21T17:47:09.059320Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|a49a0de8-789ea97d-9d23ea59-12c5e8c1_0 is DEAD 2024-11-21T17:47:09.059592Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:09.059771Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:09.059802Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439790576449622382:2589] destroyed 2024-11-21T17:47:09.059814Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:47:11.136380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:47:11.136402Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:11.734410Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T17:47:11.734475Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:47:16.734705Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:47:18.961288Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T17:47:19.059321Z :INFO: [/Root] [/Root] [a916acc5-e0d42f86-ea2c5e93-9c281a3e] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:19.059351Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T17:47:19.059362Z :INFO: [/Root] [/Root] [a916acc5-e0d42f86-ea2c5e93-9c281a3e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16328 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:19.059423Z :NOTICE: [/Root] [/Root] [a916acc5-e0d42f86-ea2c5e93-9c281a3e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:47:19.059434Z :DEBUG: [/Root] [/Root] [a916acc5-e0d42f86-ea2c5e93-9c281a3e] [dc1] Abort session to cluster 2024-11-21T17:47:19.059697Z :NOTICE: [/Root] [/Root] [a916acc5-e0d42f86-ea2c5e93-9c281a3e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:19.060000Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:19.060016Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 grpc read failed 2024-11-21T17:47:19.060023Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 grpc closed 2024-11-21T17:47:19.060037Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_3202117689184880317_v1 is DEAD 2024-11-21T17:47:19.060303Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:19.060321Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_3202117689184880317_v1 2024-11-21T17:47:19.060337Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439790550679818069:2489] destroyed 2024-11-21T17:47:19.060373Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_3202117689184880317_v1 2024-11-21T17:47:19.060386Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439790550679818066:2486] disconnected; active server actors: 1 2024-11-21T17:47:19.060397Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439790550679818066:2486] client user disconnected session shared/user_1_1_3202117689184880317_v1 2024-11-21T17:47:19.197664Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790623694263310:2717] TxId: 281474976715724. Ctx: { TraceId: 01jd7xbt7y8nmkyxv3g40ejye8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWJjZTEwMDAtMmJlMjkyZGItODYxY2Y1NGEtNDZmZTEyZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2024-11-21T17:47:19.198006Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790623694263316:2724], TxId: 281474976715724, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xbt7y8nmkyxv3g40ejye8. SessionId : ydb://session/3?node_id=1&id=OWJjZTEwMDAtMmJlMjkyZGItODYxY2Y1NGEtNDZmZTEyZTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790623694263310:2717], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T17:47:19.198073Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439790623694263318:2725], TxId: 281474976715724, task: 4. Ctx: { TraceId : 01jd7xbt7y8nmkyxv3g40ejye8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OWJjZTEwMDAtMmJlMjkyZGItODYxY2Y1NGEtNDZmZTEyZTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439790623694263310:2717], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T17:47:19.488616Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:19.488623Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:19.488627Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:47:19.488738Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:47:19.488876Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:47:19.488965Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:19.489037Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:47:19.489217Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:47:19.489252Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:47:19.489307Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T17:47:19.489325Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:47:19.489335Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:47:19.489342Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T17:47:19.489389Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:47:19.489402Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> IntermediateDirsReboots::CreateWithIntermediateDirs >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop |77.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |77.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait [FAIL] >> TxUsage::WriteToTopic_Two_WriteSession [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart [GOOD] >> TxUsage::SessionAbort >> TxUsage::WriteToTopic_Demo_5 >> BasicUsage::ConnectToYDB [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:130:2153] sender: [1:132:2057] recipient: [1:106:2138] 2024-11-21T17:46:57.950790Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:57.951882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:57.951897Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:57.953206Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:57.953342Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:57.953407Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:57.962214Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:57.964313Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:57.964445Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:57.964585Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:57.964593Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:57.964600Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:57.964643Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:57.967975Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:57.968054Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:57.968092Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:46:57.968097Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:57.968102Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:57.968107Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:57.968179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:57.968193Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:57.968343Z 
node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:57.968366Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:57.968373Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:57.968380Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:57.968388Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:57.968393Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:57.968396Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:57.968402Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:57.968407Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 Leader for TabletID 9437184 is [1:130:2153] sender: [1:205:2057] recipient: [1:14:2061] 2024-11-21T17:46:57.980818Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:57.980834Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:57.980851Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:57.981252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:57.981261Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:57.981286Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:57.981317Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:57.981325Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:57.981332Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:57.981340Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:57.981344Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:57.981348Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:57.981350Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:57.981421Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:57.981426Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:57.981428Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:57.981430Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:57.981443Z node 1 :TX_DATASHARD TRACE: Execution 
status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:57.981446Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:57.981449Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:57.981452Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:57.981457Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:58.006661Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:58.006694Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:58.006701Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:58.006713Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:58.006730Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:58.006870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.006894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:58.006902Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:58.006922Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:58.006927Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:58.006979Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:58.006987Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.006991Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:58.006996Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:58.007580Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:58.007597Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:58.007654Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.007658Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:58.007664Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:58.007671Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:58.007674Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:58.007681Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2024-11-21T17:46:58.007684Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:58.007689Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.007692Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:58.007695Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:58.007697Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:46:58.007733Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T17:46:58.007736Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.007738Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:58.007740Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:58.007742Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:58.007751Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:58.007753Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:46:58.007755Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:58.007758Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:58.007769Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2024-11-21T17:46:58.007771Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2024-11-21T17:46:58.007773Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2024-11-21T17:46:58.007778Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:46:58.007779Z node 1 :TX_DATASHARD TRACE: Adv ... 
9437184 to execution unit ExecuteDataTx 2024-11-21T17:47:24.522740Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:24.522767Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437184 with status COMPLETE 2024-11-21T17:47:24.522772Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:24.522777Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2024-11-21T17:47:24.522780Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:47:24.522783Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompleteOperation 2024-11-21T17:47:24.522787Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompleteOperation 2024-11-21T17:47:24.522816Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is DelayComplete 2024-11-21T17:47:24.522820Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompleteOperation 2024-11-21T17:47:24.522823Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:24.522826Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompletedOperations 2024-11-21T17:47:24.522830Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2024-11-21T17:47:24.522832Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:24.522835Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437184 has finished 2024-11-21T17:47:24.522839Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:47:24.522842Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:47:24.522844Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:47:24.522847Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:47:24.522869Z node 41 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [41:434:2384], Recipient [41:434:2384]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:47:24.522873Z node 41 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:47:24.522877Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T17:47:24.522880Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:47:24.522885Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T17:47:24.522888Z node 41 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437186 2024-11-21T17:47:24.522891Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PlanQueue 2024-11-21T17:47:24.522896Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.522899Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2024-11-21T17:47:24.522902Z node 41 :TX_DATASHARD 
TRACE: Add [7:6] at 9437186 to execution unit LoadTxDetails 2024-11-21T17:47:24.522905Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2024-11-21T17:47:24.522966Z node 41 :TX_DATASHARD DEBUG: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2024-11-21T17:47:24.522970Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.522973Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2024-11-21T17:47:24.522977Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2024-11-21T17:47:24.522981Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2024-11-21T17:47:24.522984Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.522987Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2024-11-21T17:47:24.522990Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2024-11-21T17:47:24.522993Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2024-11-21T17:47:24.522999Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437186 2024-11-21T17:47:24.523002Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437186 2024-11-21T17:47:24.523005Z node 41 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437186 2024-11-21T17:47:24.523009Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.523012Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2024-11-21T17:47:24.523015Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2024-11-21T17:47:24.523018Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit BuildDataTxOutRS 2024-11-21T17:47:24.523023Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.523026Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2024-11-21T17:47:24.523029Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 2024-11-21T17:47:24.523032Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2024-11-21T17:47:24.523035Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.523038Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2024-11-21T17:47:24.523041Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2024-11-21T17:47:24.523045Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2024-11-21T17:47:24.523048Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.523051Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2024-11-21T17:47:24.523054Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2024-11-21T17:47:24.523059Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2024-11-21T17:47:24.523062Z node 41 :TX_DATASHARD TRACE: 
Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.523065Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T17:47:24.523068Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2024-11-21T17:47:24.523072Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2024-11-21T17:47:24.523102Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2024-11-21T17:47:24.523107Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:24.523112Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.523115Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2024-11-21T17:47:24.523118Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompleteOperation 2024-11-21T17:47:24.523121Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2024-11-21T17:47:24.523151Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is DelayComplete 2024-11-21T17:47:24.523154Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2024-11-21T17:47:24.523157Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompletedOperations 2024-11-21T17:47:24.523161Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2024-11-21T17:47:24.523164Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2024-11-21T17:47:24.523167Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2024-11-21T17:47:24.523170Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437186 has finished 2024-11-21T17:47:24.523174Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:47:24.523176Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T17:47:24.523179Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T17:47:24.523182Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2024-11-21T17:47:24.534564Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2024-11-21T17:47:24.534590Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2024-11-21T17:47:24.534605Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:47:24.534614Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2024-11-21T17:47:24.534638Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [41:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:47:24.534649Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:47:24.534740Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2024-11-21T17:47:24.534744Z node 41 :TX_DATASHARD 
DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2024-11-21T17:47:24.534751Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:47:24.534756Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2024-11-21T17:47:24.534766Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [41:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:47:24.534771Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:47:24.534843Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2024-11-21T17:47:24.534847Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2024-11-21T17:47:24.534854Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T17:47:24.534859Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2024-11-21T17:47:24.534867Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [41:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:47:24.534872Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |77.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |77.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleMergesWithRestartsAfterWait [FAIL] Test command err: Trying to start YDB, gRPC: 28450, MsgBus: 7187 2024-11-21T17:46:30.339130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790410852061636:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:30.339147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000db2/r3tmp/tmpg8quZf/pdisk_1.dat 2024-11-21T17:46:30.394887Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28450, node 1 2024-11-21T17:46:30.406570Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:30.406587Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:30.406589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:30.406623Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7187 2024-11-21T17:46:30.439486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:30.439515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:7187 2024-11-21T17:46:30.440626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:30.459592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:30.461956Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:30.579946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:31.349163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:31.349198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:31.349221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:31.349238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:31.349257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:31.349272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:31.349294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:31.349313Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:31.349327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:31.349345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:31.349361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:31.349393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790415147031404:2311];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:31.349823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:31.349861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:31.349900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:31.349922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:31.349943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:31.349965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:31.349985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:31.350006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:31.350027Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:31.350046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:31.350071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:31.350094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038909;self_id=[1:7439790415147031377:2303];tablet_id=72075186224038909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:31.351459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:31.351474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:31.351491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:31.351507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:31.351523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:31.351536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:31.351549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:31.351564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7439790415147031370:2296];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... 
=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.445162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038359;self_id=[1:7439790629895538865:12289];tablet_id=72075186224038359;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.445324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038590;self_id=[1:7439790629895538959:12817];tablet_id=72075186224038590;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.447743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038234;self_id=[1:7439790629895539062:8689];tablet_id=72075186224038234;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.447807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038113;self_id=[1:7439790629895539162:12999];tablet_id=72075186224038113;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.450265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038156;self_id=[1:7439790629895539106:11117];tablet_id=72075186224038156;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.451376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038584;self_id=[1:7439790629895539124:11116];tablet_id=72075186224038584;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.453064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038418;self_id=[1:7439790629895539058:8690];tablet_id=72075186224038418;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.455271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038676;self_id=[1:7439790629895539196:11153];tablet_id=72075186224038676;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.457035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7439790629895539269:8954];tablet_id=72075186224038051;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.457577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038757;self_id=[1:7439790629895539290:11215];tablet_id=72075186224038757;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.459813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038203;self_id=[1:7439790629895539234:11151];tablet_id=72075186224038203;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.461109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038535;self_id=[1:7439790629895539371:12023];tablet_id=72075186224038535;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.462109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038596;self_id=[1:7439790629895539528:8877];tablet_id=72075186224038596;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.464416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038784;self_id=[1:7439790629895539481:12746];tablet_id=72075186224038784;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.464557Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038209;self_id=[1:7439790629895539390:8878];tablet_id=72075186224038209;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.467286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038271;self_id=[1:7439790629895539315:12177];tablet_id=72075186224038271;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.468522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038850;self_id=[1:7439790629895539416:11080];tablet_id=72075186224038850;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.469991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[1:7439790629895539420:13095];tablet_id=72075186224038038;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.472197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038780;self_id=[1:7439790629895539860:11498];tablet_id=72075186224038780;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.472342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038769;self_id=[1:7439790629895539672:8765];tablet_id=72075186224038769;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.474666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038293;self_id=[1:7439790629895539627:12161];tablet_id=72075186224038293;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.475693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[1:7439790629895539631:11219];tablet_id=72075186224038086;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.476926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038297;self_id=[1:7439790629895539904:12156];tablet_id=72075186224038297;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.477845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038382;self_id=[1:7439790629895539454:13084];tablet_id=72075186224038382;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.479250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038199;self_id=[1:7439790629895539338:11037];tablet_id=72075186224038199;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.479942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7439790629895539892:12158];tablet_id=72075186224038053;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.481912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038258;self_id=[1:7439790629895539598:8876];tablet_id=72075186224038258;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.482128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;self_id=[1:7439790629895539786:12914];tablet_id=72075186224038722;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.484438Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038795;self_id=[1:7439790629895539934:8669];tablet_id=72075186224038795;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.485091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038674;self_id=[1:7439790629895539484:12743];tablet_id=72075186224038674;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.486800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038195;self_id=[1:7439790629895539563:12948];tablet_id=72075186224038195;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.488696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7439790629895539727:9012];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.489089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038648;self_id=[1:7439790629895539806:11210];tablet_id=72075186224038648;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.491352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038312;self_id=[1:7439790629895539670:8766];tablet_id=72075186224038312;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.492318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038528;self_id=[1:7439790629895539525:12241];tablet_id=72075186224038528;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.493704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038885;self_id=[1:7439790629895539583:8898];tablet_id=72075186224038885;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.496046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038345;self_id=[1:7439790629895539797:12913];tablet_id=72075186224038345;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.499712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038174;self_id=[1:7439790629895539746:9386];tablet_id=72075186224038174;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.503615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038600;self_id=[1:7439790629895539803:11211];tablet_id=72075186224038600;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.507535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038745;self_id=[1:7439790629895539855:11113];tablet_id=72075186224038745;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.510128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038910;self_id=[1:7439790629895539707:10777];tablet_id=72075186224038910;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.512336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038487;self_id=[1:7439790629895539971:12305];tablet_id=72075186224038487;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.515322Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038848;self_id=[1:7439790629895539945:11077];tablet_id=72075186224038848;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2024-11-21T17:47:21.602148Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211241000, txId: 18446744073709551615] shutting down [[0u]] strings (ReformatYson(expected)) and (ReformatYson(actual)) are different at ydb/core/kqp/ut/common/kqp_ut_common.cpp:552, void NKikimr::NKqp::CompareYson(const TString &, const TString &): ([[0u]]|[[10000u]]) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x127C3768 1. /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:552: CompareYson @ 0x2415B5B9 2. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:309: CheckCount @ 0x1246EC34 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:434: CheckCount @ 0x12466C95 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:663: Execute_ @ 0x12466C95 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7F2DFB021D8F 11. ??:0: ?? @ 0x7F2DFB021E3F 12. ??:0: ?? @ 0x1179D028 >> TxUsage::WriteToTopic_Demo_21_RestartNo [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T17:46:54.753757Z :ReadSession INFO: Random seed for debugging is 1732211214753747 2024-11-21T17:46:54.854325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790516540873840:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:54.854588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:54.864357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790517353936485:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:54.886737Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e3f/r3tmp/tmphMa6Wz/pdisk_1.dat 2024-11-21T17:46:54.893794Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:54.895341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2024-11-21T17:46:54.924760Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25559, node 1 2024-11-21T17:46:54.954764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:54.954793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:54.961923Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000e3f/r3tmp/yandex8X1o1N.tmp 2024-11-21T17:46:54.961937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000e3f/r3tmp/yandex8X1o1N.tmp 2024-11-21T17:46:54.961993Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000e3f/r3tmp/yandex8X1o1N.tmp 2024-11-21T17:46:54.962010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:54.962612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:54.966743Z INFO: TTestServer started on Port 3101 GrpcPort 25559 TClient is connected to server localhost:3101 PQClient connected to localhost:25559 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:54.993492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:54.993546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:54.994926Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:54.995727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:55.001833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:46:55.022740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720659, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:55.229725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790521648903924:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:55.229725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790521648903911:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:55.229743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:55.231610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:46:55.238353Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:46:55.238756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790521648903927:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:46:55.268372Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790520835842097:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:55.268478Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODExOTNhZmUtN2U5OGE4MzMtZDk0NTBjYTItYTA2YWEwYzk=, ActorId: [1:7439790520835842047:2299], ActorState: ExecuteState, TraceId: 01jd7xb2w52vp4xnr51qyy8gvk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:55.268884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:55.268929Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:55.330284Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790521648904007:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:55.330773Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQ0NTliYzgtY2I4ZTkwNmEtYzMyMmZlNjItYmQ2ZjM0ZWY=, ActorId: [2:7439790521648903909:2277], ActorState: ExecuteState, TraceId: 01jd7xb2vsbvgsvmy6vh2mpre3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:55.330974Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:55.350315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:55.381378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:25559", true, true, 1000); 2024-11-21T17:46:55.429231Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd7xb31e5zenz4aj91xft2ns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NiMzZiZGItOTUyZWRkNWQtMmNhOTU0YTctNDk5NGI3NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790520835842464:2907] 2024-11-21T17:46:59.854400Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790516540873840:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:59.854437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:59.862083Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790517353936485:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:59.862122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:01.479710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720679:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:25559 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:47:01.502790Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:25559 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 ... : "... 99 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1732211245365 CreateTimestampMS: 1732211245364 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 2 } 2024-11-21T17:47:26.472755Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2024-11-21T17:47:26.472764Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 81407f0e-1b3cc626-bc1a484-80e97a6 has messages 1 2024-11-21T17:47:26.472806Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 read done: guid# 81407f0e-1b3cc626-bc1a484-80e97a6, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2024-11-21T17:47:26.472821Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 response to read: guid# 81407f0e-1b3cc626-bc1a484-80e97a6 2024-11-21T17:47:26.472947Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 Process answer. Aval parts: 0 2024-11-21T17:47:26.473141Z :DEBUG: [/Root] [/Root] [9a60cddc-a49e19ce-f71c020f-f78bf5ab] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.473222Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T17:47:26.473283Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T17:47:26.473301Z :DEBUG: [/Root] [/Root] [9a60cddc-a49e19ce-f71c020f-f78bf5ab] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T17:47:25.364000Z WriteTime: 2024-11-21T17:47:25.365000Z Ip: "ipv6:[::1]:42080" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:42080" } } } } 2024-11-21T17:47:26.473341Z :INFO: [/Root] [/Root] [9a60cddc-a49e19ce-f71c020f-f78bf5ab] Closing read session. 
Close timeout: 3.000000s 2024-11-21T17:47:26.473349Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T17:47:26.473357Z :INFO: [/Root] [/Root] [9a60cddc-a49e19ce-f71c020f-f78bf5ab] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1231 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:26.473550Z :INFO: [/Root] [/Root] [9a60cddc-a49e19ce-f71c020f-f78bf5ab] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:26.473555Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T17:47:26.473559Z :INFO: [/Root] [/Root] [9a60cddc-a49e19ce-f71c020f-f78bf5ab] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1231 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:26.473577Z :NOTICE: [/Root] [/Root] [9a60cddc-a49e19ce-f71c020f-f78bf5ab] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:26.473611Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T17:47:26.473670Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 got read request: guid# 28a96e0e-54dca7f1-94538f11-bcab557b 2024-11-21T17:47:26.473686Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 grpc closed 2024-11-21T17:47:26.473718Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_539710057917364237_v1 is DEAD 2024-11-21T17:47:26.473872Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:26.473888Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_539710057917364237_v1 2024-11-21T17:47:26.473908Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7439790650755356062:2508] destroyed 2024-11-21T17:47:26.473942Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_539710057917364237_v1 2024-11-21T17:47:26.474045Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439790650755356059:2505] disconnected; active server actors: 1 2024-11-21T17:47:26.474055Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439790650755356059:2505] client user disconnected session shared/user_7_1_539710057917364237_v1 2024-11-21T17:47:26.879552Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.879559Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.879562Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:47:26.879647Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:47:26.879778Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:47:26.879831Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.879899Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T17:47:26.880118Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.880120Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.880122Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:47:26.880167Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:47:26.880264Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:47:26.880307Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.880422Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T17:47:26.880614Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:47:26.880761Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T17:47:26.880774Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T17:47:26.880814Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:47:26.880823Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:47:26.880828Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:47:26.880837Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T17:47:26.881287Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.881290Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.881293Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:47:26.881348Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:47:26.881420Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:47:26.881451Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.881496Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:47:26.881623Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.881661Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:47:26.881708Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:47:26.881718Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:47:26.881728Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2024-11-21T17:47:26.882100Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.882104Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.882107Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:47:26.882155Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:47:26.882206Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:47:26.882225Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.882333Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:26.882357Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:47:26.882365Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:47:26.882377Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |77.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2024-11-21T17:46:40.536891Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1732211200536885 2024-11-21T17:46:40.674540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790454515641756:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:40.674559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:40.677604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790455719423080:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:40.677638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:40.701830Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:40.705644Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e0c/r3tmp/tmpV1qvc0/pdisk_1.dat 2024-11-21T17:46:40.736396Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 24671, node 1 2024-11-21T17:46:40.756456Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e0c/r3tmp/yandexu2pN5b.tmp 2024-11-21T17:46:40.756476Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e0c/r3tmp/yandexu2pN5b.tmp 2024-11-21T17:46:40.756528Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e0c/r3tmp/yandexu2pN5b.tmp 2024-11-21T17:46:40.756541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:40.759536Z INFO: TTestServer started on Port 61409 GrpcPort 24671 TClient is connected to server localhost:61409 2024-11-21T17:46:40.774760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.774813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:40.776770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:24671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:40.806238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:40.806406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.806421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:46:40.807993Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:40.808242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... 2024-11-21T17:46:40.961835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790455719423174:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.961871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790455719423163:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.961886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.963326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T17:46:40.967482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790455719423178:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T17:46:41.051171Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790460014390517:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.051550Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ1MTUwMDEtNDY0YTM1MjItOGU1MDRjZmMtN2U5YTAzYjI=, ActorId: [2:7439790455719423147:2277], ActorState: ExecuteState, TraceId: 01jd7xamy14qac4qzy993hk18x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.050367Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790458810610023:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.051958Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.051918Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTIyOTE5YTItNWM5MGRjOTEtN2E3ZTRjZDYtMjk3Nzc1MDQ=, ActorId: [1:7439790458810609982:2299], ActorState: ExecuteState, TraceId: 01jd7xamz99av3twha0ace731n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.052060Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.053092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.128702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.226944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:24671", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:41.314092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xan8fa3tf0v5ez84qdgj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTUyZDc2MmItMTRmNmQyYjAtZDU3OTFlZjItYTQ3OWMzOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439790458810610395:2919] 2024-11-21T17:46:45.674873Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790454515641756:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:45.674910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:45.677992Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790455719423080:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:45.678031Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:46.388454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 5 partitions CallPersQueueGRPC request to localhost:24671 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:46.435881Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:24671 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 ... TION_CHOOSER DEBUG: TPartitionChooser [3:7439790526388507326:2463] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:46:56.352680Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790526388507326:2463] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:46:56.352687Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:46:56.352959Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.352978Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439790526388507374:2463], now have 1 active actors on pipe 2024-11-21T17:46:56.353018Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T17:46:56.353116Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:56.353136Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:56.353184Z node 4 :PERSQUEUE INFO: new Cookie src|dae8480d-8a9af677-173d3e55-e6534b87_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:46:56.353219Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T17:46:56.353253Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:56.353545Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:46:56.353557Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:46:56.353588Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:46:56.353898Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|dae8480d-8a9af677-173d3e55-e6534b87_0 2024-11-21T17:46:56.354240Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211216354 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:46:56.354273Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|dae8480d-8a9af677-173d3e55-e6534b87_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:46:56.354382Z :INFO: [] MessageGroupId [src] SessionId [src|dae8480d-8a9af677-173d3e55-e6534b87_0] Write session: close. Timeout = 0 ms 2024-11-21T17:46:56.354392Z :INFO: [] MessageGroupId [src] SessionId [src|dae8480d-8a9af677-173d3e55-e6534b87_0] Write session will now close 2024-11-21T17:46:56.354397Z :DEBUG: [] MessageGroupId [src] SessionId [src|dae8480d-8a9af677-173d3e55-e6534b87_0] Write session: aborting 2024-11-21T17:46:56.354515Z :INFO: [] MessageGroupId [src] SessionId [src|dae8480d-8a9af677-173d3e55-e6534b87_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:46:56.354521Z :DEBUG: [] MessageGroupId [src] SessionId [src|dae8480d-8a9af677-173d3e55-e6534b87_0] Write session: destroy 2024-11-21T17:46:56.354806Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|dae8480d-8a9af677-173d3e55-e6534b87_0 grpc read done: success: 0 data: 2024-11-21T17:46:56.354816Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|dae8480d-8a9af677-173d3e55-e6534b87_0 grpc read failed 2024-11-21T17:46:56.354821Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|dae8480d-8a9af677-173d3e55-e6534b87_0 grpc closed 2024-11-21T17:46:56.354825Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|dae8480d-8a9af677-173d3e55-e6534b87_0 is DEAD 2024-11-21T17:46:56.354988Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:46:56.355105Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:46:56.355127Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790526388507374:2463] destroyed 2024-11-21T17:46:56.355138Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2024-11-21T17:46:56.408066Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:46:58.408364Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T17:47:00.334100Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T17:47:00.409441Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === In the next federation discovery response dc2 will be available Test retry state: get retry delay 2024-11-21T17:47:02.410448Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:47:04.411417Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T17:47:04.608690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:47:04.608721Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:05.334279Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T17:47:06.412432Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:47:08.413464Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T17:47:10.334502Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T17:47:10.414430Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:47:12.415433Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:47:14.416362Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T17:47:15.334663Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T17:47:16.417448Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:47:18.418459Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T17:47:20.334824Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T17:47:20.419424Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:47:22.420359Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T17:47:24.421365Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T17:47:25.283245Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2024-11-21T17:47:25.283265Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2024-11-21T17:47:25.283441Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2024-11-21T17:47:25.283599Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2024-11-21T17:47:25.283829Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2024-11-21T17:47:25.283925Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 2024-11-21T17:47:25.335048Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T17:47:26.422423Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === Waiting for repair >>> Ready to answer: ok === Closing the session 2024-11-21T17:47:28.423500Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:3310" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:3310" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:3310" location: "dc3" status: AVAILABLE weight: 500 } ] } 2024-11-21T17:47:28.426087Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: try to update token 2024-11-21T17:47:28.426392Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2024-11-21T17:47:28.427608Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: write to message_group: src_id 2024-11-21T17:47:28.427682Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: send init request: init_request { path: "test-topic" message_group_id: "src_id" } 2024-11-21T17:47:28.427793Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2024-11-21T17:47:28.427803Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session will now close 2024-11-21T17:47:28.427815Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: OnWriteDone gRpcStatusCode: 0 2024-11-21T17:47:28.427823Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: aborting 2024-11-21T17:47:28.427949Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T17:47:28.428127Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:28.428626Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:28.428676Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211248428 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:28.428644Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T17:47:28.428696Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session is aborting and will not restart 2024-11-21T17:47:28.428724Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: destroy 2024-11-21T17:47:28.428732Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: grpc closed 2024-11-21T17:47:28.428743Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: is DEAD |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TxUsage::SessionAbort [GOOD] >> TxUsage::WriteToTopic_Demo_3 [GOOD] >> BasicUsage::SimpleHandlers [GOOD] >> TxUsage::TwoSessionOneConsumer >> KqpSinkTx::OlapDeferredEffects >> TxUsage::WriteToTopic_Demo_26 >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite |77.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2024-11-21T17:46:40.536901Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1732211200536894 2024-11-21T17:46:40.656572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790456643812421:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:40.658121Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790455233794013:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e05/r3tmp/tmpwxtFMG/pdisk_1.dat 2024-11-21T17:46:40.683797Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:40.684834Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:40.685156Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:40.686028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:40.724415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32298, node 1 2024-11-21T17:46:40.746423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e05/r3tmp/yandexsEQUiK.tmp 2024-11-21T17:46:40.746437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e05/r3tmp/yandexsEQUiK.tmp 2024-11-21T17:46:40.746490Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e05/r3tmp/yandexsEQUiK.tmp 2024-11-21T17:46:40.746533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:40.748716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.748746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:40.750171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:40.751561Z INFO: TTestServer started on Port 12056 GrpcPort 32298 TClient is connected to server localhost:12056 PQClient connected to localhost:32298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:40.780754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:40.784434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:40.784459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:40.786222Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:40.786867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... 2024-11-21T17:46:40.938320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790455233794135:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.938342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790455233794154:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.938353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:40.939511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:46:40.944806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790455233794158:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:46:41.012042Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790456643813264:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.012322Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjBiYzUtNDc5NmQ5MzAtMjAwMTUyYmItYmZjYTQ2OQ==, ActorId: [1:7439790456643813238:2299], ActorState: ExecuteState, TraceId: 01jd7xamygfer36ft83c74e659, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.012467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.012863Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.032175Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790459528761527:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.032523Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzAwNzcwN2MtMzdlODY4N2MtOGNmOWRiNmEtOTk2ZGIxNGE=, ActorId: [2:7439790455233794127:2277], ActorState: ExecuteState, TraceId: 01jd7xamx98d72wjq55dzzjs7z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.032674Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.041957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.071057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:32298", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:41.173013Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd7xan3w4b9pxhssqwwbnwtd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRiYWE5YjMtYjEzYzY2M2MtOTM0YTZmMWMtYmMzYzY4ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790460938780933:2928] 2024-11-21T17:46:45.652315Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790456643812421:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:45.652351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:45.652007Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790455233794013:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:45.652070Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:46.341298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:32298 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:46.374593Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:32298 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 ... essed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.882026Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_11665810762398015539_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:29.882036Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_11665810762398015539_v1 grpc read failed 2024-11-21T17:47:29.882041Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_11665810762398015539_v1 grpc closed 2024-11-21T17:47:29.882059Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_11665810762398015539_v1 is DEAD 2024-11-21T17:47:29.882130Z :INFO: [/Root] [/Root] [ecc61f0-55560ee6-1994dd38-5ffcfef8] Closing read session. Close timeout: 18446744073709.551615s 2024-11-21T17:47:29.882139Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:47:29.882145Z :INFO: [/Root] [/Root] [ecc61f0-55560ee6-1994dd38-5ffcfef8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 120 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.882150Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_12752982706194502894_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:29.882159Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_12752982706194502894_v1 grpc read failed 2024-11-21T17:47:29.882162Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_12752982706194502894_v1 grpc closed 2024-11-21T17:47:29.882166Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_12752982706194502894_v1 is DEAD 2024-11-21T17:47:29.882217Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:29.882234Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_11665810762398015539_v1 2024-11-21T17:47:29.882246Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790668389619508:2499] destroyed 2024-11-21T17:47:29.882264Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_3_11665810762398015539_v1 2024-11-21T17:47:29.882296Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790668389619496:2490] disconnected; active server actors: 1 2024-11-21T17:47:29.882303Z node 3 :PERSQUEUE_READ_BALANCER 
NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790668389619496:2490] client user disconnected session shared/user_3_3_11665810762398015539_v1 2024-11-21T17:47:29.882316Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T17:47:29.882322Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790668389619498:2489] disconnected; active server actors: 1 2024-11-21T17:47:29.882325Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790668389619498:2489] client user disconnected session shared/user_3_2_12752982706194502894_v1 2024-11-21T17:47:29.882333Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T17:47:29.882355Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|28a96a4c-4fd7f996-6df433b6-990084de_0] Write session: close. Timeout = 0 ms 2024-11-21T17:47:29.882359Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|28a96a4c-4fd7f996-6df433b6-990084de_0] Write session will now close 2024-11-21T17:47:29.882343Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_3_1_3352031785119277973_v1" (Sender=[3:7439790668389619485:2488], Pipe=[3:7439790668389619497:2488], Partitions=[], ActiveFamilyCount=0) 2024-11-21T17:47:29.882364Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|28a96a4c-4fd7f996-6df433b6-990084de_0] Write session: aborting 2024-11-21T17:47:29.882348Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_3352031785119277973_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:29.882350Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_3352031785119277973_v1 grpc read failed 2024-11-21T17:47:29.882352Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_3352031785119277973_v1 grpc closed 2024-11-21T17:47:29.882355Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_3352031785119277973_v1 is DEAD 2024-11-21T17:47:29.882358Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] consumer user family 1 status Active partitions [0] session "shared/user_3_1_3352031785119277973_v1" sender [3:7439790668389619485:2488] lock partition 0 for ReadingSession "shared/user_3_1_3352031785119277973_v1" (Sender=[3:7439790668389619485:2488], Pipe=[3:7439790668389619497:2488], Partitions=[], ActiveFamilyCount=1) generation 1 step 2 2024-11-21T17:47:29.882364Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T17:47:29.882367Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000033s 2024-11-21T17:47:29.882433Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790668389619497:2488] disconnected; active server actors: 1 2024-11-21T17:47:29.882444Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790668389619497:2488] client user disconnected session shared/user_3_1_3352031785119277973_v1 2024-11-21T17:47:29.882454Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|28a96a4c-4fd7f996-6df433b6-990084de_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:29.882463Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|28a96a4c-4fd7f996-6df433b6-990084de_0] Write session is aborting and will not restart 2024-11-21T17:47:29.882477Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|28a96a4c-4fd7f996-6df433b6-990084de_0] Write session: destroy 2024-11-21T17:47:29.882536Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|28a96a4c-4fd7f996-6df433b6-990084de_0 grpc read done: success: 0 data: 2024-11-21T17:47:29.882542Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|28a96a4c-4fd7f996-6df433b6-990084de_0 grpc read failed 2024-11-21T17:47:29.882551Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|28a96a4c-4fd7f996-6df433b6-990084de_0 grpc closed 2024-11-21T17:47:29.882554Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|28a96a4c-4fd7f996-6df433b6-990084de_0 is DEAD 2024-11-21T17:47:29.882739Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:29.882861Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:29.882882Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790668389619538:2487] destroyed 2024-11-21T17:47:29.882892Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:47:29.882907Z :INFO: [/Root] [/Root] [31999cde-baf8dabb-b7cdce96-6be2ec3] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:29.882916Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T17:47:29.882924Z :INFO: [/Root] [/Root] [31999cde-baf8dabb-b7cdce96-6be2ec3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 121 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.882929Z :INFO: [/Root] [/Root] [d22e5394-eefd5d0-8e01d2ad-d51f3809] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:29.882932Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:47:29.882936Z :INFO: [/Root] [/Root] [d22e5394-eefd5d0-8e01d2ad-d51f3809] Counters: { Errors: 0 CurrentSessionLifetimeMs: 120 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.882939Z :INFO: [/Root] [/Root] [ecc61f0-55560ee6-1994dd38-5ffcfef8] Closing read session. 
Close timeout: 0.000000s 2024-11-21T17:47:29.882942Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:47:29.882944Z :INFO: [/Root] [/Root] [ecc61f0-55560ee6-1994dd38-5ffcfef8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 120 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.882949Z :INFO: [/Root] [/Root] [ecc61f0-55560ee6-1994dd38-5ffcfef8] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:29.882951Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:47:29.882953Z :INFO: [/Root] [/Root] [ecc61f0-55560ee6-1994dd38-5ffcfef8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 120 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.882965Z :NOTICE: [/Root] [/Root] [ecc61f0-55560ee6-1994dd38-5ffcfef8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:29.883035Z :INFO: [/Root] [/Root] [d22e5394-eefd5d0-8e01d2ad-d51f3809] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:29.883038Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:47:29.883041Z :INFO: [/Root] [/Root] [d22e5394-eefd5d0-8e01d2ad-d51f3809] Counters: { Errors: 0 CurrentSessionLifetimeMs: 120 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.883046Z :NOTICE: [/Root] [/Root] [d22e5394-eefd5d0-8e01d2ad-d51f3809] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:29.883082Z :INFO: [/Root] [/Root] [31999cde-baf8dabb-b7cdce96-6be2ec3] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:29.883086Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T17:47:29.883089Z :INFO: [/Root] [/Root] [31999cde-baf8dabb-b7cdce96-6be2ec3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 121 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:29.883093Z :NOTICE: [/Root] [/Root] [31999cde-baf8dabb-b7cdce96-6be2ec3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop [GOOD] >> KqpTx::RollbackTx >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> KqpTx::RollbackInvalidated ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:21.356206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:21.356225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.356248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:21.356252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:21.356258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:21.356262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:21.356268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.356321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:21.363127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:21.363140Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.364514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:21.364605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:21.364637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:21.366456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:21.366509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:21.366569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.366708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.367193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.367381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.367387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.367395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:21.367399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.367403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:21.367427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:21.368291Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.378865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:21.378918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.378947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:21.378987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:21.378991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.379373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.379388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:21.379424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.379431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:21.379435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts 
opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:21.379438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:21.379668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.379674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:21.379676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:21.379877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.379882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.379885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.379889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.380242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:21.380524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:21.380550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:21.380653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.380669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:21.380673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.380707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:21.380711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.380726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:21.380733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.381024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.381031Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.381051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.381054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:21.381098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.381102Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:21.381109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:21.381111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.381115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:21.381118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.381120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:21.381122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:21.381129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:21.381132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:21.381134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
mentPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:30.906608Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:47:30.906664Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:47:30.906709Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:47:30.907055Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:47:30.907069Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:47:30.907079Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.907087Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:47:30.907091Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [40:300:2292] 2024-11-21T17:47:30.907387Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:47:30.907430Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:30.907434Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:30.907453Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:47:30.907467Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:47:30.907474Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:47:30.907485Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:30.907489Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:47:30.907494Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T17:47:30.907498Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:47:30.907501Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T17:47:30.907537Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:30.907543Z node 40 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 
ProgressState 2024-11-21T17:47:30.907550Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:47:30.907553Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:30.907558Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:47:30.907562Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:30.907568Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:47:30.907571Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:47:30.907580Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:30.907584Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T17:47:30.907588Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T17:47:30.907591Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:47:30.907595Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:47:30.907598Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:47:30.907650Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.907658Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.907662Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:30.907666Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:47:30.907670Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:47:30.907737Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.907744Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.907748Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:30.907751Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:47:30.907754Z 
node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:30.907844Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.907852Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.907855Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:30.907859Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:47:30.907864Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:30.908056Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.908065Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.908069Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:30.908073Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:47:30.908076Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:47:30.908084Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:47:30.908089Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [40:299:2291] 2024-11-21T17:47:30.908298Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.908736Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.908776Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.908933Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:30.908949Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:30.908954Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [40:300:2292] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 
1003 2024-11-21T17:47:30.909034Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:30.909058Z node 40 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 31us result status StatusPathDoesNotExist 2024-11-21T17:47:30.909087Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 1003, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 3 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> KqpOlapSparsed::SwitchingStandalone [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> KqpTx::RollbackTx [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2024-11-21T17:46:56.363182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:46:56.363207Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:56.363222Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:46:56.365954Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:46:56.366099Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:46:56.366153Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:46:56.367151Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:46:56.375874Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:46:56.376021Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:46:56.376155Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:46:56.376163Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:46:56.376169Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:46:56.376204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:46:56.379530Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:46:56.379582Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:46:56.379624Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, 
actorId: [1:194:2196] 2024-11-21T17:46:56.379630Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:46:56.379635Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:46:56.379640Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:56.379733Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.379751Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.379783Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:46:56.379801Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:46:56.379855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:56.379862Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:46:56.379868Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:46:56.379874Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:46:56.379877Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:46:56.379882Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:46:56.379887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:46:56.387359Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.387382Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.387399Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:46:56.387715Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:46:56.387724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:46:56.387747Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:46:56.387773Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:46:56.387781Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:46:56.387788Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:46:56.387795Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:56.387798Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:46:56.387802Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:46:56.387804Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:56.387859Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:46:56.387862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:46:56.387864Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:46:56.387867Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:56.387874Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:46:56.387876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:46:56.387878Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:46:56.387880Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:56.387883Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:46:56.408991Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:46:56.409019Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:46:56.409026Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:46:56.409037Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:46:56.409050Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:46:56.409188Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.409198Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:46:56.409205Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:46:56.409241Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:46:56.409247Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:46:56.409296Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:46:56.409306Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.409310Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:46:56.409315Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:46:56.409998Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:46:56.410018Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:46:56.410089Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.410095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:46:56.410103Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:46:56.410110Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:46:56.410115Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:46:56.410124Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:46:56.410128Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:46:56.410135Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.410140Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:46:56.410144Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:46:56.410148Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:46:56.410199Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:46:56.410203Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.410206Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:46:56.410210Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:46:56.410214Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:46:56.410228Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:46:56.410231Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:46:56.410234Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:46:56.410237Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:46:56.410248Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:46:56.410251Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:46:56.410254Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:46:56.410259Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:46:56.410262Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:46:56.410265Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
propose latency: 58 ms, status: COMPLETE 2024-11-21T17:47:29.507119Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2024-11-21T17:47:29.507123Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2024-11-21T17:47:29.507127Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:29.507131Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2024-11-21T17:47:29.507143Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2024-11-21T17:47:29.507146Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:29.507150Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2024-11-21T17:47:29.512138Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:47:29.512168Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2024-11-21T17:47:29.512188Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:47:30.125622Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:97:2132], Recipient [3:227:2222]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 97 RawX2: 12884904020 } 2024-11-21T17:47:30.125647Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2024-11-21T17:47:30.125754Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:627:2606], Recipient [3:227:2222]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:30.125759Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:30.125765Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:626:2605], serverId# [3:627:2606], sessionId# [0:0:0] 2024-11-21T17:47:30.125804Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:97:2132], Recipient [3:227:2222]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 97 RawX2: 12884904020 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2024-11-21T17:47:30.125808Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:47:30.125825Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.125932Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2024-11-21T17:47:30.126784Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T17:47:30.126791Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2024-11-21T17:47:30.126795Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:47:30.126798Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:47:30.126806Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v0/18446744073709551615 ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2024-11-21T17:47:30.126815Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2024-11-21T17:47:30.126820Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T17:47:30.126821Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:47:30.126824Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:47:30.126826Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.128959Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T17:47:30.129003Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.129011Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.136583Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.136612Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.136786Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.137388Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T17:47:30.137409Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.137418Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.158642Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.158676Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.158872Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.159968Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T17:47:30.160007Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.160017Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.183785Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.183822Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.184018Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.189563Z node 3 :TX_DATASHARD TRACE: Operation 
[0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2024-11-21T17:47:30.189630Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.189640Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.189784Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.189791Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.189909Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.288462Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2024-11-21T17:47:30.288643Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.288658Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.302187Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.302218Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.302380Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.304317Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T17:47:30.304377Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.304389Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.307856Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.307877Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.308036Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.308570Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T17:47:30.308599Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.308608Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.312901Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.312924Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.313105Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.314327Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2024-11-21T17:47:30.314372Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2024-11-21T17:47:30.314384Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2024-11-21T17:47:30.359788Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:47:30.359817Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2024-11-21T17:47:30.359987Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2024-11-21T17:47:30.620123Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2024-11-21T17:47:30.620165Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:47:30.620184Z node 3 :TX_DATASHARD 
TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T17:47:30.620191Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:47:30.620195Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2024-11-21T17:47:30.620199Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2024-11-21T17:47:30.620209Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 62 ms, propose latency: 62 ms, status: COMPLETE 2024-11-21T17:47:30.620263Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2024-11-21T17:47:30.620267Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2024-11-21T17:47:30.620269Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2024-11-21T17:47:30.620272Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2024-11-21T17:47:30.620283Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2024-11-21T17:47:30.620285Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2024-11-21T17:47:30.620288Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2024-11-21T17:47:30.624759Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:47:30.624792Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2024-11-21T17:47:30.624811Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TxUsage::WriteToTopic_Demo_2 [GOOD] >> TxUsage::WriteToTopic_Demo_4 [GOOD] >> TxUsage::WriteToTopic_Demo_1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx [GOOD] Test command err: Trying to start YDB, gRPC: 19461, MsgBus: 11478 2024-11-21T17:47:31.012464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790676405907496:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:31.012684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001487/r3tmp/tmpJNtHrh/pdisk_1.dat 2024-11-21T17:47:31.067152Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19461, node 1 2024-11-21T17:47:31.077021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:31.077034Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:31.077037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:31.077072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11478 TClient is connected to server localhost:11478 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:31.113692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:31.113717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:31.115648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:31.120889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.132029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.194232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.212349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.223664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.281168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790676405909040:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.281195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.312812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.318555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.330808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.385379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.394783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.408654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.416455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790676405909557:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.416477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.416513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790676405909562:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.417025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:31.421069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790676405909564:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:31.615434Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmUxNThhMzAtMTExYWI3YzQtZDdjYjg0NjgtNWZhZjI2MjY=, ActorId: [1:7439790676405909848:2454], ActorState: ReadyState, TraceId: 01jd7xc6cy3xqvs8n4dg8jjmd3, Create QueryResponse for error on request, msg: >> TxUsage::WriteToTopic_Demo_10 >> TxUsage::WriteToTopic_Demo_18_RestartNo >> TxUsage::WriteToTopic_Demo_34 >> KqpTx::RollbackInvalidated [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::SwitchingStandalone [GOOD] Test command err: Trying to start YDB, gRPC: 7226, MsgBus: 10467 2024-11-21T17:46:31.707123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790415862271092:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:31.707411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d88/r3tmp/tmpPwUhtQ/pdisk_1.dat 2024-11-21T17:46:31.762705Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7226, node 1 2024-11-21T17:46:31.770391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:31.770406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:31.770408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:31.770447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10467 TClient is connected to server localhost:10467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:31.807814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:31.807841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:31.808948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:31.838898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:32.018629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790420157238994:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:32.018671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:32.040367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:32.061938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:32.061987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:32.062012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:32.062026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:32.062042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:32.062059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:32.062075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:32.062101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:32.062119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:32.062140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:32.062160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:32.062184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[1:7439790420157239555:2317];tablet_id=72075186224037933;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:32.062288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:32.062313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:32.062345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:32.062369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:32.062392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:32.062414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:32.062437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:32.062466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:32.062487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:32.062510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:32.062531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:32.062552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[1:7439790420157239556:2318];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:32.062945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:32.062957Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:32.062968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:32.062977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:32.062991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:32.063000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:32.063008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:32.063021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:32.063033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075 ... 7c7-1feddbc2; 2024-11-21T17:47:25.089305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;task_id=aae5b73e-a83011ef-a8227b96-1d6dcf9;fline=with_appended.cpp:80;portions=9,;task_id=aae5b73e-a83011ef-a8227b96-1d6dcf9; 2024-11-21T17:47:25.090162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;task_id=aae5be78-a83011ef-8cd3c0b2-4d2f193c;fline=with_appended.cpp:80;portions=9,;task_id=aae5be78-a83011ef-8cd3c0b2-4d2f193c; 2024-11-21T17:47:25.090186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;task_id=aae5dfac-a83011ef-ae93f436-3443ece;fline=with_appended.cpp:80;portions=9,;task_id=aae5dfac-a83011ef-ae93f436-3443ece; 2024-11-21T17:47:25.090283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037946;task_id=aae5dd9a-a83011ef-a90aaf71-fd08511a;fline=with_appended.cpp:80;portions=9,;task_id=aae5dd9a-a83011ef-a90aaf71-fd08511a; 2024-11-21T17:47:25.090613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;task_id=aae5dc78-a83011ef-837f08fe-74af4d3;fline=with_appended.cpp:80;portions=9,;task_id=aae5dc78-a83011ef-837f08fe-74af4d3; 2024-11-21T17:47:25.091061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;task_id=aae60e96-a83011ef-9d2da9d1-c128d34b;fline=with_appended.cpp:80;portions=9,;task_id=aae60e96-a83011ef-9d2da9d1-c128d34b; 2024-11-21T17:47:25.091558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;task_id=aae5ded0-a83011ef-ae397331-8bf469fe;fline=with_appended.cpp:80;portions=9,;task_id=aae5ded0-a83011ef-ae397331-8bf469fe; 2024-11-21T17:47:25.091603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;task_id=aae61f9e-a83011ef-8b70506c-f199b5e5;fline=with_appended.cpp:80;portions=9,;task_id=aae61f9e-a83011ef-8b70506c-f199b5e5; 2024-11-21T17:47:25.105632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;self_id=[1:7439790420157239622:2345];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.105637Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7439790420157239666:2357];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.105726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7439790420157239588:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.106057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[1:7439790420157239597:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.106097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439790420157239584:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.107400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[1:7439790420157239646:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.107471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[1:7439790420157239623:2346];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.109021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;task_id=aae8cfd2-a83011ef-ae2da54d-d51bb423;fline=with_appended.cpp:80;portions=9,;task_id=aae8cfd2-a83011ef-ae2da54d-d51bb423; 2024-11-21T17:47:25.109148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=aae8d130-a83011ef-b2fe2c71-2d992e6a;fline=with_appended.cpp:80;portions=9,;task_id=aae8d130-a83011ef-b2fe2c71-2d992e6a; 2024-11-21T17:47:25.110582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;task_id=aae8d518-a83011ef-995d447b-9157ec4;fline=with_appended.cpp:80;portions=9,;task_id=aae8d518-a83011ef-995d447b-9157ec4; 2024-11-21T17:47:25.110730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;task_id=aae8e242-a83011ef-a5271440-eda5d7ff;fline=with_appended.cpp:80;portions=9,;task_id=aae8e242-a83011ef-a5271440-eda5d7ff; 2024-11-21T17:47:25.111011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;task_id=aae916cc-a83011ef-8759121d-b9187636;fline=with_appended.cpp:80;portions=9,;task_id=aae916cc-a83011ef-8759121d-b9187636; 2024-11-21T17:47:25.111103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=aae8e404-a83011ef-ac0b7c22-53809836;fline=with_appended.cpp:80;portions=9,;task_id=aae8e404-a83011ef-ac0b7c22-53809836; 2024-11-21T17:47:25.111490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;task_id=aae919ec-a83011ef-a1e1e182-215f87c1;fline=with_appended.cpp:80;portions=9,;task_id=aae919ec-a83011ef-a1e1e182-215f87c1; 2024-11-21T17:47:25.114712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;self_id=[1:7439790420157239513:2305];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.114817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7439790420157239580:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.117260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;task_id=aaea3732-a83011ef-9bffe0d8-f5f5998c;fline=with_appended.cpp:80;portions=9,;task_id=aaea3732-a83011ef-9bffe0d8-f5f5998c; 2024-11-21T17:47:25.117272Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037948;task_id=aaea3426-a83011ef-aac0aca7-d53c501b;fline=with_appended.cpp:80;portions=9,;task_id=aaea3426-a83011ef-aac0aca7-d53c501b; 2024-11-21T17:47:25.121983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790420157239700:2364];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.123952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7439790420157239668:2358];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.123993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790420157239648:2354];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.124010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7439790420157239625:2348];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.124021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7439790420157239665:2356];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.124067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7439790420157239552:2314];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.125101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;task_id=aaeb4ea6-a83011ef-ba2f62b9-8389e0d5;fline=with_appended.cpp:80;portions=9,;task_id=aaeb4ea6-a83011ef-ba2f62b9-8389e0d5; 2024-11-21T17:47:25.125369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7439790420157239573:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.125402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790420157239548:2310];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.125425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790420157239517:2308];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.128256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;task_id=aaeb9f78-a83011ef-bf3fc715-b1dd15a0;fline=with_appended.cpp:80;portions=9,;task_id=aaeb9f78-a83011ef-bf3fc715-b1dd15a0; 2024-11-21T17:47:25.128442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;task_id=aaeb9d16-a83011ef-a411ce45-cbd75268;fline=with_appended.cpp:80;portions=9,;task_id=aaeb9d16-a83011ef-a411ce45-cbd75268; 2024-11-21T17:47:25.128464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;task_id=aaeb9f5a-a83011ef-ae9a456d-5f1df83d;fline=with_appended.cpp:80;portions=9,;task_id=aaeb9f5a-a83011ef-ae9a456d-5f1df83d; 2024-11-21T17:47:25.128595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;task_id=aaeb9dca-a83011ef-809be27d-cdb2d664;fline=with_appended.cpp:80;portions=9,;task_id=aaeb9dca-a83011ef-809be27d-cdb2d664; 2024-11-21T17:47:25.128993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;task_id=aaeba07c-a83011ef-ac4eab1f-93765cd0;fline=with_appended.cpp:80;portions=9,;task_id=aaeba07c-a83011ef-ac4eab1f-93765cd0; 
2024-11-21T17:47:25.129488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;task_id=aaebd4ca-a83011ef-813c19e6-3b6bc5ea;fline=with_appended.cpp:80;portions=9,;task_id=aaebd4ca-a83011ef-813c19e6-3b6bc5ea; 2024-11-21T17:47:25.129506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=aaebd682-a83011ef-b73782ca-b1c6bb22;fline=with_appended.cpp:80;portions=9,;task_id=aaebd682-a83011ef-b73782ca-b1c6bb22; 2024-11-21T17:47:25.130024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;task_id=aaebd75e-a83011ef-82872ece-25e9c084;fline=with_appended.cpp:80;portions=9,;task_id=aaebd75e-a83011ef-82872ece-25e9c084; 2024-11-21T17:47:25.133112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7439790420157239673:2360];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.133165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790420157239550:2312];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.134422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[1:7439790420157239551:2313];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=2; 2024-11-21T17:47:25.135647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;task_id=aaed0458-a83011ef-936cd9fb-cdeae17b;fline=with_appended.cpp:80;portions=9,;task_id=aaed0458-a83011ef-936cd9fb-cdeae17b; 2024-11-21T17:47:25.135775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;task_id=aaed02e6-a83011ef-9f56156c-7f902b8c;fline=with_appended.cpp:80;portions=9,;task_id=aaed02e6-a83011ef-9f56156c-7f902b8c; 2024-11-21T17:47:25.136730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;task_id=aaed3680-a83011ef-ba422ee6-e98b607a;fline=with_appended.cpp:80;portions=9,;task_id=aaed3680-a83011ef-ba422ee6-e98b607a; WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 WAIT_COMPACTION: 256 Timing: wait took 6 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapTable` WHERE field == 'abcde' RESULT: count: 12695 ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapTable` RESULT: 2024-11-21T17:47:31.175316Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211251000, txId: 18446744073709551615] shutting down 2024-11-21T17:47:31.299928Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211251127, txId: 18446744073709551615] shutting down count: 14000 Timing: checkTable took 0 seconds Timing: wait took 0 seconds |77.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |77.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 7148, MsgBus: 10693 2024-11-21T17:47:31.586396Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790675985103081:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:31.586627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001475/r3tmp/tmpKwzomJ/pdisk_1.dat 2024-11-21T17:47:31.630133Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7148, node 1 2024-11-21T17:47:31.645961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:31.645978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:31.645979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:31.646031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10693 TClient is connected to server localhost:10693 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:31.688194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:31.688224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:31.689450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:31.711219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.723868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.738675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.755848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:31.766031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.859348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790675985104628:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.859387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.892478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.899652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.954912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:31.968905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.023583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.031353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.039613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790680280072442:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:32.039622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790680280072447:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:32.039671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:32.040285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:32.044503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790680280072449:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:32.229179Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790680280072755:2465], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:32.229301Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmM4ZDdiOTUtYjg0NjFhZmYtNWFjZWMwMTctYzVmNDhhMmY=, ActorId: [1:7439790680280072733:2454], ActorState: ExecuteState, TraceId: 01jd7xc7016hfg1gvkdhvdttw6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01jd7xc6zzf3zre52yth4mayxr 2024-11-21T17:47:32.230632Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmM4ZDdiOTUtYjg0NjFhZmYtNWFjZWMwMTctYzVmNDhhMmY=, ActorId: [1:7439790680280072733:2454], ActorState: ReadyState, TraceId: 01jd7xc7065nyha7adnkyw9zch, Create QueryResponse for error on request, msg: >> KqpTx::DeferredEffects >> KqpSinkLocks::InvalidateOlapOnCommit >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> IntermediateDirsReboots::CreateWithIntermediateDirs [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TxUsage::WriteToTopic_Demo_5 [GOOD] >> KqpTx::DeferredEffects [GOOD] >> KqpTx::EmptyTxOnCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:22.403783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:22.403805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:22.403810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:22.403814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:22.403819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:22.403822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2024-11-21T17:47:22.403830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:22.403895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:22.412810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:22.412828Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:22.414854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:22.414931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:22.414957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:22.417246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:22.417316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:22.417418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:22.417600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:22.418166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:22.418395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:22.418406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:22.418419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:22.418426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:22.418431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:22.418466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:22.419705Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:22.435389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:22.435451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.435488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 
0 2024-11-21T17:47:22.435531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:22.435536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.436064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:22.436088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:22.436132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.436141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:22.436145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:22.436149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:22.436617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.436635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:22.436640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:22.438431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.438446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.438452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:22.438460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:22.438933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:22.439265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:22.439298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:22.439450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:22.439475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:22.439481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:22.439537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:22.439562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:22.439588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:22.439599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:22.439995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:22.440005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:22.440034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:22.440039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:22.440107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.440114Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:22.440123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:22.440127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:22.440132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:22.440136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:22.440141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:22.440145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:22.440156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:22.440161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:22.440164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:33.462455Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:47:33.462458Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:47:33.462462Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:47:33.462466Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:47:33.462469Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:47:33.462473Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:47:33.462476Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T17:47:33.462479Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T17:47:33.462483Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:47:33.462488Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T17:47:33.462491Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:47:33.462496Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T17:47:33.462499Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T17:47:33.462502Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T17:47:33.462505Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 2 2024-11-21T17:47:33.462824Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.462835Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.462839Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:33.462844Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:47:33.462848Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:33.462938Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.462946Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.462949Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:33.462953Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:47:33.462956Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:33.463048Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.463055Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.463059Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:33.463062Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T17:47:33.463065Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:47:33.463277Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.463285Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.463290Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:33.463294Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T17:47:33.463297Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:47:33.463341Z node 46 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.463348Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.463351Z node 46 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:33.463355Z node 46 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 2 2024-11-21T17:47:33.463358Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:47:33.463365Z node 46 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:47:33.463908Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.464024Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.464035Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.464053Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:33.464086Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:47:33.464134Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:47:33.464140Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:47:33.464189Z node 46 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:47:33.464202Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:33.464207Z node 46 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [46:351:2343] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:33.464282Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:33.464307Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 33us result status StatusSuccess 2024-11-21T17:47:33.464361Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "z" PathId: 6 PartitionsCount: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T17:47:33.464405Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:33.464420Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 16us result status StatusPathDoesNotExist 2024-11-21T17:47:33.464436Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_6 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 1237, MsgBus: 11406 2024-11-21T17:47:31.726542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790675539626106:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:31.726753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00146b/r3tmp/tmpejXZq5/pdisk_1.dat 2024-11-21T17:47:31.770071Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1237, node 1 2024-11-21T17:47:31.777838Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:31.777855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:31.777857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:31.777894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11406 TClient is connected to server localhost:11406 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:31.827263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:31.827285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:31.828485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:31.849862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.862830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.878178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.894586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.906954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:31.988436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790675539627649:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:31.988465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:32.017979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.023555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.078194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.086878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.142626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.150201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.158744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790679834595463:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:32.158783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:32.158784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790679834595468:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:32.159474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:32.163664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790679834595470:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:32.296422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.302713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:32.311503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14688, MsgBus: 23352 2024-11-21T17:47:32.805768Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790678720436610:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:32.805789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00146b/r3tmp/tmpz2JziC/pdisk_1.dat 2024-11-21T17:47:32.814974Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14688, node 2 2024-11-21T17:47:32.822105Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:32.822118Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:32.822120Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:32.822153Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23352 TClient is connected to server localhost:23352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:32.906008Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:32.906037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:32.907119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:32.907807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:32.908629Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:32.917249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:32.926029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:32.941616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:32.950564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.098284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790683015405438:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.098319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.104635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.111572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.123586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.137838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.153376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.167965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.181692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790683015405952:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.181726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.181779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790683015405957:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.182466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:33.185560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790683015405959:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:33.407999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.416024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.424661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> LocalPartition::Restarts [GOOD] >> LocalPartition::DiscoveryServiceBadPort >> KqpTx::EmptyTxOnCommit [GOOD] >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> BasicUsage::WriteRead [GOOD] >> Describe::Basic |77.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ut/ydb-core-security-ut |77.6%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 1663, MsgBus: 5182 2024-11-21T17:47:33.046798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790685544179321:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:33.046905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00144f/r3tmp/tmp5Pcil6/pdisk_1.dat 2024-11-21T17:47:33.103384Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1663, node 1 2024-11-21T17:47:33.114409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:33.114434Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:33.114436Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:33.114470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5182 2024-11-21T17:47:33.151076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:33.151115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:33.152185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5182 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:33.180270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.184834Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:33.195692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.215212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.274929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.297049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.334442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790685544180876:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.334476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.362972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.370232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.382830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.396254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.451727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.459678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.475341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790685544181381:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.475360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790685544181386:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.475365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.475961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:33.479815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790685544181388:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 16915, MsgBus: 11926 2024-11-21T17:47:33.941442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790682637085461:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00144f/r3tmp/tmpaZjQc2/pdisk_1.dat 2024-11-21T17:47:33.946636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:33.952854Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16915, node 2 2024-11-21T17:47:33.959765Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:33.959779Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:33.959781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:33.959825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11926 TClient is connected to server localhost:11926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:34.040470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:34.040502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:34.041514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:34.043194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:34.048837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:34.058098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:34.076387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:34.087171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:34.233803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790686932054148:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.233848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.239849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.247269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.257496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.265057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.278854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.292865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.300806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790686932054660:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.300841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.300854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790686932054665:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.301464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:34.306039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790686932054667:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |77.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:22.867751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:22.867767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:22.867771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:22.867773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:22.867777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:22.867780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:22.867785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:22.867837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:22.874559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:22.874573Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:22.875984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:22.876041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:22.876057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T17:47:22.878258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:22.878332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:22.878409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:22.878600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:22.879209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:22.879394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:22.879400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:22.879408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:22.879412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:22.879416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:22.879439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:22.880278Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:22.891040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:22.891141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.891195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:22.891248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:22.891253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.891763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:22.891784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:22.891823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.891830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:22.891832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:22.891836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:22.892136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.892143Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:22.892147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:22.892495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.892504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.892508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:22.892514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:22.893005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:22.893404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:22.893440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:22.893565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:22.893584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:22.893590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:22.893644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:22.893650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:22.893686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:22.893699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:22.894106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:22.894113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:22.894144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:22.894149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:22.894220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:22.894226Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:22.894237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:22.894242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:22.894247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:22.894252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:22.894257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:22.894270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:22.894280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:22.894286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:22.894290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757131Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:34.757135Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:47:34.757139Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T17:47:34.757143Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:47:34.757146Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T17:47:34.757194Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:34.757201Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T17:47:34.757213Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:47:34.757216Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:34.757221Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:47:34.757225Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:34.757228Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:47:34.757232Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:47:34.757253Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:34.757257Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T17:47:34.757261Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T17:47:34.757264Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:47:34.757267Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:47:34.757270Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:47:34.757390Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757398Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757401Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:34.757405Z node 49 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:47:34.757410Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:34.757522Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757531Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757535Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:34.757538Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:47:34.757542Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:47:34.757731Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757743Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757760Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:34.757764Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:47:34.757769Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:34.757922Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:47:34.757952Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757960Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.757963Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:34.757966Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:47:34.757969Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:47:34.757977Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:47:34.757981Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:301:2293] Leader for TabletID 72057594037968897 is [49:213:2213] sender: [49:339:2058] recipient: [49:15:2062] 2024-11-21T17:47:34.836844Z node 49 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:47:34.836939Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.837031Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:34.837108Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:47:34.837461Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:47:34.837469Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:47:34.837480Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:47:34.837485Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:47:34.837489Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:47:34.837493Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:47:34.837497Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:34.837660Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.837706Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.837913Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:34.837929Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:34.837933Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [49:302:2294] 2024-11-21T17:47:34.838242Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:47:34.838292Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 
TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:34.838352Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:34.838380Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 34us result status StatusPathDoesNotExist 2024-11-21T17:47:34.838408Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 |77.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |77.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit [GOOD] >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs [GOOD] >> TxUsage::TwoSessionOneConsumer [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] |77.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TSolomonReboots::AdoptDropSolomonWithReboots [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateKesusWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for 
TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:21.030549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:21.030574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.030579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:21.030584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:21.030590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:21.030594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:21.030603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.030682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:21.040621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:21.040654Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.043321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:21.043409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:21.043435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:21.045875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:21.045953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:21.046039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.046261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.046914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.047184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.047195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.047209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:21.047217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.047223Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:21.047260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:21.048374Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.065219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:21.065312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.065381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:21.065444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:21.065453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.066109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.066132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:21.066178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.066187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:21.066191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:21.066195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:21.066548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.066560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:21.066565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:21.066866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.066876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.066880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.066886Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.067405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:21.067759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:21.067798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:21.067947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.067969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:21.067975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.068025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:21.068031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.068055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:21.068066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.068462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.068473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.068500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.068505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:21.068562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.068568Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:21.068577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:21.068581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.068586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:21.068590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.068594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:21.068597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:21.068607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:21.068611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:21.068615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T17:47:35.775425Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:47:35.775429Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:47:35.775437Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:35.775442Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:47:35.775444Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:47:35.775452Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:47:35.775454Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:47:35.775457Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:47:35.775461Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:47:35.775464Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T17:47:35.775466Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T17:47:35.775478Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:47:35.775482Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 1 2024-11-21T17:47:35.775486Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:47:35.775488Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T17:47:35.775491Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T17:47:35.775494Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T17:47:35.775496Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2024-11-21T17:47:35.775831Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.775840Z node 
61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.775843Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:35.775846Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:47:35.775848Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:35.775904Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.775909Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.775910Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:35.775912Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:47:35.775914Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:35.776006Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.776014Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.776016Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:35.776018Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T17:47:35.776019Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:47:35.776093Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.776098Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.776100Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:35.776102Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T17:47:35.776104Z node 61 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:47:35.776402Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.776430Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.776434Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:35.776439Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:47:35.776443Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:47:35.776457Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:47:35.776462Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [61:365:2346] 2024-11-21T17:47:35.777044Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.777085Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.777133Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.777368Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.777383Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:35.777396Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:35.777404Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [61:366:2347] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:35.777498Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:35.777534Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 41us result status StatusSuccess 2024-11-21T17:47:35.777602Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeKesus CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 KesusVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } Kesus { Name: "z" PathId: 6 KesusTabletId: 72075186233409546 Config { } Version: 2 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:35.777666Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:35.777684Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 20us result status StatusPathDoesNotExist 2024-11-21T17:47:35.777700Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:47:33.958562Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790681996747388:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:33.958719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:33.968320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790683350777595:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000be0/r3tmp/tmp4ZlP5P/pdisk_1.dat 2024-11-21T17:47:33.989608Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:33.990017Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:33.990966Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:34.015749Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6217, node 1 2024-11-21T17:47:34.027687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000be0/r3tmp/yandexC1oLpu.tmp 2024-11-21T17:47:34.027700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000be0/r3tmp/yandexC1oLpu.tmp 2024-11-21T17:47:34.027754Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000be0/r3tmp/yandexC1oLpu.tmp 2024-11-21T17:47:34.027833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:34.031407Z INFO: TTestServer started on Port 16450 GrpcPort 6217 TClient is connected to server localhost:16450 PQClient connected to localhost:6217 === TenantModeEnabled() = 1 === Init PQ - start server on port 6217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:34.059000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:34.059027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:34.060621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:34.087982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:34.088005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:34.089437Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:34.089691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:34.096459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:47:34.096511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.096557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:34.096620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:34.096633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.097251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:34.097271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:34.097313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.097326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:34.097333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T17:47:34.097336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2024-11-21T17:47:34.097824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.097835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:34.097837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T17:47:34.097873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:34.097876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T17:47:34.097878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:34.098235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.098248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.098252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:34.098258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:34.099041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:34.099549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T17:47:34.099599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:34.100355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211254144, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:34.100393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439790686291715221 RawX2: 4294969641 } } Step: 1732211254144 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:34.100406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:34.100478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T17:47:34.100489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:34.100518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:34.100531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:34.100988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:34.100997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:47:34.101038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:34.101046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439790686291715268:2381], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T17:47:34.101054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.101058Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T17:47:34.101074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T17:47:34.101083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:34.101087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T17:47:34.101090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:34.101093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T17:47:34.101094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T17:47:34.101105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:34.101108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T17:47:34.101118Z node 1 :FLAT_TX_SCHEMESHARD DEB ... node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:35.675147Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790692023271678:2335] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:35.675151Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:35.675303Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T17:47:35.675331Z node 3 :PERSQUEUE INFO: new Cookie 12345678|c3c55c2b-f4104830-5d1263db-40326c5f_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T17:47:35.675445Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|c3c55c2b-f4104830-5d1263db-40326c5f_0 2024-11-21T17:47:35.677347Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|c3c55c2b-f4104830-5d1263db-40326c5f_0 grpc read done: success: 0 data: 2024-11-21T17:47:35.677361Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c3c55c2b-f4104830-5d1263db-40326c5f_0 grpc read failed 2024-11-21T17:47:35.677441Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|c3c55c2b-f4104830-5d1263db-40326c5f_0 2024-11-21T17:47:35.677453Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c3c55c2b-f4104830-5d1263db-40326c5f_0 is DEAD 2024-11-21T17:47:35.677566Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed BEFORE MODIFY PERMISSIONS 2024-11-21T17:47:35.686736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:38756" , at schemeshard: 72057594046644480 2024-11-21T17:47:35.686789Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.686816Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2024-11-21T17:47:35.686822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T17:47:35.686858Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:35.686866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.686882Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2024-11-21T17:47:35.686889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T17:47:35.686901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046644480, LocalPathId: 10] was 3 2024-11-21T17:47:35.686924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2024-11-21T17:47:35.686932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2024-11-21T17:47:35.686934Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T17:47:35.686937Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2024-11-21T17:47:35.686939Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2024-11-21T17:47:35.686941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2024-11-21T17:47:35.687586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:35.687625Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, add access: -():test_user_0@builtin:-, add access: -():test_user_1@builtin:-, add access: -():test_user_2@builtin:- 2024-11-21T17:47:35.687669Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:35.687676Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T17:47:35.687718Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:35.687727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439790692023270834:2359], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2024-11-21T17:47:35.687847Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T17:47:35.687861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T17:47:35.687863Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T17:47:35.687867Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2024-11-21T17:47:35.687870Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2024-11-21T17:47:35.687888Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2024-11-21T17:47:35.688291Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2024-11-21T17:47:35.688483Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:35.688495Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T17:47:35.688587Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2024-11-21T17:47:35.688609Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:38746 2024-11-21T17:47:35.688618Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:38746 proto=v1 topic=/Root/acc/topic1 durationSec=0 2024-11-21T17:47:35.688622Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:35.688987Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T17:47:35.689033Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:47:35.689039Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:35.689041Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:35.689054Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790692023271708:2343] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:35.689058Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:35.689188Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T17:47:35.689229Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|83a6064f-93640caf-be75e967-7e021c61_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2024-11-21T17:47:35.689362Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|83a6064f-93640caf-be75e967-7e021c61_0 2024-11-21T17:47:35.689932Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|83a6064f-93640caf-be75e967-7e021c61_0 grpc read done: success: 1 data: update_token_request [content omitted] 2024-11-21T17:47:35.690019Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|83a6064f-93640caf-be75e967-7e021c61_0 grpc read done: success: 1 data: update_token_request [content omitted] 2024-11-21T17:47:35.690033Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|83a6064f-93640caf-be75e967-7e021c61_0 2024-11-21T17:47:35.690123Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|83a6064f-93640caf-be75e967-7e021c61_0 is DEAD 2024-11-21T17:47:35.690235Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |77.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::AdoptDropSolomonWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:18.395214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:18.395239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:18.395245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:18.395250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:18.395255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:18.395259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:18.395267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:18.395349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:18.407066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:18.407095Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:18.409906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:18.410016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:18.410049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:18.417010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:18.417144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:18.417261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:18.417585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:18.418421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:18.418755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:18.418766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:18.418779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:18.418786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:18.418792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:18.418834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:18.420345Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:18.440404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:18.440507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.440574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:18.440636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:18.440644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.441553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:18.441583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:18.441662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.441674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:18.441679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:18.441684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:18.442215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.442234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:18.442240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:18.442682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.442693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.442699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:18.442707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.443304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:18.443807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:18.443860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:18.444054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:18.444078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:18.444085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:18.444150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:18.444157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:18.444186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:18.444199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:18.444761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:18.444774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:18.444821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:18.444826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:18.444907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.444913Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:18.444925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:18.444929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.444935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:18.444941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.444946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:18.444951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:18.444962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:18.444969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:18.444973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
rd: 72057594046678944 2024-11-21T17:47:36.283646Z node 73 :FLAT_TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId#1004:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:47:36.283654Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 4] name: Solomon type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 1004 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:36.283657Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:47:36.283683Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:47:36.283699Z node 73 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 130 2024-11-21T17:47:36.283718Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:36.283726Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:47:36.283945Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:36.284091Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:47:36.284198Z node 73 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:36.284204Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:36.284249Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:47:36.284275Z node 73 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:36.284280Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [73:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T17:47:36.284284Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [73:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:47:36.284335Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:47:36.284341Z node 73 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2024-11-21T17:47:36.285969Z node 73 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:47:36.285989Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:47:36.285996Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:47:36.286001Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:47:36.286006Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:47:36.286010Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx 
for txid 1004:0 2024-11-21T17:47:36.286049Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:47:36.286054Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T17:47:36.286060Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:47:36.286064Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:47:36.286162Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:36.286171Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:36.286174Z node 73 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:47:36.286177Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:47:36.286180Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:47:36.286251Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:36.286259Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:36.286263Z node 73 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:47:36.286266Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:47:36.286270Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:47:36.286278Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:47:36.288911Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:47:36.288925Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:47:36.289348Z node 73 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2024-11-21T17:47:36.289455Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 
2024-11-21T17:47:36.289507Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409546 2024-11-21T17:47:36.289787Z node 73 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409547 2024-11-21T17:47:36.289868Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:47:36.289902Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:47:36.290085Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:36.290111Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:36.290159Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:47:36.290165Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:47:36.290180Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:36.291945Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:47:36.291965Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2024-11-21T17:47:36.292800Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:47:36.292814Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409547 2024-11-21T17:47:36.292860Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:47:36.292914Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:47:36.292920Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:47:36.292974Z node 73 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:47:36.292989Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:47:36.292994Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [73:479:2452] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:47:36.293038Z node 73 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:47:36.293049Z node 73 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 
72075186233409547 2024-11-21T17:47:36.293103Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:36.293135Z node 73 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 40us result status StatusPathDoesNotExist 2024-11-21T17:47:36.293168Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::LocksAbortOnCommit >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapNamedStatement >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDropSimultaneously [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:21.723381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:21.723401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.723405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:21.723410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2024-11-21T17:47:21.723415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:21.723419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:21.723427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.723489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:21.730708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:21.730727Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.732264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:21.732328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:21.732349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:21.734042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:21.734107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:21.734187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.734357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.734894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.735140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.735148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.735160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:21.735166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.735171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:21.735207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:21.736328Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.750160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:21.750243Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.750302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:21.750359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:21.750367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.751025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.751046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:21.751097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.751110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:21.751114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:21.751119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:21.751445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.751454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:21.751458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:21.751708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.751716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.751721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.751728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.752286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:21.752671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:21.752713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2024-11-21T17:47:21.752908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.752928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:21.752937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.752995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:21.753002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.753029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:21.753042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.753403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.753414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.753449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.753456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:21.753528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.753535Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:21.753545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:21.753550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.753555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:21.753560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.753565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:21.753568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:21.753578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:21.753584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:21.753589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
44073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.880491Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.880494Z node 62 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:36.880498Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:47:36.880502Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Leader for TabletID 72057594037968897 is [62:213:2213] sender: [62:343:2058] recipient: [62:15:2062] 2024-11-21T17:47:36.880642Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.880652Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.880656Z node 62 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:36.880659Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:47:36.880663Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:47:36.880672Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:47:36.880677Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [62:300:2292] 2024-11-21T17:47:36.880748Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.880785Z node 62 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:47:36.880818Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.880829Z node 62 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T17:47:36.880853Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:36.880902Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:36.880938Z node 62 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 
2024-11-21T17:47:36.880965Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:47:36.880987Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T17:47:36.881024Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:47:36.881044Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:47:36.881077Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:47:36.881083Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:47:36.881094Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:47:36.881100Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:47:36.881106Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:36.881111Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:47:36.881116Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:47:36.881120Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:47:36.881125Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:47:36.881129Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:47:36.881135Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:47:36.881139Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:47:36.881143Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:36.881694Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.881714Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.881730Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.881740Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.882295Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:36.882319Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:36.882325Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:301:2293] 2024-11-21T17:47:36.882350Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:47:36.882370Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:47:36.882387Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:47:36.882412Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 6 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2024-11-21T17:47:36.882529Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:47:36.882540Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:47:36.882549Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:47:36.882557Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:47:36.882566Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T17:47:36.882575Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T17:47:36.882582Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T17:47:36.882591Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T17:47:36.882603Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2024-11-21T17:47:36.882613Z node 62 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2024-11-21T17:47:36.882723Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:36.882764Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 46us result status 
StatusSuccess 2024-11-21T17:47:36.882849Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop [GOOD] >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> TxUsage::WriteToTopic_Demo_34 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup Test command err: Trying to start YDB, gRPC: 9219, MsgBus: 22149 2024-11-21T17:47:08.627128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790575489802138:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.627313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001760/r3tmp/tmpq6RNFw/pdisk_1.dat 2024-11-21T17:47:08.670742Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9219, node 1 2024-11-21T17:47:08.682730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.682742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.682743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:08.682772Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22149 TClient is connected to server localhost:22149 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.726952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.727464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.727481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:47:08.728546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:08.737153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.752367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.770276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.780687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.901561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790575489803674:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.901608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.930311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.984903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.994313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.001173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.008306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.015444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.023637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790579784771486:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.023667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.023671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790579784771491:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.024290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.028409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790579784771493:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:09.300496Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229343, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 1335, MsgBus: 4771 2024-11-21T17:47:09.482218Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790581310676302:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:09.482369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001760/r3tmp/tmp8NT4r7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1335, node 2 2024-11-21T17:47:09.496059Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:09.496632Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:09.496640Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:09.496641Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:09.496687Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4771 TClient is connected to server localhost:4771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:09.582516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:09.582551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:09.583563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:09.584160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.592114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.600052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:09.613902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.624785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:09.729621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581310677829:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.729645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.734241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.739047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.750136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.757007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.763940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.770854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.779669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581310678330:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.779686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.779692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790581310678335:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.780317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.784487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790581310678337:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. |77.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> TxUsage::WriteToTopic_Demo_35 >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError |77.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/ut/ydb-core-control-ut |77.7%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableAndForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:19.443427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:19.443450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:19.443455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:19.443460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:19.443466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:19.443470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:19.443481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 
10 2024-11-21T17:47:19.443557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:19.464512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:19.464541Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:19.468937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:19.469076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:19.469121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:19.479901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:19.479976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:19.480082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:19.480271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:19.483622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:19.483917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:19.483930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:19.483943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:19.483950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:19.483955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:19.483999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:19.485438Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:19.498103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:19.498165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.498219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:19.498262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:19.498267Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.498835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:19.498853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:19.498895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.498901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:19.498904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:19.498907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:19.499195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.499209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:19.499214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:19.499449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.499455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.499459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:19.499465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:19.499849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:19.500113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:19.500148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:19.500309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:19.500329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:19.500334Z node 
1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:19.500378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:19.500382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:19.500402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:19.500412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:19.500699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:19.500706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:19.500739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:19.500742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:19.500802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.500809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:19.500818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:19.500822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:19.500827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:19.500832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:19.500838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:19.500840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:19.500847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:19.500850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:19.500853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
4] was 4 Forgetting tablet 72075186233409548 2024-11-21T17:47:37.786847Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:37.786891Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:47:37.786941Z node 74 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2024-11-21T17:47:37.786981Z node 74 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:47:37.787430Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:47:37.787472Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 Forgetting tablet 72075186233409546 2024-11-21T17:47:37.787916Z node 74 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.787928Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.787933Z node 74 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:47:37.787938Z node 74 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 18446744073709551615 2024-11-21T17:47:37.787942Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 2024-11-21T17:47:37.787958Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 Forgetting tablet 72075186233409547 2024-11-21T17:47:37.788023Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:47:37.788050Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:47:37.788264Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:47:37.788272Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2024-11-21T17:47:37.788284Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T17:47:37.788289Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:47:37.788294Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:47:37.788298Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:47:37.788303Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:47:37.788307Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:47:37.788312Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:47:37.788318Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:47:37.788323Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:47:37.788327Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:47:37.788332Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:37.788357Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.788556Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.788676Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.788691Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.788701Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.789175Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.789190Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:47:37.789196Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2024-11-21T17:47:37.789221Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:47:37.789226Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2024-11-21T17:47:37.789235Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:37.789273Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:47:37.789279Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:47:37.789543Z node 74 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxCleanDroppedPaths Complete, done PersistRemovePath for 6 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:47:37.789594Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:47:37.789599Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:47:37.789651Z node 74 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:47:37.789665Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:47:37.789669Z node 74 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [74:601:2556] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2024-11-21T17:47:37.789742Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:47:37.789753Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:47:37.789760Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:47:37.789785Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:47:37.789791Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T17:47:37.789800Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T17:47:37.789808Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T17:47:37.789817Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T17:47:37.789825Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2024-11-21T17:47:37.789837Z node 74 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2024-11-21T17:47:37.789933Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:37.789960Z node 74 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 33us result status StatusSuccess 2024-11-21T17:47:37.790043Z node 74 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:47:35.411898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790692142695529:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:35.411963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:35.413695Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790693358372064:2187];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000bd1/r3tmp/tmp24pwUU/pdisk_1.dat 2024-11-21T17:47:35.445796Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:35.446530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:35.451629Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:35.470220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14677, node 1 2024-11-21T17:47:35.484929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000bd1/r3tmp/yandexaNdnf2.tmp 2024-11-21T17:47:35.484944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000bd1/r3tmp/yandexaNdnf2.tmp 2024-11-21T17:47:35.485011Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000bd1/r3tmp/yandexaNdnf2.tmp 2024-11-21T17:47:35.485069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2024-11-21T17:47:35.489363Z INFO: TTestServer started on Port 3389 GrpcPort 14677 TClient is connected to server localhost:3389 PQClient connected to localhost:14677 === TenantModeEnabled() = 1 === Init PQ - start server on port 14677 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:35.510072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:35.510101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:47:35.511879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:35.530066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:47:35.530129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.530194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:35.530239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:35.530251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.531220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:35.531248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:35.531293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.531307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:35.531309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next 
state 2024-11-21T17:47:35.531312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T17:47:35.531814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.531827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:35.531830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:47:35.532103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:35.532110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T17:47:35.532113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:35.532199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.532202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.532205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:35.532209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:47:35.532964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:35.533370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T17:47:35.533409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:35.533987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211255579, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:35.534036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439790692142695943 RawX2: 4294969657 } } Step: 1732211255579 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:35.534046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:35.534102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:47:35.534112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:35.534143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 
2024-11-21T17:47:35.534157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:35.534480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:35.534488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:47:35.534532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:35.534539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439790692142695970:2385], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T17:47:35.534546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.534550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:47:35.534560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:47:35.534566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:47:35.534571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T17:47:35.534574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:47:35.534580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:47:35.534584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T17:47:35.534592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:35.534600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:47:35.534602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:47:35.535073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:47:35.535092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: ... 
:47:37.109473Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 ===Make write stream 2024-11-21T17:47:37.109731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2024-11-21T17:47:37.109965Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:37.109974Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T17:47:37.110060Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2024-11-21T17:47:37.110075Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:59054 2024-11-21T17:47:37.110078Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:59054 proto=v1 topic=/Root/acc/topic1 durationSec=0 2024-11-21T17:47:37.110081Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:37.110254Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T17:47:37.110293Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:47:37.110299Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:37.110301Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:37.110312Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790700547087422:2343] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:37.110315Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:37.110405Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T17:47:37.110436Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|aa6f01ef-9be51bac-ec60abe9-8841fc9f_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2024-11-21T17:47:37.110540Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|aa6f01ef-9be51bac-ec60abe9-8841fc9f_0 ===Assert streaming op1 ===Assert streaming op2 2024-11-21T17:47:37.110782Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|aa6f01ef-9be51bac-ec60abe9-8841fc9f_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:47:37.110869Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:47:37.110918Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:47:37.111558Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2024-11-21T17:47:37.112740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:59056" , at schemeshard: 72057594046644480 2024-11-21T17:47:37.112787Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:37.112814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2024-11-21T17:47:37.112821Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T17:47:37.112861Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:37.112869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:37.112885Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2024-11-21T17:47:37.112892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2024-11-21T17:47:37.112902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2024-11-21T17:47:37.112914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, ready parts: 1/1, is published: false 2024-11-21T17:47:37.112924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 
2024-11-21T17:47:37.112927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2024-11-21T17:47:37.112930Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2024-11-21T17:47:37.112934Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2024-11-21T17:47:37.112941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2024-11-21T17:47:37.113348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:37.113383Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: -():test_user_0@builtin:- 2024-11-21T17:47:37.113421Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:37.113434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T17:47:37.113466Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:37.113469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439790696252119264:2368], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 10 2024-11-21T17:47:37.113574Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2024-11-21T17:47:37.113593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2024-11-21T17:47:37.113596Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2024-11-21T17:47:37.113600Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2024-11-21T17:47:37.113608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2024-11-21T17:47:37.113626Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 ===Wait for session created with token with removed ACE to die2024-11-21T17:47:37.113921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 2024-11-21T17:47:37.929033Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439790700547087471:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:37.929142Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGQ5M2M4OTctNmRiOWRiN2MtNjE0MzRlOWEtYWMzMDA0ZWU=, ActorId: [3:7439790700547087469:2348], ActorState: ExecuteState, TraceId: 01jd7xccj1amp5bncsywcr14g3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:37.929294Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:38.111215Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:38.111618Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|aa6f01ef-9be51bac-ec60abe9-8841fc9f_0 describe result for acl check 2024-11-21T17:47:38.111655Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|aa6f01ef-9be51bac-ec60abe9-8841fc9f_0 2024-11-21T17:47:38.111878Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|aa6f01ef-9be51bac-ec60abe9-8841fc9f_0 is DEAD 2024-11-21T17:47:38.111952Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:47:36.775252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790696191931517:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:36.775292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:36.776873Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790694544384583:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:36.777033Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:36.796534Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:36.798987Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b95/r3tmp/tmpATYJeI/pdisk_1.dat 2024-11-21T17:47:36.832952Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23498, node 1 2024-11-21T17:47:36.844045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000b95/r3tmp/yandexnO1hPS.tmp 2024-11-21T17:47:36.844060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000b95/r3tmp/yandexnO1hPS.tmp 2024-11-21T17:47:36.844118Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000b95/r3tmp/yandexnO1hPS.tmp 2024-11-21T17:47:36.844183Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:36.847440Z INFO: TTestServer started on Port 2251 GrpcPort 23498 TClient is connected to server localhost:2251 PQClient connected to localhost:23498 === TenantModeEnabled() = 1 === Init PQ - start server on port 23498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:36.875095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:36.875139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:36.876727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:36.903135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:36.903160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:36.904512Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:36.904839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:36.914329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:47:36.914384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.914442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:36.914505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:36.914518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.915280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:36.915301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:36.915352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.915367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:36.915369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T17:47:36.915373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2024-11-21T17:47:36.915881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:36.915892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T17:47:36.915895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:36.915914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.915919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:36.915923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T17:47:36.916431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.916453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.916459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:36.916467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:36.917359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:36.920595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T17:47:36.920649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:36.921421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211256965, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:36.921478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439790696191931862 RawX2: 4294969650 } } Step: 1732211256965 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:36.921494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:36.921564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T17:47:36.921578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:36.921618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:36.921637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:36.922144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:36.922164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:47:36.922211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:36.922219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439790696191931891:2379], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T17:47:36.922228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.922233Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T17:47:36.922251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T17:47:36.922257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:36.922263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T17:47:36.922267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:36.922269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T17:47:36.922272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T17:47:36.922281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:36.922290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T17:47:36.922292Z node 1 :FLAT_TX_SCHEMESHARD D ... dSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:38.698627Z :INFO: [/Root] [/Root] [7514878f-33709cb3-89c85f03-8325a018] [null] Closing session to cluster: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500003 " } 2024-11-21T17:47:38.698662Z :NOTICE: [/Root] [/Root] [7514878f-33709cb3-89c85f03-8325a018] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:38.698671Z :DEBUG: [/Root] [/Root] [7514878f-33709cb3-89c85f03-8325a018] [null] Abort session to cluster Got new read session event: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500003 " } 2024-11-21T17:47:38.698683Z :INFO: [/Root] [/Root] [7514878f-33709cb3-89c85f03-8325a018] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:38.698691Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:47:38.698697Z :INFO: [/Root] [/Root] [7514878f-33709cb3-89c85f03-8325a018] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:38.698705Z :NOTICE: [/Root] [/Root] [7514878f-33709cb3-89c85f03-8325a018] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:38.698731Z :INFO: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] Starting read session 2024-11-21T17:47:38.698736Z :DEBUG: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] Starting session to cluster null (localhost:8574) 2024-11-21T17:47:38.698756Z :DEBUG: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:38.698758Z :DEBUG: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:38.698761Z :DEBUG: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T17:47:38.698951Z :DEBUG: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] [null] Successfully connected. Initializing session 2024-11-21T17:47:38.699123Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2024-11-21T17:47:38.699134Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2024-11-21T17:47:38.699228Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2024-11-21T17:47:38.699263Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 read init: from# ipv6:[::1]:43890, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2024-11-21T17:47:38.699308Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 auth for : consumer_aba 2024-11-21T17:47:38.699451Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 Handle describe topics response 2024-11-21T17:47:38.699470Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 auth is DEAD 2024-11-21T17:47:38.699473Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 auth ok: topics# 1, initDone# 0 2024-11-21T17:47:38.699704Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 register session: topic# /Root/account1/write_topic 2024-11-21T17:47:38.699803Z :INFO: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] [null] Server session id: consumer_aba_3_2_3017873766761110210_v1 2024-11-21T17:47:38.699871Z :DEBUG: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:38.699908Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7439790706891009143:2359] connected; active server actors: 1 2024-11-21T17:47:38.699985Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T17:47:38.700038Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7439790706891009143:2359] session consumer_aba_3_2_3017873766761110210_v1 2024-11-21T17:47:38.700054Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2024-11-21T17:47:38.700068Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2024-11-21T17:47:38.700081Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_3017873766761110210_v1" (Sender=[3:7439790706891009140:2359], Pipe=[3:7439790706891009143:2359], Partitions=[], ActiveFamilyCount=0) 2024-11-21T17:47:38.700084Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2024-11-21T17:47:38.700097Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T17:47:38.700105Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_3017873766761110210_v1" (Sender=[3:7439790706891009140:2359], Pipe=[3:7439790706891009143:2359], Partitions=[], ActiveFamilyCount=0) 2024-11-21T17:47:38.700126Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_3017873766761110210_v1" sender [3:7439790706891009140:2359] lock partition 0 for ReadingSession "consumer_aba_3_2_3017873766761110210_v1" (Sender=[3:7439790706891009140:2359], Pipe=[3:7439790706891009143:2359], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2024-11-21T17:47:38.700142Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T17:47:38.700150Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000049s 2024-11-21T17:47:38.700042Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 got read request: guid# 2d35e273-a4aab221-e3b08b37-7efb62de 2024-11-21T17:47:38.700393Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_3017873766761110210_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7439790706891009143 RawX2: 4503612512274743 } Path: "/Root/account1/write_topic" } 2024-11-21T17:47:38.700424Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2024-11-21T17:47:38.700486Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1 2024-11-21T17:47:38.700536Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_3017873766761110210_v1:1 with generation 1 2024-11-21T17:47:38.702691Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1732211258599 CreateTimestampMS: 1732211258596 SizeLag: 165 WriteTimestampEstimateMS: 1732211258599 } Cookie: 18446744073709551615 } 2024-11-21T17:47:38.702717Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2024-11-21T17:47:38.702739Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2024-11-21T17:47:38.703049Z :INFO: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:38.703060Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2024-11-21T17:47:38.703068Z :INFO: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:38.703088Z :NOTICE: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:47:38.703096Z :DEBUG: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] [null] Abort session to cluster 2024-11-21T17:47:38.703196Z :NOTICE: [/Root] [/Root] [e3fcd996-715e1369-e1b5adf6-3357404] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:38.703347Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:38.703357Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 grpc read failed 2024-11-21T17:47:38.703361Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 grpc closed 2024-11-21T17:47:38.703373Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3017873766761110210_v1 is DEAD 2024-11-21T17:47:38.703461Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_3017873766761110210_v1 2024-11-21T17:47:38.703649Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7439790706891009143:2359] disconnected; active server actors: 1 2024-11-21T17:47:38.703664Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7439790706891009143:2359] client consumer_aba disconnected session consumer_aba_3_2_3017873766761110210_v1 >> TColumnShardTestSchema::ExternalTTL |77.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkLocks::EmptyRangeOlap >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:47:36.442972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790695784219389:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:36.443061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ba3/r3tmp/tmpUcyZK2/pdisk_1.dat 2024-11-21T17:47:36.482500Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:36.489813Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:36.500558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:36.509908Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20275, node 1 2024-11-21T17:47:36.531291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000ba3/r3tmp/yandexSOlpQp.tmp 2024-11-21T17:47:36.531306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/6fwh/000ba3/r3tmp/yandexSOlpQp.tmp 2024-11-21T17:47:36.531372Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000ba3/r3tmp/yandexSOlpQp.tmp 2024-11-21T17:47:36.531421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:36.535990Z INFO: TTestServer started on Port 17293 GrpcPort 20275 2024-11-21T17:47:36.543091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:36.543122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:36.544753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17293 PQClient connected to localhost:20275 === TenantModeEnabled() = 1 === Init PQ - start server on port 20275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:36.580818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:36.580842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:36.582311Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:36.582633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:36.591818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:47:36.591889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.591970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:36.592027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:36.592044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.593083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:36.593113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:36.593171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.593186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:36.593188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T17:47:36.593192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2024-11-21T17:47:36.593917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.593930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:36.593933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T17:47:36.594185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:36.594196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T17:47:36.594200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:36.594388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.594400Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.594405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:36.594420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:36.595201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:36.595631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T17:47:36.595672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:36.596215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211256643, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:36.596269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439790695784219811 RawX2: 4294969658 } } Step: 1732211256643 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:36.596280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:36.596342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T17:47:36.596353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:36.596391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:36.596405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:36.596789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:36.596798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:47:36.596846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:36.596854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439790695784219829:2377], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T17:47:36.596862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.596872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T17:47:36.596884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T17:47:36.596891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:36.596896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T17:47:36.596899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:36.596902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T17:47:36.596909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T17:47:36.596919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:36.596929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T17:47:36.596931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:47:36.597411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575 ... : [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:38.981455Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7439790703917346443:2369] destroyed 2024-11-21T17:47:38.981467Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:47:38.982025Z :DEBUG: [] MessageGroupId [123] SessionId [] Write session: try to update token 2024-11-21T17:47:38.982200Z :INFO: [] MessageGroupId [123] SessionId [] Write session: Do CDS request 2024-11-21T17:47:38.982208Z :INFO: [] MessageGroupId [123] SessionId [] Start write session. 
Will connect to endpoint: localhost:28003 2024-11-21T17:47:38.983795Z :DEBUG: [] MessageGroupId [123] SessionId [] Write session: send init request: init_request { topic: "/Root/PQ/account/topic" message_group_id: "123" } 2024-11-21T17:47:38.983929Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:38.983946Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 4 2024-11-21T17:47:38.984043Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/PQ/account/topic" message_group_id: "123" } 2024-11-21T17:47:38.984061Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 4 topic: "/Root/PQ/account/topic" message_group_id: "123" from ipv6:[::1]:38022 2024-11-21T17:47:38.984066Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=4 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:38022 proto=v1 topic=/Root/PQ/account/topic durationSec=0 2024-11-21T17:47:38.984070Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:38.984314Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: describe result for acl check 2024-11-21T17:47:38.984349Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:47:38.984351Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:38.984353Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:38.984364Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790703917346462:2375] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:38.984368Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:38.984473Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2024-11-21T17:47:38.984480Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:38.984487Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7439790703917346465:2375], now have 1 active actors on pipe 2024-11-21T17:47:38.984494Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:38.984499Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T17:47:38.984524Z node 3 :PERSQUEUE INFO: new Cookie 123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2024-11-21T17:47:38.984550Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:47:38.984566Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:47:38.984591Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:38.984593Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T17:47:38.984605Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:47:38.984618Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0 2024-11-21T17:47:38.984851Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211258984 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:38.984888Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0" topic: "PQ/account/topic" 2024-11-21T17:47:38.985021Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write 1 messages with Id from 1 to 1 2024-11-21T17:47:38.985042Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session: try to update token 2024-11-21T17:47:38.985052Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Send 1 message(s) (0 left), first sequence number is 3 2024-11-21T17:47:38.985110Z :INFO: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session: close. 
Timeout = 10000 ms 2024-11-21T17:47:38.985228Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:47:38.985312Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:47:38.985443Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:38.985453Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T17:47:38.985482Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T17:47:38.985506Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:47:38.985696Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:38.985700Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2024-11-21T17:47:38.985714Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2024-11-21T17:47:38.985735Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2024-11-21T17:47:38.985751Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". Partition: 0: Cookie: 3 2024-11-21T17:47:38.985798Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2024-11-21T17:47:38.985837Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2024-11-21T17:47:38.985889Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1732211258985 2024-11-21T17:47:38.985911Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:47:38.987329Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 2024-11-21T17:47:38.987343Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:47:38.987368Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T17:47:38.987398Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:47:38.987414Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:47:38.987736Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 1 } 2024-11-21T17:47:38.987747Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session: acknoledged message 1 2024-11-21T17:47:39.085486Z :INFO: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session will now close 2024-11-21T17:47:39.085513Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session: aborting 2024-11-21T17:47:39.085936Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0 grpc read done: success: 0 data: 2024-11-21T17:47:39.085948Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0 grpc read failed 2024-11-21T17:47:39.085959Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0 grpc closed 2024-11-21T17:47:39.085962Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0 is DEAD 2024-11-21T17:47:39.086221Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:39.086384Z :INFO: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:39.086409Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:39.086430Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7439790703917346465:2375] destroyed 2024-11-21T17:47:39.086458Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session is aborting and will not restart 2024-11-21T17:47:39.086456Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:47:39.086533Z :DEBUG: [] MessageGroupId [123] SessionId [123|7ecec2b7-18fa84b6-fb831f69-62a1fa28_0] Write session: destroy >> TxUsage::WriteToTopic_Demo_26 [GOOD] >> TxUsage::WriteToTopic_Demo_10 [GOOD] >> TColumnShardTestSchema::RebootColdTiers >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_27 >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs [GOOD] >> TxUsage::WriteToTopic_Demo_11 >> TColumnShardTestSchema::CreateTable >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateSubDomainWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:19.352508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:19.352533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:19.352538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:19.352543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:19.352549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:19.352553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:19.352563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:19.352641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:19.364153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:19.364179Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:19.366637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:19.366737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2024-11-21T17:47:19.366769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:19.376835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:19.376964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:19.377086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:19.377712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:19.378848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:19.379211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:19.379225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:19.379238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:19.379245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:19.379251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:19.379298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:19.380564Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:19.400477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:19.400614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.400692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:19.400757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:19.400766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.403636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:19.403675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:19.403756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.403769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:19.403773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:19.403780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:19.404361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.404375Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:19.404382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:19.404958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.404971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.404988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:19.404997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:19.405617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:19.406061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:19.406112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:19.406317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:19.406342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:19.406350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:19.406416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:19.406423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:19.406457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:19.406470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:19.406830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:19.406840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:19.406887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:19.406893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:19.406976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:19.406983Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:19.406994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:19.406998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:19.407004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:19.407009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:19.407014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:19.407018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:19.407030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:19.407035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:19.407039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
r pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:40.062900Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:47:40.062902Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:47:40.062904Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:47:40.062906Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:47:40.062908Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:47:40.062910Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:47:40.062912Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T17:47:40.062914Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T17:47:40.062924Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 5 2024-11-21T17:47:40.062927Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T17:47:40.062929Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:47:40.062931Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T17:47:40.062933Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T17:47:40.062935Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T17:47:40.062936Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2024-11-21T17:47:40.063264Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063280Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063284Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.063289Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:47:40.063295Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:40.063416Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063425Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 
72057594046678944, cookie: 1003 2024-11-21T17:47:40.063428Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.063432Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:47:40.063435Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:40.063787Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063799Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063802Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.063806Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T17:47:40.063810Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:47:40.063871Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063878Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063881Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.063884Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T17:47:40.063888Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:47:40.063940Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063947Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.063951Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.063954Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:47:40.063958Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:47:40.063963Z node 83 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:47:40.064484Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.064505Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.064514Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.064561Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.064817Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:47:40.064875Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:47:40.064882Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:47:40.064956Z node 83 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:47:40.064972Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:40.064977Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [83:448:2417] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:40.065036Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:40.065081Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 53us result status StatusSuccess 2024-11-21T17:47:40.065156Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 6 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 6 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2024-11-21T17:47:40.065215Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:40.065238Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 24us result status StatusPathDoesNotExist 2024-11-21T17:47:40.065254Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |77.8%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirsAndRejectInTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:18.307745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:18.307770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:18.307775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:18.307779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:18.307784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:18.307787Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:18.307798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:18.307870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:18.322050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:18.322073Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:18.324317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:18.324426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:18.324459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:18.326848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:18.326923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:18.327028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:18.327204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:18.327674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:18.327914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:18.327922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:18.327931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:18.327937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:18.327941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:18.327973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:18.328969Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:18.342316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:18.342409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.342471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:18.342527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:18.342534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.343392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:18.343414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:18.343462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.343469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:18.343472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:18.343475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:18.343815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.343825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:18.343830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:18.344105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.344114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.344119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:18.344127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.344605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:18.344936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:18.344987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:18.345172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:18.345198Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:18.345205Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:18.345264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:18.345270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:18.345299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:18.345310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:18.345659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:18.345669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:18.345708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:18.345713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:18.345782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:18.345788Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:18.345799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:18.345803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.345808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:18.345814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:18.345819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:18.345822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:18.345832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:18.345837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:18.345841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:47:40.088474Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:4, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:40.088478Z node 88 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T17:47:40.088482Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:4, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:47:40.088487Z node 88 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:4 129 -> 240 2024-11-21T17:47:40.088600Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.088607Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.088609Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.088611Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 5 2024-11-21T17:47:40.088613Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2024-11-21T17:47:40.088922Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.088941Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.088945Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.088949Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 5 2024-11-21T17:47:40.088954Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2024-11-21T17:47:40.089024Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.089032Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.089035Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:40.089038Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 
11], version: 3 2024-11-21T17:47:40.089042Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 4 2024-11-21T17:47:40.089048Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/5, is published: true 2024-11-21T17:47:40.089730Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.089755Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T17:47:40.089765Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.089773Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.089793Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T17:47:40.089838Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.089859Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:4, at schemeshard: 72057594046678944 2024-11-21T17:47:40.089863Z node 88 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:4 ProgressState 2024-11-21T17:47:40.089869Z node 88 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:4 progress is 5/5 2024-11-21T17:47:40.089872Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T17:47:40.089875Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 5/5, is published: true 2024-11-21T17:47:40.089878Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 5/5 2024-11-21T17:47:40.089883Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:47:40.089885Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:47:40.089892Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T17:47:40.089898Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:47:40.089900Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:47:40.089903Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2024-11-21T17:47:40.089905Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:47:40.089906Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:47:40.089909Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2024-11-21T17:47:40.089911Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T17:47:40.089912Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T17:47:40.089915Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] 
was 2 2024-11-21T17:47:40.089917Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:4 2024-11-21T17:47:40.089918Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:4 2024-11-21T17:47:40.089928Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2024-11-21T17:47:40.089970Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:40.089985Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:47:40.090434Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:47:40.090442Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:47:40.090507Z node 88 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:47:40.090524Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:40.090529Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [88:418:2393] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:40.090585Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:40.090619Z node 88 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 43us result status StatusSuccess 2024-11-21T17:47:40.090696Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 9 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_name" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:40.090744Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:40.090762Z node 88 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 19us result status StatusPathDoesNotExist 2024-11-21T17:47:40.090778Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTicketParserTest::LoginBad >> Describe::Basic [GOOD] >> Describe::Statistics >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:47:38.242847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790706093762933:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:38.242907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:38.243858Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790703851827943:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:38.243869Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b73/r3tmp/tmpjUpCAv/pdisk_1.dat 2024-11-21T17:47:38.273569Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:38.274995Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:38.314415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12978, node 1 2024-11-21T17:47:38.343919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:38.343946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:38.348917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:38.364430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000b73/r3tmp/yandexPgZl3o.tmp 2024-11-21T17:47:38.364445Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000b73/r3tmp/yandexPgZl3o.tmp 2024-11-21T17:47:38.364526Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000b73/r3tmp/yandexPgZl3o.tmp 2024-11-21T17:47:38.364568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:38.368355Z INFO: TTestServer started on Port 9979 GrpcPort 12978 2024-11-21T17:47:38.371491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:38.371512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:38.377163Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:38.377551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9979 PQClient connected to localhost:12978 === TenantModeEnabled() = 1 === Init PQ - start server on port 12978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:38.403359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:47:38.403413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.403473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:38.403538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:38.403551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.404323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:38.404348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:38.404400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.404415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:38.404418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T17:47:38.404422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:47:38.404939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.404950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:38.404954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:47:38.405323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.405334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.405338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:38.405342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 waiting... 
2024-11-21T17:47:38.406108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:38.406171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:38.406182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T17:47:38.406185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:38.406505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T17:47:38.406538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:38.407168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211258456, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:38.407202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439790706093763339 RawX2: 4294969648 } } Step: 1732211258456 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:38.407211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:38.407266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:47:38.407276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:38.407304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:38.407318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:38.407677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:38.407686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:47:38.407724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:38.407733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439790706093763360:2370], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T17:47:38.407741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.407746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:47:38.407759Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:47:38.407761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T17:47:38.407765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T17:47:38.407769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T17:47:38.407772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:47:38.407775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T17:47:38.407800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:38.407810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:47:38.407812Z node 1 :FLAT_TX_SCHEMESHARD D ... MESHARD DEBUG: Publication details: tx: 281474976720664, [OwnerId: 72057594046644480, LocalPathId: 11], 5 2024-11-21T17:47:39.973919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720664, [OwnerId: 72057594046644480, LocalPathId: 12], 2 2024-11-21T17:47:39.974092Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974103Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974105Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976720664 2024-11-21T17:47:39.974108Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T17:47:39.974110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T17:47:39.974141Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974144Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974147Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976720664 2024-11-21T17:47:39.974149Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2024-11-21T17:47:39.974150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2024-11-21T17:47:39.974162Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, 
msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974165Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974166Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720664 2024-11-21T17:47:39.974166Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2024-11-21T17:47:39.974167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2024-11-21T17:47:39.974171Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720664, subscribers: 1 2024-11-21T17:47:39.974173Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7439790711030399958:2327] 2024-11-21T17:47:39.974643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 2024-11-21T17:47:39.974656Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2024-11-21T17:47:40.137543Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:40.137573Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T17:47:40.137769Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2024-11-21T17:47:40.137813Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:48160 2024-11-21T17:47:40.137826Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:48160 proto=v1 topic=Root/acc/topic1 durationSec=0 2024-11-21T17:47:40.137830Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:40.138409Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:47:40.138450Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:47:40.138457Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, 
$CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:40.138460Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:40.138473Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790715325367414:2334] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:40.138477Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:40.138677Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T17:47:40.138711Z node 3 :PERSQUEUE INFO: new Cookie 12345678|49e2ec98-ef948abe-d5c70e7c-e49c0d52_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T17:47:40.138876Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|49e2ec98-ef948abe-d5c70e7c-e49c0d52_0 Finish: 0 === InitializeWritePQService done === PersQueueClient 2024-11-21T17:47:40.141128Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|49e2ec98-ef948abe-d5c70e7c-e49c0d52_0 grpc read done: success: 0 data: 2024-11-21T17:47:40.141138Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|49e2ec98-ef948abe-d5c70e7c-e49c0d52_0 grpc read failed 2024-11-21T17:47:40.141255Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|49e2ec98-ef948abe-d5c70e7c-e49c0d52_0 2024-11-21T17:47:40.141260Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|49e2ec98-ef948abe-d5c70e7c-e49c0d52_0 is DEAD 2024-11-21T17:47:40.141349Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed 2024-11-21T17:47:40.145521Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:40.145535Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T17:47:40.145660Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2024-11-21T17:47:40.145682Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:48160 2024-11-21T17:47:40.145687Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:48160 proto=v1 topic=topic1 durationSec=0 2024-11-21T17:47:40.145691Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:40.146032Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T17:47:40.146080Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:47:40.146082Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; 
DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:40.146084Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:40.146098Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790715325367427:2340] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:40.146102Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:40.146369Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T17:47:40.146508Z node 3 :PERSQUEUE INFO: new Cookie 12345678|ca66160e-d3703961-a2d246fe-66f1e764_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T17:47:40.146667Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|ca66160e-d3703961-a2d246fe-66f1e764_0 2024-11-21T17:47:40.147206Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|ca66160e-d3703961-a2d246fe-66f1e764_0 grpc read done: success: 0 data: 2024-11-21T17:47:40.147217Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|ca66160e-d3703961-a2d246fe-66f1e764_0 grpc read failed 2024-11-21T17:47:40.147222Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|ca66160e-d3703961-a2d246fe-66f1e764_0 grpc closed 2024-11-21T17:47:40.147225Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|ca66160e-d3703961-a2d246fe-66f1e764_0 is DEAD 2024-11-21T17:47:40.147498Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:47:38.789562Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790705397036184:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:38.789746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:38.792602Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790703026510184:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:38.816900Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b63/r3tmp/tmpP39OX2/pdisk_1.dat 2024-11-21T17:47:38.824453Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:38.825586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:38.848640Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1599, node 1 2024-11-21T17:47:38.867675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000b63/r3tmp/yandexmadzoJ.tmp 2024-11-21T17:47:38.867693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000b63/r3tmp/yandexmadzoJ.tmp 2024-11-21T17:47:38.867758Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000b63/r3tmp/yandexmadzoJ.tmp 2024-11-21T17:47:38.867819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:38.869202Z INFO: TTestServer started on Port 26569 GrpcPort 1599 TClient is connected to server localhost:26569 PQClient connected to localhost:1599 === TenantModeEnabled() = 1 === Init PQ - start server on port 1599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:38.892553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:38.892582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
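Aside for readability: the three TTableHelper queries that the partition chooser logs in these tests (first in the session above, and again in the sessions below) are hard to read inline, so they are restated here as formatted YQL. This is only a re-layout of what the log already records; the table path, parameter names, and predicates are copied from the log, nothing is added or changed.

    --!syntax_v1
    -- SelectQuery: look up an existing partition mapping for a producer
    DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

    --!syntax_v1
    -- UpdateQuery: record (or overwrite) the chosen partition for a producer
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
    UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
        (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    --!syntax_v1
    -- UpdateAccessTimeQuery: refresh the access time of an existing mapping
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;
    UPDATE `//Root/.metadata/TopicPartitionsMapping`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition;

The log entries that follow continue the second test server's startup unchanged.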
2024-11-21T17:47:38.896390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:38.918978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:38.919008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:38.920252Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:38.920748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:38.928837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:47:38.928909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.928994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:38.929045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:38.929062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.930033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:38.930059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:38.930106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.930115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:38.930117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T17:47:38.930121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 2024-11-21T17:47:38.930816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.930824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:38.930827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T17:47:38.931170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.931176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.931180Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:38.931185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:38.931890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 waiting... 2024-11-21T17:47:38.932320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T17:47:38.932359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:38.932944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:38.932950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T17:47:38.932954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:47:38.933234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211258981, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:38.933275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439790705397036732 RawX2: 4294969648 } } Step: 1732211258981 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:38.933281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:38.933395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T17:47:38.933402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:47:38.933438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:38.933449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:38.933894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:38.933906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:47:38.933955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:38.933963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439790705397036764:2377], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T17:47:38.933971Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.933980Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T17:47:38.933997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T17:47:38.934004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:38.934008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T17:47:38.934016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:47:38.934019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T17:47:38.934025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T17:47:38.934037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:38.934048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T17:47:38.934055Z node 1 :FLAT_TX_SCHEMESHARD DEB ... RT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:40.580075Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:40.580089Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790712801261967:2335] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:40.580099Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:40.580265Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T17:47:40.580323Z node 3 :PERSQUEUE INFO: new Cookie 12345678|d81736c8-1f25db93-eff89c8b-16782e4c_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2024-11-21T17:47:40.580486Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|d81736c8-1f25db93-eff89c8b-16782e4c_0 2024-11-21T17:47:40.580961Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|d81736c8-1f25db93-eff89c8b-16782e4c_0 grpc read done: success: 0 data: 2024-11-21T17:47:40.580980Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|d81736c8-1f25db93-eff89c8b-16782e4c_0 grpc read failed 2024-11-21T17:47:40.581105Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|d81736c8-1f25db93-eff89c8b-16782e4c_0 2024-11-21T17:47:40.581117Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|d81736c8-1f25db93-eff89c8b-16782e4c_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2024-11-21T17:47:40.581260Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2024-11-21T17:47:40.585502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:38064" , at schemeshard: 72057594046644480 2024-11-21T17:47:40.585550Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:40.585572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2024-11-21T17:47:40.585573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T17:47:40.585613Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:40.585621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:40.585639Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2024-11-21T17:47:40.585645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T17:47:40.585657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2024-11-21T17:47:40.585672Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2024-11-21T17:47:40.585681Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2024-11-21T17:47:40.585683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2024-11-21T17:47:40.585685Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2024-11-21T17:47:40.585688Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2024-11-21T17:47:40.585690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2024-11-21T17:47:40.586511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:40.586542Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, add access: -():test_user@builtin:- 2024-11-21T17:47:40.586574Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:40.586580Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2024-11-21T17:47:40.586614Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:40.586621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439790708506293848:2371], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2024-11-21T17:47:40.586867Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T17:47:40.586890Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2024-11-21T17:47:40.586894Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2024-11-21T17:47:40.586898Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2024-11-21T17:47:40.586902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2024-11-21T17:47:40.586925Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2024-11-21T17:47:40.587291Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2024-11-21T17:47:40.587423Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:47:40.587432Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T17:47:40.587545Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 
1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2024-11-21T17:47:40.587568Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:38050 2024-11-21T17:47:40.587578Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:38050 proto=v1 topic=/Root/acc/topic1 durationSec=0 2024-11-21T17:47:40.587582Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:40.587796Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T17:47:40.587837Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:47:40.587843Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:47:40.587844Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:47:40.587857Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790712801261997:2343] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:47:40.587860Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:47:40.588069Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2024-11-21T17:47:40.588117Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|35b1aceb-1af9bbaa-4b9cfabf-31e2d886_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2024-11-21T17:47:40.588282Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|35b1aceb-1af9bbaa-4b9cfabf-31e2d886_0 2024-11-21T17:47:40.588649Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|35b1aceb-1af9bbaa-4b9cfabf-31e2d886_0 grpc read done: success: 1 data: update_token_request [content omitted] 2024-11-21T17:47:40.588755Z node 3 :PQ_WRITE_PROXY INFO: updating token 2024-11-21T17:47:40.588770Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:40.588900Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|35b1aceb-1af9bbaa-4b9cfabf-31e2d886_0 describe result for acl check 2024-11-21T17:47:40.588924Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|35b1aceb-1af9bbaa-4b9cfabf-31e2d886_0 2024-11-21T17:47:40.589042Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|35b1aceb-1af9bbaa-4b9cfabf-31e2d886_0 is DEAD 2024-11-21T17:47:40.589127Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TColumnShardTestSchema::CreateTable [GOOD] >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions >> IntermediateDirsReboots::CreateTableWithIntermediateDirs [GOOD] >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2024-11-21T17:47:40.579052Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:47:40.595812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:47:40.598475Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:47:40.598510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:47:40.598565Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:47:40.599317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:40.599361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:40.599397Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:40.599418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:40.599433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:40.599449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:40.599463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:40.599501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:40.599521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:40.599538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:40.599554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:40.599570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:40.604030Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:47:40.605344Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:47:40.605432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:47:40.605460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:47:40.605505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:47:40.605549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:47:40.605575Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:47:40.605580Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:47:40.605590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:47:40.605600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:47:40.605607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:47:40.605611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:47:40.605646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:47:40.605654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:47:40.605662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:47:40.605666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:47:40.605676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:47:40.605692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:47:40.605700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:47:40.605704Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:47:40.605717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:47:40.605723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:47:40.605727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:47:40.605735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T17:47:40.605743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:47:40.605749Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:47:40.605793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2024-11-21T17:47:40.605803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2024-11-21T17:47:40.605812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:47:40.605822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2024-11-21T17:47:40.605845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:47:40.605853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:47:40.605857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:47:40.605883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:47:40.605890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:47:40.605895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:47:40.605909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:47:40.605916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:47:40.605920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:47:40.605941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:47:40.605949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:47:40.605953Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:47:40.605967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normali ... KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=22290872206960;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T17:47:41.598206Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=22290826433856;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=22290872206960;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=tx_controller.cpp:371;event=start;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;; 2024-11-21T17:47:41.598219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=22290826433856;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=22290872206960;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T17:47:41.598232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=22290826433856;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=22290872206960;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2024-11-21T17:47:41.598355Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2024-11-21T17:47:41.598378Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2024-11-21T17:47:41.598426Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:47:41.598508Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2024-11-21T17:47:41.598531Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:259;method=RegisterTable;path_id=19; 2024-11-21T17:47:41.598537Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=19; 2024-11-21T17:47:41.598661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:47:41.609804Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2024-11-21T17:47:41.609892Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T17:47:41.610250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=22290826434496;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T17:47:41.610292Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=22290826434496;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T17:47:41.621744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=22290826434496;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T17:47:41.621770Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=22290826434496;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" 
TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T17:47:41.622110Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=22290826434496;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T17:47:41.622155Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=22290826434496;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T17:47:41.633232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=22290826434496;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T17:47:41.633262Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=22290826434496;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { 
Version: 1 } } } } 2024-11-21T17:47:41.633599Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=22290826434496;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T17:47:41.633678Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=22290826434496;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T17:47:41.645443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=22290826434496;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T17:47:41.645471Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=22290826434496;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> TxUsage::ReadRuleGeneration >> TRestoreTests::ShouldFailOnInvalidValue[Raw] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnLargeData[Raw] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 |77.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> IntermediateDirsReboots::CreateTableWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 
2024-11-21T17:47:20.949957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:20.949980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.949985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:20.949990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:20.949996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:20.950000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:20.950009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.950120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:20.959927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:20.959947Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.962232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:20.962330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:20.962361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:20.965108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:20.965179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:20.965282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.965474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:20.966193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.966451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.966462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.966475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:20.966482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.966487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:20.966525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for 
TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:20.967925Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.979497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:20.979561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.979610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:20.979660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:20.979666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.980203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.980219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:20.981544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.981575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:20.981580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:20.981585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:20.982232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.982251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:20.982257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:20.982714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.982726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.982732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.982740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.983403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true 
} CoordinatorID: 72057594046316545 2024-11-21T17:47:20.983762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:20.983811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:20.983962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.983982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:20.983987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.984041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:20.984048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.984075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:20.984085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:20.984487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.984500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.984548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.984553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:20.984640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.984646Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:20.984657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:20.984662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.984668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:20.984674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.984678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:20.984682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2024-11-21T17:47:20.984694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:20.984700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:20.984704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 8944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.926187Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.926190Z node 84 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:41.926193Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T17:47:41.926196Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:47:41.926312Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 360777255187 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:47:41.926316Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 3 2024-11-21T17:47:41.926325Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 360777255187 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:47:41.926329Z node 84 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:3 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:47:41.926335Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:3 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 335 RawX2: 360777255187 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:47:41.926343Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:3, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:41.926346Z node 84 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T17:47:41.926348Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:3, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:47:41.926352Z node 84 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:3 129 -> 240 2024-11-21T17:47:41.926434Z node 84 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.926443Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.926446Z node 84 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:41.926450Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:47:41.926454Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:47:41.926463Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T17:47:41.927269Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.927292Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.927306Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T17:47:41.927321Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.927330Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:41.927338Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T17:47:41.927393Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2024-11-21T17:47:41.927399Z node 84 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:3 ProgressState 2024-11-21T17:47:41.927408Z node 84 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:3 progress is 4/4 2024-11-21T17:47:41.927412Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T17:47:41.927417Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T17:47:41.927421Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T17:47:41.927427Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:47:41.927434Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:47:41.927442Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:41.927445Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:47:41.927448Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:47:41.927453Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:47:41.927456Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:47:41.927459Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:47:41.927463Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:47:41.927466Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T17:47:41.927468Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T17:47:41.927479Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:47:41.927534Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:47:41.927926Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:47:41.927935Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:47:41.927979Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:47:41.927996Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:41.928001Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [84:416:2391] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:41.928062Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:41.928110Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 57us result status StatusSuccess 2024-11-21T17:47:41.928196Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "z" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:41.928278Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:41.928298Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 22us result status StatusPathDoesNotExist 2024-11-21T17:47:41.928315Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> DstCreator::ExistingDst >> DstCreator::WithSyncIndex >> TTicketParserTest::BulkAuthorizationWithUserAccount2 |77.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |77.8%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> DstCreator::SamePartitionCount >> TColumnShardTestSchema::ExternalTTL [GOOD] >> TRestoreTests::ShouldFailOnInvalidValue[Raw] [GOOD] >> DstCreator::WithSyncIndex [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> DstCreator::SamePartitionCount [GOOD] >> DstCreator::ExistingDst [GOOD] >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] >> TRestoreTests::ShouldFailOnInvalidValue[Zstd] |77.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> DstCreator::UnsupportedReplicationConsistency >> DstCreator::EmptyReplicationConfig >> DstCreator::UnsupportedReplicationConsistency [GOOD] >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots [GOOD] >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable >> TxUsage::WriteToTopic_Demo_22_RestartNo [GOOD] >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] >> TxUsage::WriteToTopic_Demo_6 [GOOD] |77.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> 
KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 5079, MsgBus: 30133 2024-11-21T17:47:32.904721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:47:32.905044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:32.905058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00145f/r3tmp/tmpZh1elW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5079, node 1 TClient is connected to server localhost:30133 TClient is connected to server localhost:30133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:33.047860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:33.047878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:33.047882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:33.055988Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:33.101781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:33.101824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:33.102303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.104752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:33.224567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.432287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.728746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:33.989073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:34.320893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1728:3347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.320937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.324858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.530366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.790973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.018257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.274783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.523449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.826774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2300:3791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.826808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.826846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2305:3796], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.827709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:35.997569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2307:3798], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:36.219500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.446452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.748927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11088, MsgBus: 16175 2024-11-21T17:47:37.819460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:47:37.819492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:47:37.819513Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00145f/r3tmp/tmp1YN757/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11088, node 2 TClient is connected to server localhost:16175 2024-11-21T17:47:37.956922Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:37.957292Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:37.957304Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:37.957308Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:37.957406Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16175 2024-11-21T17:47:38.030905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:38.030934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:38.038685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:38.044080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:38.148360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:38.377818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:38.674475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:38.936266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:39.239221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1724:3344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:39.239295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:39.242599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:39.461611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:39.717896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:39.959435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:40.217697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:40.460467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:40.784999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2295:3787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:40.785053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:40.785122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2300:3792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:40.786344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:40.973926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2302:3794], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:41.283449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:41.519325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:41.823063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> DstCreator::EmptyReplicationConfig [GOOD] >> TRestoreTests::ShouldFailOnInvalidValue[Zstd] [GOOD] >> TxUsage::WriteToTopic_Demo_7 >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] >> TxUsage::WriteToTopic_Demo_35 [GOOD] >> TxUsage::WriteToTopic_Demo_36 |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2024-11-21T17:47:42.871782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790720182343593:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:42.872015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001d20/r3tmp/tmpnpqdmL/pdisk_1.dat 2024-11-21T17:47:42.920498Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27310 TServer::EnableGrpc on GrpcPort 31423, node 1 2024-11-21T17:47:42.951085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:42.951102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:42.951104Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:42.951153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27310 2024-11-21T17:47:42.971909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:42.971935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:42.973066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:42.998442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:43.000527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:43.001572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:43.062501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211263048 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263118 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211263048 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263118 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T17:47:43.074074Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.074107Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.074109Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T17:47:43.074233Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T17:47:43.168204Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211263104, tx_id: 281474976715658 } } } 2024-11-21T17:47:43.168335Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T17:47:43.168840Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479} 2024-11-21T17:47:43.169438Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263118 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: ... GrpcPort 22967, node 2 2024-11-21T17:47:43.425162Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:43.425178Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:43.425180Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:43.425226Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17331 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:43.495761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:43.495791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:43.499317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:43.499690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:43.503852Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:43.506875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:43.527617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211263545 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263587 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211263545 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263587 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2024-11-21T17:47:43.546347Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.546371Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.546373Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T17:47:43.548825Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T17:47:43.709061Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211263573, tx_id: 281474976715658 } } } 2024-11-21T17:47:43.709149Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T17:47:43.709488Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479} 2024-11-21T17:47:43.709710Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 
20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263587 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T17:47:43.709740Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL [GOOD] Test command err: 2024-11-21T17:47:39.806356Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:47:39.820730Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:39.822997Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:39.823097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:47:39.825346Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:47:39.825365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:47:39.825404Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:47:39.826029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:39.826077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:39.826109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:39.826129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:39.826144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:39.826161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:39.826190Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:39.826213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:39.826228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:39.826240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:39.826251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:39.826263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:39.829965Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:39.830047Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T17:47:39.830060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:47:39.830066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T17:47:39.831005Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:47:39.831053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:47:39.831058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:47:39.831080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:47:39.831108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:47:39.831118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:47:39.831121Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:47:39.831127Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:47:39.831133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:47:39.831138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:47:39.831140Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:47:39.831150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:47:39.831154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:47:39.831159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:47:39.831161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:47:39.831166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:47:39.831170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:47:39.831175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:47:39.831180Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:47:39.831190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:47:39.831197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:47:39.831200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:47:39.831206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:47:39.831211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:47:39.831214Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:47:39.831238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2024-11-21T17:47:39.831244Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T17:47:39.831250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:47:39.831257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T17:47:39.831270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:47:39.831276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:47:39.831278Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:47:39.831292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:47:39.831296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:47:39.831298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:47:39.831307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:47:39.831311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024- ... 
canId=0;TxId=18446744073709551615;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=4; 2024-11-21T17:47:42.832348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T17:47:42.832379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832391Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:47:42.832399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:47:42.832606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:47:42.832620Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T17:47:42.832631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=4; 2024-11-21T17:47:42.832642Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=4; 2024-11-21T17:47:42.832654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:47:42.832666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T17:47:42.832676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:47:42.832740Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:47:42.832759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:47:42.832774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2024-11-21T17:47:42.832784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2024-11-21T17:47:42.832811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] send ScanData to [1:363:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T17:47:42.832821Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:47:42.832863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T17:47:42.832874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] finished for tablet 9437184 2024-11-21T17:47:42.832885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:368:2380] send ScanData to [1:363:2375] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:47:42.832941Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:368:2380] and sent to [1:363:2375] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.023}],"full":{"a":1732211262809400,"name":"_full_task","f":1732211262809400,"d_finished":0,"c":0,"l":1732211262832893,"d":23493},"events":[{"name":"bootstrap","f":1732211262809577,"d_finished":489,"c":1,"l":1732211262810066,"d":489},{"a":1732211262832855,"name":"ack","f":1732211262832735,"d_finished":106,"c":1,"l":1732211262832841,"d":144},{"a":1732211262832854,"name":"processing","f":1732211262810218,"d_finished":523,"c":5,"l":1732211262832842,"d":562},{"name":"ProduceResults","f":1732211262809863,"d_finished":245,"c":8,"l":1732211262832872,"d":245},{"a":1732211262832872,"name":"Finish","f":1732211262832872,"d_finished":0,"c":0,"l":1732211262832893,"d":21},{"name":"task_result","f":1732211262810221,"d_finished":399,"c":4,"l":1732211262832687,"d":399}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T17:47:42.832955Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:47:42.809109Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T17:47:42.832961Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:47:42.832982Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.022171s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.020529s;size=0.0063152;details={columns=9;};};]};; 2024-11-21T17:47:42.832991Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:368:2380];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI |77.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::LocksAbortOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 61388, MsgBus: 20468 2024-11-21T17:47:30.402807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790668786745432:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:30.402822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0014ce/r3tmp/tmpu3ViRC/pdisk_1.dat 2024-11-21T17:47:30.442534Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61388, node 1 2024-11-21T17:47:30.454127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:30.454139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:30.454140Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:30.454166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20468 TClient is connected to server localhost:20468 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:30.502962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:30.502993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:30.504109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:30.525982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:30.656104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790668786746030:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:30.656125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:30.656181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790668786746042:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:30.656850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:47:30.658299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790668786746044:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:47:30.757577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:30.777116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:30.777177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:30.777221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:30.777244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:30.777263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:30.777278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:30.777295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:30.777308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:30.777327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:30.777342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:30.777357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:30.777373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7439790668786746250:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:30.777467Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:30.777493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:30.777535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:30.777558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:30.777578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:30.777598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:30.777618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:30.777639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:30.777662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:30.777683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:30.777704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:30.777739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790668786746252:2318];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:30.778217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:47:30.778233Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:47:30.778246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:47:30.778255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:47:30.778270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=ab ... 2Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037895;tx_state=complete;fline=interaction.h:353;batch=Group: [ 1 ] Name: [ 536572676579 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_include":2},"id":281474976715665}],"finishes":[{"inc":{"count_include":2},"id":281474976715665}]},"p":{"include":0,"pk":"1;Sergey;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665}]},"p":{"include":2147483647}}]}; 2024-11-21T17:47:31.435762Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790673081721879:3252] TxId: 281474976715666. Ctx: { TraceId: 01jd7xc66bcb02p46wg5qtk8hw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQzN2U4YWQtYTJiOGQ5Y2ItOGQwYmMxNWMtZDBlZmI5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root 2024-11-21T17:47:31.460372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.460552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.461368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:31.462893Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439790673081722116:3252] TxId: 281474976715668. Ctx: { TraceId: 01jd7xc6850neja46bpvthe3zh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQzN2U4YWQtYTJiOGQ5Y2ItOGQwYmMxNWMtZDBlZmI5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Can not find default state storage group for database /Root WAIT_INDEXATION: 0 2024-11-21T17:47:31.793529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439790668786746246:2312];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=6; 2024-11-21T17:47:31.795685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;task_id=aee5499e-a83011ef-8cfb6587-173fec38;fline=with_appended.cpp:80;portions=3,;task_id=aee5499e-a83011ef-8cfb6587-173fec38; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T17:47:35.403143Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790668786745432:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:35.403245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 27766, MsgBus: 10348 2024-11-21T17:47:36.938202Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790694669030609:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:36.938460Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0014ce/r3tmp/tmp73pKfa/pdisk_1.dat 2024-11-21T17:47:36.953800Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27766, node 2 2024-11-21T17:47:36.962553Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:36.962570Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:36.962573Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:36.962620Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10348 TClient is connected to server localhost:10348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:37.039143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:37.039176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:37.040209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:37.040943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:37.245548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790698963998495:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:37.245567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790698963998522:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:37.245573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:37.246202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:47:37.247765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790698963998524:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:47:37.344083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:37.401478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:47:37.608755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:37.954223Z node 2 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715667; 2024-11-21T17:47:37.968802Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790698964006634:2931], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7439790698964006217:2931]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037889, Sink=[2:7439790698964006634:2931].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T17:47:37.969018Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790698964006586:2931], SessionActorId: [2:7439790698964006217:2931], Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[2:7439790698964006217:2931]. isRollback=0 2024-11-21T17:47:37.969334Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTRkMGIyZjItNmY4Y2EyNTktNDM2MmRlN2ItNzg3MDdjOWI=, ActorId: [2:7439790698964006217:2931], ActorState: ExecuteState, TraceId: 01jd7xccjc9spntzpc8s5yg78x, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439790698964006587:2931] from: [2:7439790698964006586:2931] 2024-11-21T17:47:37.969416Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790698964006587:2931] TxId: 281474976715667. Ctx: { TraceId: 01jd7xccjc9spntzpc8s5yg78x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkMGIyZjItNmY4Y2EyNTktNDM2MmRlN2ItNzg3MDdjOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/KV`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T17:47:37.971017Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTRkMGIyZjItNmY4Y2EyNTktNDM2MmRlN2ItNzg3MDdjOWI=, ActorId: [2:7439790698964006217:2931], ActorState: ExecuteState, TraceId: 01jd7xccjc9spntzpc8s5yg78x, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T17:47:41.938442Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790694669030609:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:41.938492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 |77.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |77.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnInvalidValue[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:47:42.595225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:42.595252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:42.595257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:42.595261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:42.595268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:42.595273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:42.595281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:42.595340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:42.605452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:42.605477Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:42.608430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:42.609176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:42.609224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:42.610841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:42.612364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2024-11-21T17:47:42.612476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:42.612576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:42.613658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:42.613961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:42.613972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:42.614006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:42.614012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:42.614019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:42.614032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.615163Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:47:42.631890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:42.631987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.632050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:42.632095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:42.632103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.636773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:42.636814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:42.636889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.636902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:42.636908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:42.636914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:42.640727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.640756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:42.640764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:42.642858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.642878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.642886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:42.642910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:42.643616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:42.644170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:42.644225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:42.644429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:42.644471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:42.644479Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:42.644547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:42.644557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:42.644594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:42.644607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:42.645057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:42.645066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:42.645111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T17:47:42.645116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:47:42.645207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.645215Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:42.645227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:42.645231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:42.645237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:42.645242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:42.645248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:42.645252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:42.645265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:42.645271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:42.645276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:47:42.645609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:47:42.645622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:47:42.645629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:47:42.645634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:47:42.645639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:42.645651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:47:43.377782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:47:43.378032Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:43.378055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:43.378065Z node 2 :FLAT_TX_SCHEMESHARD INFO: TRestore TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:47:43.378082Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:47:43.378110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:47:43.384375Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] Bootstrap: attempt# 0 2024-11-21T17:47:43.384402Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] AllocateResource 2024-11-21T17:47:43.384467Z node 2 :DATASHARD_RESTORE INFO: [Import] [s3:102] Handle TEvResourceBroker::TEvResourceAllocated { TaskId: 1 } 2024-11-21T17:47:43.384473Z node 2 :DATASHARD_RESTORE NOTICE: [Import] [s3:102] Restart: attempt# 0 2024-11-21T17:47:43.387583Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] HeadObject: key# /data_00.csv FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:47:43.388755Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:43.388769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: 2024-11-21T17:47:43.388870Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 localhost:28872 2024-11-21T17:47:43.388900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 86012697-8FFA-416F-B6B7-762999ADD456 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T17:47:43.389011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.389020Z node 2 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:43.389129Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] Handle 
NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. } 2024-11-21T17:47:43.389139Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] HeadObject: key# /data_00.csv.zst 2024-11-21T17:47:43.389217Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:47:43.389229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:47:43.389234Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:47:43.389239Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:47:43.389245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:47:43.389262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:28872 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5CFB92CC-C77B-41E5-ABD2-3415D4649BF4 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2024-11-21T17:47:43.390068Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } 2024-11-21T17:47:43.390293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:47:43.411207Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 } } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:47:43.422235Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 } } 2024-11-21T17:47:43.422262Z node 2 :DATASHARD_RESTORE NOTICE: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 } 2024-11-21T17:47:43.422278Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-22 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:28872 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F3F71A13-C78A-4DB5-B706-08B4FCF7997F amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2024-11-21T17:47:43.423218Z node 2 :DATASHARD_RESTORE DEBUG: [Import] [s3:102] Handle 
NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2024-11-21T17:47:43.423234Z node 2 :DATASHARD_RESTORE TRACE: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2024-11-21T17:47:43.423263Z node 2 :DATASHARD_RESTORE NOTICE: [Import] [s3:102] Finish: success# 0, error# Value parse error: '"a1"' m is expected. on line: "a1","value1", writtenBytes# 0, writtenRows# 0 2024-11-21T17:47:43.423276Z node 2 :DATASHARD_RESTORE INFO: [Import] [s3:102] Upload rows: count# 0, size# 8 2024-11-21T17:47:43.425115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 8589936886 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'\"a1\"\' m is expected. on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:47:43.425134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:47:43.425155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 8589936886 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'\"a1\"\' m is expected. on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:47:43.425170Z node 2 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 306 RawX2: 8589936886 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'\"a1\"\' m is expected. 
on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:47:43.425183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:43.425187Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.425191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:47:43.425199Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:47:43.425242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:43.425735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.425779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.425785Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:47:43.425797Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:47:43.425813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:47:43.425820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:47:43.425832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:339:2314] message: TxId: 102 2024-11-21T17:47:43.425839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:47:43.425844Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:47:43.425848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:47:43.425873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:47:43.426307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:47:43.426317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:386:2360] TestWaitNotification: OK eventTxId 102 >> KqpSinkLocks::EmptyRangeOlap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateNotNullColumnTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader 
for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:21.655245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:21.655265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.655269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:21.655274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:21.655279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:21.655283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:21.655291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.655351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:21.663132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:21.663156Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.665429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:21.665510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:21.665530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:21.668032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:21.668095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:21.668178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.668360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.668954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.669153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.669159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.669167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:21.669171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.669175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:21.669199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:21.670370Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.683863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:21.683937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.683982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:21.684026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:21.684031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.685042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.685071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:21.685118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.685126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:21.685129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:21.685133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:21.685497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.685504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:21.685507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:21.685861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.685876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:47:21.685882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.685888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.686451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:21.686861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:21.686915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:21.687054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.687075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:21.687080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.687138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:21.687143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.687164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:21.687182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.687626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.687641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.687682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.687688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:21.687758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.687765Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:21.687776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#1:0 progress is 1/1 2024-11-21T17:47:21.687780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.687786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:21.687790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.687795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:21.687798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:21.687810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:21.687815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:21.687819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... sg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:43.911494Z node 87 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:43.911496Z node 87 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T17:47:43.911502Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:47:43.911508Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:47:43.911797Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 236 } } 2024-11-21T17:47:43.911804Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:47:43.911816Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 236 } } 2024-11-21T17:47:43.911824Z node 87 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 236 } } 2024-11-21T17:47:43.912076Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 373662157087 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:47:43.912082Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:47:43.912092Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 373662157087 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:47:43.912096Z node 87 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:47:43.912100Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 347 RawX2: 373662157087 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:47:43.912107Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:43.912110Z node 87 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.912112Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:47:43.912115Z node 87 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:47:43.912316Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:43.912338Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:43.912654Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.912679Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.912735Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:43.912741Z node 87 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:47:43.912750Z node 87 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:47:43.912753Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:43.912757Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:47:43.912761Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:43.912765Z node 87 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:47:43.912768Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:47:43.912783Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1002 2024-11-21T17:47:43.913358Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:47:43.913367Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 TestWaitNotification wait 
txId: 1003 2024-11-21T17:47:43.913381Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:47:43.913384Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:47:43.913490Z node 87 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:47:43.913506Z node 87 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:47:43.913515Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:47:43.913519Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [87:424:2399] 2024-11-21T17:47:43.913533Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:43.913536Z node 87 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [87:424:2399] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:43.913593Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:43.913637Z node 87 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 55us result status StatusSuccess 2024-11-21T17:47:43.913718Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "TestNotNullTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:43.913767Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/TestNotNullTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:43.913787Z node 87 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/DirB/TestNotNullTable" took 22us result status StatusSuccess 2024-11-21T17:47:43.913863Z node 87 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/TestNotNullTable" PathDescription { Self { Name: "TestNotNullTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TestNotNullTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: true IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 2503, MsgBus: 65158 2024-11-21T17:47:30.507370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790669251161155:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:30.507651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001496/r3tmp/tmpsalk9r/pdisk_1.dat 2024-11-21T17:47:30.560599Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2503, node 1 2024-11-21T17:47:30.571028Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:30.571039Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:30.571040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:30.571066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65158 TClient is connected to server localhost:65158 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:30.608466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:30.608500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:30.609651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:30.639146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:30.757439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790669251161765:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:30.757454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790669251161754:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:30.757468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:30.758008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:47:30.759210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790669251161768:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:47:30.883202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:30.892539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:30.892588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:30.892616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:30.892632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:30.892649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:30.892663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:30.892684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:30.892699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:30.892714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:30.892727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:30.892742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:30.892758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439790669251161971:2312];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:30.892864Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:30.892885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:30.892924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:30.892952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:30.892973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:30.892994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:30.893012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:30.893032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:30.893052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:30.893071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:30.893090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:30.893110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439790669251161976:2316];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:30.893137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:47:30.893152Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:47:30.893171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:47:30.893181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:47:30.893196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abst ... pl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675786Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675812Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675821Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675897Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675897Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675937Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675955Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675988Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.675991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676089Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.676586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677017Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037957;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677159Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677181Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677207Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677237Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677280Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T17:47:38.677463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.677504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:38.806482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7439790702047639903:2393];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T17:47:38.813037Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037969;task_id=b31365aa-a83011ef-8c07b59e-e3b2c7b7;fline=with_appended.cpp:80;portions=;task_id=b31365aa-a83011ef-8c07b59e-e3b2c7b7; 2024-11-21T17:47:38.818690Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;task_id=b313fbfa-a83011ef-b1466e09-92a620d;fline=with_appended.cpp:80;portions=3,;task_id=b313fbfa-a83011ef-b1466e09-92a620d; 2024-11-21T17:47:38.819119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;task_id=b3148dea-a83011ef-9d440488-f1972ead;fline=with_appended.cpp:80;portions=;task_id=b3148dea-a83011ef-9d440488-f1972ead; 2024-11-21T17:47:38.819298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;task_id=b31481ba-a83011ef-aaa83740-79278ee4;fline=with_appended.cpp:80;portions=;task_id=b31481ba-a83011ef-aaa83740-79278ee4; 2024-11-21T17:47:38.819446Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=b31490d8-a83011ef-b9a05344-5564dbbc;fline=with_appended.cpp:80;portions=;task_id=b31490d8-a83011ef-b9a05344-5564dbbc; 2024-11-21T17:47:38.820643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;task_id=b314cb98-a83011ef-a7092c18-bddd8b11;fline=with_appended.cpp:80;portions=;task_id=b314cb98-a83011ef-a7092c18-bddd8b11; 2024-11-21T17:47:38.822078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7439790702047640512:2448];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T17:47:38.825863Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;task_id=b315c822-a83011ef-9b835946-7db20bd5;fline=with_appended.cpp:80;portions=;task_id=b315c822-a83011ef-9b835946-7db20bd5; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T17:47:42.164647Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790702047638342:2093];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:42.164684Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2024-11-21T17:47:41.028125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790716428774480:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:41.028312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001792/r3tmp/tmpPaTnZ3/pdisk_1.dat 2024-11-21T17:47:41.148220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:41.148264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:41.152723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13866, node 1 2024-11-21T17:47:41.169121Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:47:41.169131Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:47:41.184458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2024-11-21T17:47:41.184470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:41.184475Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:41.184515Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:41.200325Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:41.262814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:41.272766Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:41.639223Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:47:41.639622Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2024-11-21T17:47:41.639637Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format 2024-11-21T17:47:41.855493Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790716578335850:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:41.855675Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001792/r3tmp/tmpFVzAdG/pdisk_1.dat 2024-11-21T17:47:41.870261Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3337, node 2 2024-11-21T17:47:41.880750Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:41.880762Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:41.880763Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:41.880801Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20463 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:41.956918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:41.956957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:47:41.957357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:41.959366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:41.961437Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:41.962420Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T17:47:41.962437Z node 2 :GRPC_CLIENT DEBUG: [418bf2843d0] Connect to grpc://localhost:5840 2024-11-21T17:47:41.963053Z node 2 :GRPC_CLIENT DEBUG: [418bf2843d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T17:47:41.975484Z node 2 :GRPC_CLIENT DEBUG: [418bf2843d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2024-11-21T17:47:41.975636Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2024-11-21T17:47:41.975664Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T17:47:41.976459Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T17:47:41.976510Z node 2 :GRPC_CLIENT DEBUG: [418bf2843d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: 
"bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T17:47:41.977352Z node 2 :GRPC_CLIENT DEBUG: [418bf2843d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2024-11-21T17:47:41.977435Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2024-11-21T17:47:41.977445Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2024-11-21T17:47:42.327382Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790723113096714:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001792/r3tmp/tmpZ16urb/pdisk_1.dat 2024-11-21T17:47:42.328425Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 4148, node 3 2024-11-21T17:47:42.340576Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:42.349554Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:42.349567Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:42.349569Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:42.349609Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31867 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:42.426902Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:42.426937Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:42.428003Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:42.429281Z node 3 :FLAT_TX_SCHEMESHAR ... 
ing.read something.write) 2024-11-21T17:47:42.457472Z node 3 :GRPC_CLIENT DEBUG: [418bf284190] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T17:47:42.463705Z node 3 :GRPC_CLIENT DEBUG: [418bf284190] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T17:47:42.463917Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T17:47:42.463991Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T17:47:42.781005Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790723322089322:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:42.781015Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001792/r3tmp/tmpQaUHvB/pdisk_1.dat 2024-11-21T17:47:42.794672Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65098, node 4 2024-11-21T17:47:42.801187Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:42.801200Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:42.801202Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:42.801249Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:42.883874Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:42.883919Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:42.884044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:42.884769Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:42.888549Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:42.890994Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2024-11-21T17:47:42.891021Z node 4 :GRPC_CLIENT DEBUG: [418bf282f90] Connect to grpc://localhost:6736 2024-11-21T17:47:42.891288Z node 4 :GRPC_CLIENT DEBUG: [418bf282f90] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2024-11-21T17:47:42.895180Z node 4 :GRPC_CLIENT DEBUG: [418bf282f90] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2024-11-21T17:47:42.895277Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2024-11-21T17:47:42.895281Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2024-11-21T17:47:42.895283Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2024-11-21T17:47:42.895287Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2024-11-21T17:47:42.895290Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T17:47:42.895328Z node 4 :GRPC_CLIENT DEBUG: [418bf285ed0] Connect to grpc://localhost:4924 2024-11-21T17:47:42.895462Z node 4 :GRPC_CLIENT DEBUG: [418bf285ed0] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T17:47:42.897326Z node 4 :GRPC_CLIENT DEBUG: [418bf285ed0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T17:47:42.897471Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001792/r3tmp/tmpRlVhA9/pdisk_1.dat 2024-11-21T17:47:43.233819Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:43.259498Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16695, node 5 2024-11-21T17:47:43.288379Z node 5 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:43.288392Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:43.288396Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:43.288434Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:43.329576Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:43.329611Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:43.330041Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:43.342366Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:47:43.348470Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:43.353165Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T17:47:43.353196Z node 5 :GRPC_CLIENT DEBUG: [418bf286c50] Connect to grpc://localhost:5068 2024-11-21T17:47:43.353397Z node 5 :GRPC_CLIENT DEBUG: [418bf286c50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T17:47:43.363584Z node 5 :GRPC_CLIENT DEBUG: [418bf286c50] Status 14 Service Unavailable 2024-11-21T17:47:43.364021Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T17:47:43.364033Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T17:47:43.364039Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T17:47:43.364057Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T17:47:43.364149Z node 5 :GRPC_CLIENT DEBUG: [418bf286c50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T17:47:43.370513Z node 5 :GRPC_CLIENT DEBUG: [418bf286c50] Status 1 CANCELLED 2024-11-21T17:47:43.370800Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2024-11-21T17:47:43.370817Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2024-11-21T17:47:43.370825Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::UnsupportedReplicationConsistency [GOOD] Test command err: 2024-11-21T17:47:42.979987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790720296494817:2060];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:42.980103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001d0f/r3tmp/tmpLSBQVP/pdisk_1.dat 2024-11-21T17:47:43.040678Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28860 TServer::EnableGrpc on GrpcPort 3807, node 1 
2024-11-21T17:47:43.066841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:43.066857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:43.066859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:43.066894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:43.080018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:43.080038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:43.081103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:43.116253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:43.119188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211263223 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211263167 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211263223 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... (TRUNCATED) 2024-11-21T17:47:43.192389Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.192425Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.192427Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T17:47:43.193788Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T17:47:43.306762Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211263223, tx_id: 281474976715658 } } } 2024-11-21T17:47:43.306896Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T17:47:43.307559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:43.308082Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2024-11-21T17:47:43.308107Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2024-11-21T17:47:43.324494Z node 1 :REPLICATION_CONTROLLER TRACE: 
[DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2024-11-21T17:47:43.324514Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211263223 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263370 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T17:47:43.447073Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790724451814465:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:43.447334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001d0f/r3tmp/tmpxc89Fi/pdisk_1.dat 2024-11-21T17:47:43.463374Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21232 TServer::EnableGrpc on GrpcPort 27904, node 2 2024-11-21T17:47:43.478137Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:43.478149Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:43.478151Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:43.478199Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21232 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:43.547569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:43.547610Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:43.548652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:43.550350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:43.552148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:43.617395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211263594 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263678 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211263594 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263678 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2024-11-21T17:47:43.632271Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.632328Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:43.632331Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T17:47:43.632512Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T17:47:43.792638Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211263664, tx_id: 281474976715658 } } } 2024-11-21T17:47:43.792709Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T17:47:43.793186Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:479} 2024-11-21T17:47:43.793498Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 
20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732211263678 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_STRONG } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T17:47:43.793535Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Unsupported replication consistency: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2024-11-21T17:47:42.721250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790720908449506:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:42.721308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e0b/r3tmp/tmpSYrF9U/pdisk_1.dat 2024-11-21T17:47:42.790480Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8006 TServer::EnableGrpc on GrpcPort 3267, node 1 2024-11-21T17:47:42.818131Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:42.818141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:42.818143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:42.818175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:42.820163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:42.820194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:42.824252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8006 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:42.848655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:42.851889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211262957 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211262894 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211262957 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2024-11-21T17:47:42.919918Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:42.919942Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:42.919945Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T17:47:42.920256Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T17:47:42.998690Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732211262957, tx_id: 281474976710658 } } } 2024-11-21T17:47:42.998785Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T17:47:42.999166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:42.999441Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2024-11-21T17:47:42.999453Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2024-11-21T17:47:43.007058Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2024-11-21T17:47:43.007432Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732211263055 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false 
IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceC ... 
rceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2024-11-21T17:47:43.012891Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 
PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732211263055 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732211263055 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732211263055 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732211263055 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> LocalPartition::DiscoveryServiceBadPort [GOOD] >> LocalPartition::DiscoveryServiceBadNodeId >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor |77.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |77.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut ------- 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 63872, MsgBus: 9308 2024-11-21T17:47:33.274616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790684783845193:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:33.274813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001444/r3tmp/tmpxzuaxO/pdisk_1.dat 2024-11-21T17:47:33.330765Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63872, node 1 2024-11-21T17:47:33.339999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:33.340012Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:33.340015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:33.340053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9308 TClient is connected to server localhost:9308 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:33.375696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:33.375728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:47:33.376878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:33.406350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.530904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790684783845799:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.530924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790684783845807:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.530929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:33.531680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:47:33.533182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790684783845813:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:47:33.630965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:33.644084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:33.644117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:33.644146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:33.644166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:33.644190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:33.644206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:33.644222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:33.644262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:33.644291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:33.644296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:33.644308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:33.644319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:33.644333Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7439790684783846015:2312];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:33.644334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:33.644368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:33.644390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:33.644407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:33.644429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:33.644446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:33.644467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:33.644489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:33.644511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:33.644537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:33.644553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790684783846016:2313];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:33.645027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:47:33.645036Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:47:33.645049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:47:33.645054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:47:33.645069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstr ... ed; 2024-11-21T17:47:40.948192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948296Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948370Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948402Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948557Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948695Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948724Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948789Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038011;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.948882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.995888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.995888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.995934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.996114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.996441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.996482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.996542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.996578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:40.996984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.123342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037891;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 53657373696F6E32 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665},{"inc":{"count":1},"id":281474976715666}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665},{"inc":{"count_not_include":1},"id":281474976715666}],"finishes":[]},"p":{"include":1,"pk":"11;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665},{"inc":{"count":1},"id":281474976715666}],"starts":[{"inc":{"count_include":1},"id":281474976715666}],"finishes":[{"inc":{"count_include":1},"id":281474976715666}]},"p":{"include":0,"pk":"11;Session2;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665},{"inc":{"count_not_include":1},"id":281474976715666}]},"p":{"include":2147483647,"pk":"11;"}}]}; 2024-11-21T17:47:41.129028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;commit_tx_id=281474976715668;commit_lock_id=281474976715666;fline=manager.cpp:89;broken_lock_id=281474976715665; 2024-11-21T17:47:41.129157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 
2024-11-21T17:47:41.129982Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130412Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.130428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:47:41.143898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;local_tx_no=16;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037896;tx_state=complete;fline=interaction.h:353;batch=Group: [ 11 ] Name: [ 53657373696F6E31 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_not_include":1},"id":281474976715665}],"finishes":[]},"p":{"include":1,"pk":"11;"}},{"i":{"txs":[{"inc":{"count":1},"id":281474976715665}],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"11;Session1;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":1},"id":281474976715665}]},"p":{"include":2147483647,"pk":"11;"}}]}; 2024-11-21T17:47:41.144354Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037891;self_id=[2:7439790715473042146:2313];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=0; 2024-11-21T17:47:41.144609Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790719768017568:3201], SessionActorId: [2:7439790719768017114:3201], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[2:7439790719768017568:3201].{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T17:47:41.144639Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790719768017568:3201], SessionActorId: [2:7439790719768017114:3201], Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 }. statusCode=ABORTED. subIssues=
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 . sessionActorId=[2:7439790719768017114:3201]. isRollback=0 2024-11-21T17:47:41.144659Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmExMTdmYzgtY2EyYjAzNzgtZmViZTgzMS1lNjcxODA1OA==, ActorId: [2:7439790719768017114:3201], ActorState: ExecuteState, TraceId: 01jd7xcfpc5amp7bg6vnba01y1, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439790719768017587:3201] from: [2:7439790719768017568:3201] 2024-11-21T17:47:41.144693Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790719768017587:3201] TxId: 281474976715670. Ctx: { TraceId: 01jd7xcfpc5amp7bg6vnba01y1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmExMTdmYzgtY2EyYjAzNzgtZmViZTgzMS1lNjcxODA1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T17:47:41.144830Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmExMTdmYzgtY2EyYjAzNzgtZmViZTgzMS1lNjcxODA1OA==, ActorId: [2:7439790719768017114:3201], ActorState: ExecuteState, TraceId: 01jd7xcfpc5amp7bg6vnba01y1, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated.{
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T17:47:44.871489Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790711178074067:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:44.871530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true >> TSolomonReboots::CreateDropSolomonWithReboots [GOOD] >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true >> Describe::Statistics [GOOD] >> Describe::Location >> KqpUniqueIndex::ReplaceFkAlreadyExist >> KqpMultishardIndex::DataColumnWriteNull-StreamLookup >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex >> KqpIndexes::SelectConcurentTX2 >> KqpIndexes::CheckUpsertNonEquatableType-NotNull |77.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |77.9%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |77.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TSolomonReboots::CreateDropSolomonWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:20.796858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:20.796882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.796887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 
0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:20.796892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:20.796898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:20.796902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:20.796911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.796985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:20.806866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:20.806885Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.808988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:20.809115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:20.809145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:20.811523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:20.811583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:20.811670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.811839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:20.816547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.816831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.816841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.816854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:20.816861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.816867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:20.816903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:20.818489Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.833599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:20.833718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.833789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:20.833855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:20.833864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.834874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.834908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:20.835007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.835020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:20.835025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:20.835030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:20.835531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.835544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:20.835548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:20.835804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.835810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.835815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.835821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.836224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:20.836626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:20.836674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:20.836832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.836856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:20.836863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.836924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:20.836931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.836962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:20.836975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:20.837426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.837438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.837487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.837492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:20.837573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.837580Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:20.837591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:20.837605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.837611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:20.837616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.837621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:20.837625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:20.837636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:20.837642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:20.837646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId#1004:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:47:47.226391Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 3] name: Solomon type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 1004 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.226395Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:47:47.226416Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:47:47.226430Z node 106 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 130 2024-11-21T17:47:47.226444Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:47.226449Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:47.226535Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:47.226726Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:47:47.226983Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.226990Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.227009Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:47:47.227025Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.227029Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [106:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T17:47:47.227033Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [106:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T17:47:47.227073Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.227082Z node 106 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2024-11-21T17:47:47.227088Z node 106 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:47:47.227092Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:47:47.227097Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:47:47.227101Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:47:47.227105Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:47:47.227108Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 
2024-11-21T17:47:47.227130Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:47:47.227135Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T17:47:47.227139Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:47:47.227142Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:47:47.227211Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:47.227220Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:47.227223Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:47:47.227227Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:47:47.227231Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:47.227274Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:47.227281Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:47:47.227285Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:47:47.227287Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:47:47.227291Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:47.227297Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:47:47.227812Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:47:47.227822Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:47:47.227940Z node 106 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:47:47.231025Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 
2024-11-21T17:47:47.231080Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2024-11-21T17:47:47.231325Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:47:47.231344Z node 106 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:47:47.231364Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 Forgetting tablet 72075186233409547 2024-11-21T17:47:47.231478Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:47:47.231511Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:47:47.231555Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:47:47.231561Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:47:47.231570Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:47.231984Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:47:47.231992Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:47:47.232195Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:47:47.232201Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:47:47.232217Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:47:47.232287Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:47:47.232292Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:47:47.232335Z node 106 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:47:47.232347Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:47:47.232350Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [106:446:2419] TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:47:47.232384Z node 106 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:47:47.232392Z node 106 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted 
tabletId 72075186233409547 2024-11-21T17:47:47.232434Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:47.232458Z node 106 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Solomon" took 31us result status StatusPathDoesNotExist 2024-11-21T17:47:47.232478Z node 106 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |77.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |77.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> KqpUniqueIndex::UpdateFkSameValue >> KqpMultishardIndex::DataColumnWriteNull-StreamLookup [GOOD] >> KqpMultishardIndex::DuplicateUpsert+StreamLookup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:47:37.850273Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790702187672013:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:37.850543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:37.853912Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790702351398617:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b8c/r3tmp/tmpcpRGJr/pdisk_1.dat 2024-11-21T17:47:37.883178Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:37.884191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:37.888578Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:37.931269Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26195, node 1 2024-11-21T17:47:37.953263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T17:47:37.953297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:37.960691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:37.983280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:37.983302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:37.986381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000b8c/r3tmp/yandexbojULB.tmp 2024-11-21T17:47:37.986392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000b8c/r3tmp/yandexbojULB.tmp 2024-11-21T17:47:37.986469Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000b8c/r3tmp/yandexbojULB.tmp 2024-11-21T17:47:37.986522Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:37.987889Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:37.988289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:37.993170Z INFO: TTestServer started on Port 10407 GrpcPort 26195 TClient is connected to server localhost:10407 PQClient connected to localhost:26195 === TenantModeEnabled() = 1 === Init PQ - start server on port 26195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:38.043086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:47:38.043134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.043190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:47:38.043235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:38.043240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.043928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:38.043943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:38.043985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.043994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:38.043995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T17:47:38.043999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:47:38.044473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.044485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:38.044488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:47:38.044793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.044800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.044804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:38.044808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:47:38.045441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:38.045820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T17:47:38.045850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:47:38.046387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211258092, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:38.046411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439790702187672559 RawX2: 4294969642 } } Step: 1732211258092 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:47:38.046416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:38.046473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:47:38.046479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:47:38.046504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:47:38.046518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:47:38.046845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:38.046850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:47:38.046888Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:38.046891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439790702187672593:2377], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T17:47:38.046897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.046901Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:47:38.046909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:47:38.046911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:47:38.046915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T17:47:38.046918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:47:38.046920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:47:38.046922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T17:47:38.046939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:47:38.046943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T17:47:38.046945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:47:38.047465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:47:38.047477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, ... K, TabletId: 72075186224037899, NodeId 1, Generation: 1 2024-11-21T17:47:46.909346Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:46.909352Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server connected, pipe [1:7439790740842380609:2558], now have 1 active actors on pipe 2024-11-21T17:47:46.909429Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:46.909438Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T17:47:46.909466Z node 1 :PERSQUEUE INFO: new Cookie 1236|5deb9e97-60e7078b-8112c418-d106e5a9_0 generated for partition 0 topic 'PQ/account3/folder1/folder2/topic' owner 1236 2024-11-21T17:47:46.909501Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T17:47:46.909521Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:47:46.909615Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:46.909620Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T17:47:46.909633Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:47:46.909651Z node 1 :PQ_WRITE_PROXY INFO: session inited cookie: 22 partition: 0 MaxSeqNo: 0 sessionId: 1236|5deb9e97-60e7078b-8112c418-d106e5a9_0 2024-11-21T17:47:46.911474Z :INFO: [] MessageGroupId [1236] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211266911 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:46.911505Z :INFO: [] MessageGroupId [1236] SessionId [] Write session established. Init response: session_id: "1236|5deb9e97-60e7078b-8112c418-d106e5a9_0" topic: "PQ/account3/folder1/folder2/topic" 2024-11-21T17:47:46.911836Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write 1 messages with Id from 1 to 1 2024-11-21T17:47:46.912013Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session: try to update token 2024-11-21T17:47:46.912027Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Send 1 message(s) (0 left), first sequence number is 1 2024-11-21T17:47:46.912863Z :INFO: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session: close. 
Timeout = 10000 ms 2024-11-21T17:47:46.915256Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|5deb9e97-60e7078b-8112c418-d106e5a9_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:47:46.915444Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:47:46.915579Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:46.915594Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T17:47:46.915632Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T17:47:46.915652Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:47:46.915813Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:47:46.915818Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2024-11-21T17:47:46.915890Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2024-11-21T17:47:46.915945Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2024-11-21T17:47:46.915989Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2024-11-21T17:47:46.915994Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2024-11-21T17:47:46.916034Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2024-11-21T17:47:47.335516Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790745137347917:2562], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:47.335607Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDY0OWUwZGEtMjllNmZjYTctNDdhYjEwM2UtNmNmYmRjNDU=, ActorId: [1:7439790745137347915:2561], ActorState: ExecuteState, TraceId: 01jd7xcnr432xdawazzqe57sw2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:47.335730Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:47.727605Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0: Cookie: 7 2024-11-21T17:47:47.727660Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2024-11-21T17:47:47.727670Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2024-11-21T17:47:47.727674Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2024-11-21T17:47:47.727892Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2024-11-21T17:47:47.728008Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1732211267727 2024-11-21T17:47:47.728169Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:47:47.730663Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 2024-11-21T17:47:47.730681Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:47:47.730698Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2024-11-21T17:47:47.730702Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:47:47.730707Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2024-11-21T17:47:47.730712Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:47:47.730717Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2024-11-21T17:47:47.730726Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:47:47.730768Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:47:47.731160Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 3 queued_in_partition_duration_ms: 812 throttled_on_partition_duration_ms: 811 } 2024-11-21T17:47:47.731187Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session: acknoledged message 1 2024-11-21T17:47:47.816849Z :INFO: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session will now close 2024-11-21T17:47:47.816873Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session: aborting 2024-11-21T17:47:47.817093Z :INFO: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:47.817102Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5deb9e97-60e7078b-8112c418-d106e5a9_0] Write session: destroy DURATION 3.074929s 2024-11-21T17:47:47.821813Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|5deb9e97-60e7078b-8112c418-d106e5a9_0 grpc read done: success: 0 data: 2024-11-21T17:47:47.821842Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|5deb9e97-60e7078b-8112c418-d106e5a9_0 grpc read failed 2024-11-21T17:47:47.821852Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|5deb9e97-60e7078b-8112c418-d106e5a9_0 grpc closed 2024-11-21T17:47:47.821855Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|5deb9e97-60e7078b-8112c418-d106e5a9_0 is DEAD 2024-11-21T17:47:47.822118Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:47.822207Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:47.822223Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7439790740842380609:2558] destroyed 2024-11-21T17:47:47.822236Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::ReadWithRestarts >> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex [GOOD] >> KqpIndexes::DeleteByIndex >> KqpIndexes::SelectConcurentTX2 [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable >> KqpIndexes::SecondaryIndexOrderBy >> KqpMultishardIndex::DataColumnWriteNull+StreamLookup >> KqpMultishardIndex::DuplicateUpsert+StreamLookup [GOOD] |77.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |77.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> KqpMultishardIndex::DataColumnSelect+StreamLookup >> KqpIndexes::DeleteByIndex [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15838, MsgBus: 30071 2024-11-21T17:47:47.711695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790745385029163:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:47.711848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021ef/r3tmp/tmpR0bcRU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15838, node 1 2024-11-21T17:47:47.770566Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:47.770582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:47.770584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:47.770618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:47.771001Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30071 2024-11-21T17:47:47.812735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:47.812766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:47.813887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30071 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:47.851416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:47.858859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.919002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:47.938033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.949904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.013198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790749679998011:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.013224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.040055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.046302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.053985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.061258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.068265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.075029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.083721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790749679998505:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.083742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.083788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790749679998510:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.084514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:48.088323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790749679998512:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:48.284025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 6694, MsgBus: 18890 2024-11-21T17:47:48.612395Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790748230714746:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:48.612490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021ef/r3tmp/tmpjPkRxs/pdisk_1.dat 2024-11-21T17:47:48.620503Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6694, node 2 2024-11-21T17:47:48.629576Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:48.629586Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:48.629588Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:48.629613Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18890 TClient is connected to server localhost:18890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:48.714429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:48.714471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:48.714860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.715444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:48.736877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:48.746653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.772832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.785655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.900650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790748230716287:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.900686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.909487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.919441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.930287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.946682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.958177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.972007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.990241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790748230716800:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.990263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.990311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790748230716805:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.991056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:48.999004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790748230716807:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:49.179458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 21262, MsgBus: 31700 2024-11-21T17:47:47.713984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790743472693016:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:47.714065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021e1/r3tmp/tmp2Llcrm/pdisk_1.dat 2024-11-21T17:47:47.770045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21262, node 1 2024-11-21T17:47:47.773554Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:47:47.777536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:47.777548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:47.777549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:47.777585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31700 TClient is connected to server localhost:31700 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:47.816516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:47.816553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:47.821164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:47.848534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:47.857942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:47.864584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.931681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.947351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.958703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.033676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790747767661650:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.033726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.040449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.048589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.103711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.159038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.166029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.173244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.181947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790747767662167:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.181981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.181983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790747767662172:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.182534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:48.186141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790747767662174:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:48.334441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 204 cpu_time_us: 204 } query_phases { duration_us: 350 cpu_time_us: 350 } query_phases { duration_us: 2145 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 1468 affected_shards: 1 } query_phases { duration_us: 226 cpu_time_us: 226 } query_phases { duration_us: 766 cpu_time_us: 766 } query_phases { duration_us: 472 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 625 } query_phases { duration_us: 279 cpu_time_us: 279 } query_phases { duration_us: 755 cpu_time_us: 755 } query_phases { duration_us: 910 cpu_time_us: 798 } query_phases { duration_us: 1650 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 621 affected_shards: 2 } compilation { duration_us: 93210 cpu_time_us: 90829 } process_cpu_time_us: 2614 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732211268445,\"TaskId\":1,\"Host\":\"ghrun-2rqap2spfm\",\"ComputeTimeUs\":27}],\"CpuTimeUs\":114}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1732211268445,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":114,\"Max\":114,\"Min\":114}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1732211268445,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732211268445,\"TaskId\":2,\"Host\":\"ghrun-2rqap2spfm\",\"ComputeTimeUs\":24}],\"CpuTimeUs\":142}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"BaseTimeMs\":1732211268445,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":142,\"Max\":142,\"Min\":142}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":35,\"Subplan Name\":\"CTE 
precompute_8_1\",\"Plans\":[{\"PlanNodeId\":34,\"Plans\":[{\"PlanNodeId\":33,\"Plans\":[{\"PlanNodeId\":32,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1}],\"Iterator\":\"Map\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"E-Rows\":\"1\",\"Predicate\":\"Nth\",\"Name\":\"Filter\",\"E-Size\":\"5\",\"E-Cost\":\"0\"}],\"Node Type\":\"ConstantExpr-Filter\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732211268443,\"StartTimeMs\":1732211268443,\"TaskId\":2,\"Host\":\"ghrun-2rqap2spfm\",\"ComputeTimeUs\":2,\"OutputChannels\":[{\"ChannelId\":2,\"DstStageId\":3}]}],\"PeakMemoryUsageBytes\":65536,\"CpuTimeUs\":148}],\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"34\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"Tasks\":1,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"BaseTimeMs\":1732211268443,\"CpuTimeUs\":{\"Count\":1,\"Sum\":103,\"Max\":103,\"Min\":103}}}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Collect\",\"S ... ($173) (block \'(\n (let $174 (AsStruct \'(\'\"Key\" (Member $173 \'\"Key\"))))\n (return (IfPresent (Lookup $42 $174) (lambda \'($175) (Just \'($174 $175 (Or (AggrNotEquals (Member $173 \'\"fk1\") (Member $175 \'\"fk1\")) (AggrNotEquals (Member $173 \'\"fk3\") (Member $175 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $19 $40 $52)))))\n)))) (lambda \'($176) (Nth $176 \'0)) (lambda \'($177) \'((Nth $177 \'1) (Nth $177 \'2))) $31)))) \'(\'(\'\"_logical_id\" \'5220) \'(\'\"_id\" \'\"a1fad019-13395864-cc9529b4-f8e152d3\"))))\n(let $90 (DqPhyStage \'() (lambda \'() (ToStream (Just $88))) \'(\'(\'\"_logical_id\" \'5233) \'(\'\"_id\" \'\"73858670-ab1145d3-1d704bed-f42958cc\"))))\n(let $91 (DqCnValue (TDqOutput $89 \'0)))\n(let $92 (DqCnValue (TDqOutput $90 \'0)))\n(let $93 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $94 \'($87 $93))\n(let $95 (KqpPhysicalTx \'($89 $90) \'($91 $92) \'($47 $94) $3))\n(let $96 \'\"%kqp%tx_result_binding_7_0\")\n(let $97 (DictType $19 (TupleType $40 $52)))\n(let $98 %kqp%tx_result_binding_7_0)\n(let $99 (DqPhyStage \'() (lambda \'() (Iterator (FlatMap (Map $88 (lambda \'($178) (AsStruct \'(\'\"Key\" (Member $178 \'\"Key\")) \'(\'\"fk1\" (Member $178 \'\"fk1\")) \'(\'\"fk3\" (Member $178 \'\"fk3\"))))) (lambda \'($179) (block \'(\n (let $180 \'(\'\"Key\" (Member $179 \'\"Key\")))\n (let $181 \'(\'\"fk1\" (Member $179 \'\"fk1\")))\n (let $182 \'(\'\"fk3\" (Member $179 \'\"fk3\")))\n (return (IfPresent (Lookup $98 (AsStruct $180)) (lambda \'($183) (If (Nth $183 \'1) (Just (AsStruct $180 $181 \'(\'\"fk2\" (Member (Nth $183 \'0) \'\"fk2\")) $182)) (Nothing (OptionalType $27)))) (Just (AsStruct $180 $181 $43 $182))))\n)))))) \'(\'(\'\"_logical_id\" \'5855) \'(\'\"_id\" \'\"db8e4662-d8912762-f2e79d2f-32be47d3\"))))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator (Map (Filter (DictItems $98) (lambda \'($184) (Nth (Nth $184 \'1) \'1))) (lambda \'($185) (block \'(\n (let $186 (Nth (Nth $185 \'1) \'0))\n (return (AsStruct \'(\'\"Key\" (Member (Nth $185 \'0) \'\"Key\")) \'(\'\"fk1\" (Member $186 \'\"fk1\")) \'(\'\"fk2\" (Member $186 \'\"fk2\")) \'(\'\"fk3\" (Member $186 \'\"fk3\"))))\n)))))) \'(\'(\'\"_logical_id\" \'5869) \'(\'\"_id\" \'\"d5ad7b73-f9bd6057-911bf400-8ce157ee\"))))\n(let $101 (DqCnUnionAll (TDqOutput $99 \'0)))\n(let $102 (lambda \'($187) $187))\n(let $103 (DqPhyStage \'($101) $102 \'(\'(\'\"_logical_id\" \'5965) \'(\'\"_id\" 
\'\"7fdffab5-116816e-c5ad6e5b-347f0f1e\"))))\n(let $104 (DqCnUnionAll (TDqOutput $100 \'0)))\n(let $105 (DqPhyStage \'($104) $102 \'(\'(\'\"_logical_id\" \'5980) \'(\'\"_id\" \'\"9e61a4c6-53d1a55f-894a006c-a6b05bd2\"))))\n(let $106 \'($99 $100 $103 $105))\n(let $107 (DqCnResult (TDqOutput $103 \'0) \'()))\n(let $108 (DqCnResult (TDqOutput $105 \'0) \'()))\n(let $109 (KqpTxResultBinding $97 \'\"7\" \'0))\n(let $110 (KqpPhysicalTx $106 \'($107 $108) \'($94 \'($96 $109)) $3))\n(let $111 \'\"%kqp%tx_result_binding_7_1\")\n(let $112 (DqPhyStage \'() (lambda \'() (block \'(\n (let $188 \'(\'\"Key\" \'\"Value\" \'\"fk1\" \'\"fk3\"))\n (return (KqpEffects (KqpUpsertRows $22 (Iterator %kqp%tx_result_binding_7_1) $188 \'(\'(\'\"Mode\" \'\"upsert\")))))\n))) \'(\'(\'\"_logical_id\" \'6495) \'(\'\"_id\" \'\"63d6dcba-7c5fe718-c6482c42-6b608858\"))))\n(let $113 \'\"%kqp%tx_result_binding_8_1\")\n(let $114 (ListType $27))\n(let $115 (DqPhyStage \'() (lambda \'() (KqpEffects (KqpDeleteRows $64 (Iterator %kqp%tx_result_binding_8_1)))) \'(\'(\'\"_logical_id\" \'6509) \'(\'\"_id\" \'\"26ea8ac5-6bfe9471-dd6d33c6-f6d7d6cf\"))))\n(let $116 \'\"%kqp%tx_result_binding_8_0\")\n(let $117 (DqPhyStage \'() (lambda \'() (block \'(\n (let $189 \'(\'\"fk1\" \'\"fk2\" \'\"fk3\" \'\"Key\"))\n (return (KqpEffects (KqpUpsertRows $64 (Iterator %kqp%tx_result_binding_8_0) $189 \'())))\n))) \'(\'(\'\"_logical_id\" \'6523) \'(\'\"_id\" \'\"52f4532c-ad4a5b9b-d8672bec-3c0f3419\"))))\n(let $118 \'($112 $115 $117))\n(let $119 (KqpTxResultBinding $11 \'\"7\" \'1))\n(let $120 (KqpTxResultBinding $114 \'\"8\" \'0))\n(let $121 (KqpTxResultBinding $114 \'\"8\" \'1))\n(let $122 \'(\'($111 $119) \'($116 $120) \'($113 $121)))\n(let $123 (KqpPhysicalTx $118 \'() $122 \'($36 \'(\'\"with_effects\"))))\n(let $124 \'($4 $17 $38 $48 $61 $76 $86 $95 $110 $123))\n(return (KqpPhysicalQuery $124 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 103657 total_cpu_time_us: 99535 Trying to start YDB, gRPC: 24858, MsgBus: 13203 2024-11-21T17:47:48.808501Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790746078727420:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:48.808672Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021e1/r3tmp/tmpQLgMv1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24858, node 2 2024-11-21T17:47:48.826037Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:48.827174Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:48.827185Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:48.827188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:48.827223Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13203 TClient is connected to server localhost:13203 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:47:48.910878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:48.910904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:48.911285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.911907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:48.920612Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:48.928986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.947462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.972749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.982347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.129191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750373696255:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.129222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.135935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.143608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.156225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.167426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.182244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.195572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.214196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750373696766:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.214234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.214443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750373696771:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.215173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:49.216953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790750373696773:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:49.379860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] Test command err: Trying to start YDB, gRPC: 62582, MsgBus: 9152 2024-11-21T17:47:47.768610Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790744113774599:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:47.768809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021ea/r3tmp/tmp2m4AfQ/pdisk_1.dat 2024-11-21T17:47:47.824869Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62582, node 1 2024-11-21T17:47:47.845604Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:47.845619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:47.845621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:47.845661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9152 2024-11-21T17:47:47.869910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:47.869935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:47.870955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:47.907389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.918551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:47:47.940916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:47.959118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.971726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.096798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748408743422:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.096826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.121912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.128923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.138236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.145607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.200186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.208615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.216029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748408743938:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.216049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748408743943:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.216058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.216769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:48.221964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790748408743945:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:48.383316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.562502Z node 1 :TX_DATASHARD ERROR: Complete [1732211268606 : 281474976715681] from 72075186224037920 at tablet 72075186224037920, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2024-11-21T17:47:48.563631Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjVmM2VlYjUtMTYwMjIzYTQtNDUyMWVmZGItNDgzZWM2NmI=, ActorId: [1:7439790748408744228:2454], ActorState: ExecuteState, TraceId: 01jd7xcpwx7z0g8qa7p7b0crrf, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 3131, MsgBus: 14291 2024-11-21T17:47:48.883165Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790748536924465:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021ea/r3tmp/tmpe9BMqB/pdisk_1.dat 2024-11-21T17:47:48.888012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:48.891454Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3131, node 2 2024-11-21T17:47:48.900946Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:48.900957Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:48.900959Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:48.900998Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14291 TClient is connected to server localhost:14291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:48.984495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:48.984526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:48.985159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.985513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:48.986031Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:48.995287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.004378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.025104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.035513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.185314Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790752831893151:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.185335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.190256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.201394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.209421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.216198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.223132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.230723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.246297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790752831893664:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.246321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790752831893669:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.246322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.246843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:49.250492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790752831893671:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:49.435470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.461682Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790752831894133:2454] TxId: 281474976715672. Ctx: { TraceId: 01jd7xcqtn24qmj37xfnsvx1h6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T17:47:49.461771Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, ActorId: [2:7439790752831893956:2454], ActorState: ExecuteState, TraceId: 01jd7xcqtn24qmj37xfnsvx1h6, Create QueryResponse for error on request, msg: 2024-11-21T17:47:49.463181Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790752831894146:2454] TxId: 281474976715674. Ctx: { TraceId: 01jd7xcqtp070q2nqerk2styrw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T17:47:49.463223Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, ActorId: [2:7439790752831893956:2454], ActorState: ExecuteState, TraceId: 01jd7xcqtp070q2nqerk2styrw, Create QueryResponse for error on request, msg: 2024-11-21T17:47:49.463979Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790752831894155:2454] TxId: 281474976715676. Ctx: { TraceId: 01jd7xcqtqbv3tc4n6kyc11zm2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T17:47:49.464020Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, ActorId: [2:7439790752831893956:2454], ActorState: ExecuteState, TraceId: 01jd7xcqtqbv3tc4n6kyc11zm2, Create QueryResponse for error on request, msg: 2024-11-21T17:47:49.464753Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439790752831894164:2454] TxId: 281474976715678. Ctx: { TraceId: 01jd7xcqtr2dzx1xpds75309bv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2024-11-21T17:47:49.464798Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQxMTA2OTEtNDg5ZGRlNmItODE1N2IyMWUtN2M0NjlmNjI=, ActorId: [2:7439790752831893956:2454], ActorState: ExecuteState, TraceId: 01jd7xcqtr2dzx1xpds75309bv, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2024-11-21T17:47:35.556431Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790690774683386:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:35.556632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:35.559184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790692372176865:2086];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:35.581026Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:35.582724Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:35.583825Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000bca/r3tmp/tmpcr4kTh/pdisk_1.dat 2024-11-21T17:47:35.609041Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19339, node 1 2024-11-21T17:47:35.623291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000bca/r3tmp/yandexv93XSb.tmp 2024-11-21T17:47:35.623303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000bca/r3tmp/yandexv93XSb.tmp 2024-11-21T17:47:35.623361Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000bca/r3tmp/yandexv93XSb.tmp 2024-11-21T17:47:35.623404Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:35.627344Z INFO: TTestServer started on Port 7077 GrpcPort 19339 TClient is connected to server localhost:7077 PQClient connected to localhost:19339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:35.648547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:35.656616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:35.656639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:35.657825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.657934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:35.685644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:35.685674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:35.687332Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:35.687654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:47:35.894464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790690774684429:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.894483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790690774684438:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.894490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.895182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:35.896474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790690774684472:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.896525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:35.899777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790690774684444:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:35.923655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:35.973787Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790690774684603:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:35.973933Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTQ4ZTI0MjYtYzU0NWFmMzYtOWE3Y2Y0ZjgtNjIxZDU3MmI=, ActorId: [1:7439790690774684426:2303], ActorState: ExecuteState, TraceId: 01jd7xcajpeyxckpg3tc4h7pw2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:35.974505Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:35.987776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:36.013899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:47:36.097260Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd7xcaqjc56e0nta5f0ey81d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRhNThmMWUtZTE3NWZmMjQtNmEwYmZiNDYtNWVlYWIzYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790695069652192:3046] 2024-11-21T17:47:40.558918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790690774683386:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:40.558984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:47:40.561922Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790692372176865:2086];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:40.561959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:47:41.196500Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790690774683690:2175], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:41.196592Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790690774683690:2175], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2024-11-21T17:47:41.196611Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790690774683690:2175], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439790690774684059:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211255698 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:47:41.196627Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439790690774683690:2175], cacheItem# { Subscriber: { Subscriber: [1:7439790690774684059:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211255698 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType ... se: Root, partition 1(assignId:4) wait data in partition inited, cookie 1 from offset10 2024-11-21T17:47:49.267283Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 after read state TopicId: Topic /Root/account2/topic2 in database: Root, partition 1(assignId:4) EndOffset 10 ReadOffset 10 ReadGuid c9cbcdf3-b90283b1-d209a3a4-a8aac74f has messages 0 2024-11-21T17:47:49.267315Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 read done: guid# c9cbcdf3-b90283b1-d209a3a4-a8aac74f, partition# TopicId: Topic /Root/account2/topic2 in database: Root, partition 1(assignId:4), size# 52 2024-11-21T17:47:49.267336Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 empty read result, start new reading: guid# c9cbcdf3-b90283b1-d209a3a4-a8aac74f 2024-11-21T17:47:49.267342Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 Process answer. Aval parts: 0 >>>>> Iteration: 30 Closing session. Got 0 messages 2024-11-21T17:47:49.271584Z :INFO: [/Root] [/Root] [61e18f97-7d87a96a-a05698e-9083026] Closing read session. 
Close timeout: 1.000000s 2024-11-21T17:47:49.271612Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account2/topic2:4:3:0:0 null:account2/topic2:3:5:0:0 null:account2/topic2:2:2:0:0 null:account2/topic2:1:1:0:0 null:account2/topic2:0:4:0:0 2024-11-21T17:47:49.271623Z :INFO: [/Root] [/Root] [61e18f97-7d87a96a-a05698e-9083026] Counters: { Errors: 0 CurrentSessionLifetimeMs: 10 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } >>>>> Iteration: 30 Session closed 2024-11-21T17:47:49.271914Z :INFO: [/Root] [/Root] [61e18f97-7d87a96a-a05698e-9083026] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:49.271921Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account2/topic2:4:3:0:0 null:account2/topic2:3:5:0:0 null:account2/topic2:2:2:0:0 null:account2/topic2:1:1:0:0 null:account2/topic2:0:4:0:0 2024-11-21T17:47:49.271925Z :INFO: [/Root] [/Root] [61e18f97-7d87a96a-a05698e-9083026] Counters: { Errors: 0 CurrentSessionLifetimeMs: 10 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:49.271948Z :NOTICE: [/Root] [/Root] [61e18f97-7d87a96a-a05698e-9083026] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:49.272103Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:49.272120Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 grpc read failed 2024-11-21T17:47:49.272139Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 grpc closed 2024-11-21T17:47:49.272171Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0] Write session: close. Timeout = 0 ms 2024-11-21T17:47:49.272160Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_6687397748024440164_v1 is DEAD 2024-11-21T17:47:49.272176Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0] Write session will now close 2024-11-21T17:47:49.272182Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0] Write session: aborting 2024-11-21T17:47:49.272279Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:49.272284Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0] Write session: destroy 2024-11-21T17:47:49.272293Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:49.272310Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272317Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439790754320963524:2765] destroyed 2024-11-21T17:47:49.272321Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:49.272323Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272326Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439790754320963522:2764] destroyed 2024-11-21T17:47:49.272342Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272345Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272379Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:49.272400Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272407Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439790754320963518:2762] destroyed 2024-11-21T17:47:49.272413Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:49.272415Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272419Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439790754320963520:2763] destroyed 2024-11-21T17:47:49.272425Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:49.272428Z node 4 :PERSQUEUE 
DEBUG: [PQ: 72075186224037904] Destroy direct read session userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272430Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904] server disconnected, pipe [3:7439790754320963526:2766] destroyed 2024-11-21T17:47:49.272443Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272446Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272454Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272483Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0 grpc read done: success: 0 data: 2024-11-21T17:47:49.272492Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0 grpc read failed 2024-11-21T17:47:49.272501Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0 grpc closed 2024-11-21T17:47:49.272504Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|cfa9967-aa699d11-5b4a2210-d50f4fd5_0 is DEAD 2024-11-21T17:47:49.272507Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037905][topic2] pipe [3:7439790754320963515:2759] disconnected; active server actors: 1 2024-11-21T17:47:49.272511Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037905][topic2] pipe [3:7439790754320963515:2759] client userx disconnected session userx_3_31_6687397748024440164_v1 2024-11-21T17:47:49.272685Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037902 (partition=1) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:49.272713Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:49.272721Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439790750025995250:2473] destroyed 2024-11-21T17:47:49.272733Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902, Partition: 1, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:47:49.564982Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790724256188833:2137], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:49.565033Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790724256188833:2137], cacheItem# { Subscriber: { Subscriber: [3:7439790728551157425:3049] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:47:49.565054Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439790754320963546:4482], recipient# [3:7439790754320963545:2768], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:49.567542Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790724256188833:2137], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:49.567578Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790724256188833:2137], cacheItem# { Subscriber: { Subscriber: [3:7439790728551157425:3049] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:47:49.567596Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439790754320963548:4483], recipient# [3:7439790754320963547:2769], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpMultishardIndex::DataColumnWriteNull+StreamLookup [GOOD] >> TxUsage::WriteToTopic_Demo_36 [GOOD] >> 
KqpMultishardIndex::DataColumnWrite-StreamLookup >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexInsert1 >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 6888, MsgBus: 18042 2024-11-21T17:47:47.625887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790744375494501:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:47.626125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021e6/r3tmp/tmpBPiJfV/pdisk_1.dat 2024-11-21T17:47:47.667987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6888, node 1 2024-11-21T17:47:47.681967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:47.681983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:47.681984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:47.682021Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18042 TClient is connected to server localhost:18042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:47.726915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:47.726949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:47.728156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:47.732111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.740608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:47.753400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:47.817206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.849464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.860978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.919372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790744375496051:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:47.919408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:47.957025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.011503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.018540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.025699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.080377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.089046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.097458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748670463865:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.097486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748670463870:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.097494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.098105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:48.102496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790748670463872:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:48.270941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.483553Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xcpt18cwvbrb9t468canf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA5NGU5ZGEtZmY3NjRjOTAtYTBhMTM0ZGYtMjI4ZjU2YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:48.484733Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA5NGU5ZGEtZmY3NjRjOTAtYTBhMTM0ZGYtMjI4ZjU2YmE=, ActorId: [1:7439790748670464878:2512], ActorState: ExecuteState, TraceId: 01jd7xcpt18cwvbrb9t468canf, Create QueryResponse for error on request, msg: 2024-11-21T17:47:48.558443Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xcpw678t7crznqmd6dc1y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA5NGU5ZGEtZmY3NjRjOTAtYTBhMTM0ZGYtMjI4ZjU2YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:48.558518Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA5NGU5ZGEtZmY3NjRjOTAtYTBhMTM0ZGYtMjI4ZjU2YmE=, ActorId: [1:7439790748670464878:2512], ActorState: ExecuteState, TraceId: 01jd7xcpw678t7crznqmd6dc1y, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 24350, MsgBus: 10621 2024-11-21T17:47:48.730751Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790746701561994:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:48.730961Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021e6/r3tmp/tmpCqWQEM/pdisk_1.dat 2024-11-21T17:47:48.744657Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24350, node 2 2024-11-21T17:47:48.751594Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:48.751611Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:48.751613Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:48.751656Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10621 TClient is connected to server localhost:10621 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:48.831389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:48.831415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:48.832524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:48.833279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.836954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.846352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.866797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.881923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.024522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750996530827:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.024636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.027531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.036706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.048033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.055210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.062332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.069214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.081002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750996531339:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.081062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750996531344:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.081066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.081783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:49.089536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790750996531346:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:49.300735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.510788Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xcqt0fq8v746c1t1zg3ra, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWVhNjMyNzMtMTJjNjE5Yi00NzMwYjhmZS0yNzJkYWI2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:49.510859Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWVhNjMyNzMtMTJjNjE5Yi00NzMwYjhmZS0yNzJkYWI2Yg==, ActorId: [2:7439790750996532373:2512], ActorState: ExecuteState, TraceId: 01jd7xcqt0fq8v746c1t1zg3ra, Create QueryResponse for error on request, msg: 2024-11-21T17:47:49.593763Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xcqw83akfft64139qnveb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWVhNjMyNzMtMTJjNjE5Yi00NzMwYjhmZS0yNzJkYWI2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:49.593883Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWVhNjMyNzMtMTJjNjE5Yi00NzMwYjhmZS0yNzJkYWI2Yg==, ActorId: [2:7439790750996532373:2512], ActorState: ExecuteState, TraceId: 01jd7xcqw83akfft64139qnveb, Create QueryResponse for error on request, msg: >> TxUsage::WriteToTopic_Demo_37 >> TxUsage::WriteToTopic_Demo_19_RestartNo >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 26684, MsgBus: 20600 2024-11-21T17:47:47.765624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790744262551643:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:47.765970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021df/r3tmp/tmpgcbuLX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26684, node 1 2024-11-21T17:47:47.844000Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:47.852973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:47.852984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:47.852986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:47.853015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20600 2024-11-21T17:47:47.867774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:47.867830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:47.868796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20600 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:47.909654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.916716Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:47:47.920123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:47.986860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.004134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.013440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.068845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748557520476:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.068885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.097291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.152256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.158950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.166156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.173367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.180293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.188569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748557520981:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.188583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790748557520986:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.188586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.189206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:48.193373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790748557520988:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:48.345570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14828, MsgBus: 4122 2024-11-21T17:47:48.646107Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790748081839607:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:48.646327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021df/r3tmp/tmpDxu9wx/pdisk_1.dat 2024-11-21T17:47:48.658054Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14828, node 2 2024-11-21T17:47:48.666701Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:48.666717Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:48.666718Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:48.666772Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4122 TClient is connected to server localhost:4122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:48.748195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:48.748301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:48.748569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.749330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:48.752657Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:48.764879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:48.774748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.837355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.847225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.966774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790748081841180:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.966800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.970200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.977376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.032496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.041785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.055501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.062337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.070940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790752376808989:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.070962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.071006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790752376808994:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.071617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:49.075167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790752376808996:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:49.242074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexInsert1 [GOOD] >> KqpIndexes::UpsertWithNullKeysSimple >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> KqpIndexes::UpdateIndexSubsetPk >> KqpUniqueIndex::UpdateOnFkAlreadyExist ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 27723, MsgBus: 15868 2024-11-21T17:47:48.913387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790747696990830:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:48.913531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d8/r3tmp/tmpNJyin2/pdisk_1.dat 2024-11-21T17:47:48.981760Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27723, node 1 2024-11-21T17:47:48.989503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:48.989516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:48.989517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:48.989549Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15868 2024-11-21T17:47:49.016569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:49.016589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:49.020674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:47:49.056969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.062213Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:49.068277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.086731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.107637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.122637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.221609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790751991959673:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.221642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.251800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.258539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.267698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.279106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.333892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.342241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.350391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790751991960188:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.350419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790751991960193:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.350419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.350953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:49.355422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790751991960195:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:49.506306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7102, MsgBus: 13386 2024-11-21T17:47:50.196936Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790755089599370:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:50.197075Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d8/r3tmp/tmp6uxGVw/pdisk_1.dat 2024-11-21T17:47:50.211677Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7102, node 2 2024-11-21T17:47:50.217676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:50.217695Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:50.217697Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:50.217737Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13386 TClient is connected to server localhost:13386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:50.296311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:50.296357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:50.297458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:50.299663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:50.301979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:50.316214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:50.335039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:50.343961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:50.516650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790755089600908:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.516706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.519723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.527438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.541939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.553326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.568980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.585221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.604117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790755089601408:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.604141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.604219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790755089601413:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.605282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:50.611160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790755089601415:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:50.813976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 6182, MsgBus: 7816 2024-11-21T17:47:48.431058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790746242646971:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:48.431282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021dd/r3tmp/tmprzGVCr/pdisk_1.dat 2024-11-21T17:47:48.474759Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6182, node 1 2024-11-21T17:47:48.487410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:48.487424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:48.487425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:48.487460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7816 TClient is connected to server localhost:7816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:48.531299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:48.531323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:48.532426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:48.533772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.545673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:48.558983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.575342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.586860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:48.710914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790746242648501:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.710940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.738315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.792826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.804024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.859106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.866449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.872891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:48.881532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790746242649019:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.881564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.881582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790746242649024:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:48.882392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:48.886632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790746242649026:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:49.059153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 65044, MsgBus: 23511 2024-11-21T17:47:49.539087Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790750245921535:2126];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021dd/r3tmp/tmpYysRVF/pdisk_1.dat 2024-11-21T17:47:49.544349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 65044, node 2 2024-11-21T17:47:49.554573Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:49.556642Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:49.556654Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:49.556655Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:49.556688Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23511 TClient is connected to server localhost:23511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:49.639312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:49.639345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:49.640442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:49.641686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.644470Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:49.654451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:49.662713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.678528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.689420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.859589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750245923005:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.859621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.864863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.871215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.926119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.937699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.951804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.959501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.974060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750245923502:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.974083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.974088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790750245923507:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.974710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:49.978616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790750245923509:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:50.170195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2024-11-21T17:47:37.882422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790699395094586:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:37.884626Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790700300599791:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:37.884724Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b80/r3tmp/tmp3haWUU/pdisk_1.dat 2024-11-21T17:47:37.920468Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:37.925050Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:37.925880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:37.961666Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9353, node 1 2024-11-21T17:47:37.992638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:37.992663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:38.007533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:38.012512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000b80/r3tmp/yandexE5N3Fa.tmp 2024-11-21T17:47:38.012530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000b80/r3tmp/yandexE5N3Fa.tmp 2024-11-21T17:47:38.012598Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000b80/r3tmp/yandexE5N3Fa.tmp 2024-11-21T17:47:38.012658Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:38.022087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:38.022107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:38.026106Z INFO: TTestServer started on Port 7708 GrpcPort 9353 2024-11-21T17:47:38.026728Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:38.027062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7708 PQClient connected to localhost:9353 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:38.049143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:38.069223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:38.183406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:38.200652Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T17:47:38.287426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790703690062685:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:38.287469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:38.287685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790703690062718:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:38.288671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:38.289368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790703690062750:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:38.289390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:38.316719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.317223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790703690062720:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:47:38.379726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:38.384664Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790703690062980:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:38.384738Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWI1OWJmMTgtYjBiNzg3NmEtNzM5ODExMzMtZTU5ZmQzN2Y=, ActorId: [1:7439790703690062679:2299], ActorState: ExecuteState, TraceId: 01jd7xccxd5sv06zmdyrab221r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:38.385171Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:38.403449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:47:38.437703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xcd1j0a94h9c0fbr8vbdq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY5MGY2MTgtMmE3NWQ0YS0xYjljYTE4NC0xMmQyZmJiMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790703690063227:3026] 2024-11-21T17:47:42.881214Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790699395094586:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:42.881253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:47:42.889538Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790700300599791:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:42.889594Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211258099 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".metadata" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1732211258344 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" } Children { Name: "PQ" PathId: 2 SchemeshardId: 72057594046644480 PathTyp... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:43.652501Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790699395094681:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:43.652594Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ... 1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 1 WriteTimestampMS: 1732211270786 CreateTimestampMS: 1732211270785 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 2 WriteTimestampMS: 1732211270788 CreateTimestampMS: 1732211270785 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 3 WriteTimestampMS: 1732211270788 CreateTimestampMS: 1732211270785 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 94 bytes ..." 
SourceId: "\000123" SeqNo: 4 WriteTimestampMS: 1732211270788 CreateTimestampMS: 1732211270785 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 19 RealReadOffset: 3 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T17:47:50.976718Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) wait data in partition inited, cookie 1 from offset4 2024-11-21T17:47:50.976725Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 after read state TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) EndOffset 4 ReadOffset 4 ReadGuid bd237ba2-55723c32-967eaf50-f250ac30 has messages 1 2024-11-21T17:47:50.976762Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 read done: guid# bd237ba2-55723c32-967eaf50-f250ac30, partition# TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5), size# 416 2024-11-21T17:47:50.976769Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 response to read: guid# bd237ba2-55723c32-967eaf50-f250ac30 2024-11-21T17:47:50.976897Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 Process answer. Aval parts: 0 2024-11-21T17:47:50.977590Z :DEBUG: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:47:50.977688Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2024-11-21T17:47:50.977734Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T17:47:50.977745Z :DEBUG: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 3 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 0 SeqNo: 1 MessageGroupId: "123" CreateTime: 2024-11-21T17:47:50.785000Z WriteTime: 2024-11-21T17:47:50.786000Z Ip: "ipv6:[::1]:54860" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:54860" } } } } 2024-11-21T17:47:50.977779Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T17:47:50.977782Z :DEBUG: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 3 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 1 SeqNo: 2 MessageGroupId: "123" CreateTime: 2024-11-21T17:47:50.785000Z WriteTime: 2024-11-21T17:47:50.788000Z Ip: "ipv6:[::1]:54860" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:54860" } } } } 2024-11-21T17:47:50.977791Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (2-2) 2024-11-21T17:47:50.977793Z :DEBUG: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. 
Partition stream id: 3 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "123" CreateTime: 2024-11-21T17:47:50.785000Z WriteTime: 2024-11-21T17:47:50.788000Z Ip: "ipv6:[::1]:54860" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:54860" } } } } 2024-11-21T17:47:50.977802Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 2} (3-3) 2024-11-21T17:47:50.977807Z :DEBUG: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 3 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 3 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 3 SeqNo: 4 MessageGroupId: "123" CreateTime: 2024-11-21T17:47:50.785000Z WriteTime: 2024-11-21T17:47:50.788000Z Ip: "ipv6:[::1]:54860" UncompressedSize: 10 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:54860" } } } } 2024-11-21T17:47:50.977825Z :INFO: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:50.977841Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account2/topic2:4:5:0:0 null:account2/topic2:3:4:0:0 null:account2/topic2:2:1:0:0 null:account2/topic2:1:2:0:0 null:account2/topic2:0:3:3:0 2024-11-21T17:47:50.977847Z :INFO: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] Counters: { Errors: 0 CurrentSessionLifetimeMs: 54 BytesRead: 40 MessagesRead: 4 BytesReadCompressed: 92 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:50.977886Z :NOTICE: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:47:50.977894Z :DEBUG: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] [null] Abort session to cluster 2024-11-21T17:47:50.977979Z :NOTICE: [/Root] [/Root] [40588243-b603f7e7-90c278e6-45710faa] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:50.978350Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T17:47:50.978371Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 grpc closed 2024-11-21T17:47:50.978389Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_11557478168320405286_v1 is DEAD 2024-11-21T17:47:50.979849Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:50.979868Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979876Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439790755003342157:2519] destroyed 2024-11-21T17:47:50.979880Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:50.979881Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979884Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7439790755003342156:2518] destroyed 2024-11-21T17:47:50.979896Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037905][topic2] pipe [3:7439790755003342148:2513] disconnected; active server actors: 1 2024-11-21T17:47:50.979899Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037905][topic2] pipe [3:7439790755003342148:2513] client user1 disconnected session user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979942Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979945Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979675Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:50.979692Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Destroy direct read session user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979699Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037904] server disconnected, pipe [3:7439790755003342158:2520] destroyed 2024-11-21T17:47:50.979708Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:50.979710Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979714Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439790755003342155:2517] destroyed 2024-11-21T17:47:50.979717Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:50.979719Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979722Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7439790755003342154:2516] destroyed 2024-11-21T17:47:50.979761Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979763Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_11557478168320405286_v1 2024-11-21T17:47:50.979765Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_11557478168320405286_v1 
2024-11-21T17:47:51.137818Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790729233535415:2147], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:51.137894Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790729233535415:2147], cacheItem# { Subscriber: { Subscriber: [3:7439790733528503419:2658] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:47:51.137922Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439790759298309484:4319], recipient# [3:7439790759298309481:2522], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpIndexes::MultipleModifications >> TxUsage::ReadRuleGeneration [GOOD] >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate >> KqpMultishardIndex::DataColumnWrite-StreamLookup [GOOD] >> KqpMultishardIndex::CheckPushTopSort >> KqpIndexes::UpdateIndexSubsetPk [GOOD] >> KqpIndexes::UpdateOnReadColumns >> TVPatchTests::PatchPartPutError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2024-11-21T17:47:18.223321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790619791780079:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.223700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:18.252839Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001357/r3tmp/tmpePN7Cw/pdisk_1.dat 2024-11-21T17:47:18.282751Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25388, node 1 2024-11-21T17:47:18.300278Z INFO: TTestServer started on Port 65214 GrpcPort 25388 2024-11-21T17:47:18.300941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001357/r3tmp/yandexH4Pz96.tmp 2024-11-21T17:47:18.300953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/6fwh/001357/r3tmp/yandexH4Pz96.tmp 2024-11-21T17:47:18.300976Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001357/r3tmp/yandexH4Pz96.tmp 2024-11-21T17:47:18.300985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65214 2024-11-21T17:47:18.325854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.325895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.327417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:25388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.392384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.394807Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.402292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.513411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619791780823:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.513430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619791780845:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.513435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.513883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619791780868:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.513922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.514237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.515870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790619791780852:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:18.580940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.586673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.601335Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790619791781053:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.601534Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWI4ZDI5OGQtNjBiZjU5OTctYzg4ZjA4MzgtOGIyMzY4MA==, ActorId: [1:7439790619791780820:2304], ActorState: ExecuteState, TraceId: 01jd7xbskg56cf2qw2b0wgw58t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.602047Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:18.602798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790619791781193:2597] 2024-11-21T17:47:23.223700Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790619791780079:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.223737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.764779Z :ValidateSettingsFailOnStart INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.768760Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.783813Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790641266617995:2780] connected; active server actors: 1 2024-11-21T17:47:23.783871Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.784593Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.784637Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.786345Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.786902Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.786986Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.787100Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.787125Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.787134Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.787135Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.787137Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.787141Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.787223Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.787627Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.787637Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.787646Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.787651Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790641266618028:2428], now have 1 active actors on pipe 2024-11-21T17:47:23.832564Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.832582Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790641266617994:2779], now have 1 active actors on pipe 2024-11-21T17:47:23.832587Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:47:23.834375Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439790619791780484:2188] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "t ... 
2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T17:47:50.154826Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid d3d9dfc0-836e60d0-e9e31785-b05b3238 has messages 1 2024-11-21T17:47:50.155016Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] Got ReadResponse, serverBytesSize = 199, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428601 2024-11-21T17:47:50.155054Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428601 2024-11-21T17:47:50.154839Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 read done: guid# d3d9dfc0-836e60d0-e9e31785-b05b3238, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 199 2024-11-21T17:47:50.154845Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 response to read: guid# d3d9dfc0-836e60d0-e9e31785-b05b3238 2024-11-21T17:47:50.154907Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 Process answer. Aval parts: 0 2024-11-21T17:47:50.155168Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2024-11-21T17:47:50.155212Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] Returning serverBytesSize = 199 to budget 2024-11-21T17:47:50.155220Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] In ContinueReadingDataImpl, ReadSizeBudget = 199, ReadSizeServerDelta = 52428601 2024-11-21T17:47:50.155425Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T17:47:50.155486Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (3-3) 2024-11-21T17:47:50.155501Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] The application data is transferred to the client. Number of messages 1, size 9 bytes 2024-11-21T17:47:50.155510Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] Returning serverBytesSize = 0 to budget 0 1 2024-11-21T17:47:50.155530Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] Commit offsets [3, 4). 
Partition stream id: 1 2024-11-21T17:47:50.155481Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 grpc read done: success# 1, data# { read_request { bytes_size: 199 } } 2024-11-21T17:47:50.155512Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 got read request: guid# a6907299-8fc10d2-d6ef1392-5b4554bb 2024-11-21T17:47:50.155691Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 3 end: 4 } } } } 2024-11-21T17:47:50.155727Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) committing to position 4 prev 3 end 4 by cookie 2 2024-11-21T17:47:50.155814Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T17:47:50.155822Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T17:47:50.155849Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 offset is set to 4 (startOffset 0) session consumer-1_5_2_2774322868340510279_v1 2024-11-21T17:47:50.155880Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:47:50.156261Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 readTimeStamp for offset 4 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2024-11-21T17:47:50.156280Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:47:50.156294Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T17:47:50.156315Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T17:47:50.156328Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) commit done to position 4 endOffset 4 with cookie 2 2024-11-21T17:47:50.156340Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 4 2024-11-21T17:47:50.157194Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 4 } } 2024-11-21T17:47:51.131302Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:47:51.131636Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0 describe result for acl check 2024-11-21T17:47:51.145394Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2024-11-21T17:47:51.145415Z :INFO: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 
9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:51.147672Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 checking auth because of timeout 2024-11-21T17:47:51.147699Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 auth for : consumer-1 2024-11-21T17:47:51.147811Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 Handle describe topics response 2024-11-21T17:47:51.147834Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 auth is DEAD 2024-11-21T17:47:51.147853Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 auth ok: topics# 1, initDone# 1 2024-11-21T17:47:52.145521Z :INFO: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] Closing read session. Close timeout: 0.000000s 2024-11-21T17:47:52.145543Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2024-11-21T17:47:52.145553Z :INFO: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:47:52.145579Z :NOTICE: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:47:52.145591Z :DEBUG: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] [] Abort session to cluster 2024-11-21T17:47:52.145838Z :NOTICE: [/Root] [/Root] [e411be32-4f7dc2dc-e0ae1839-ef1cb95c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:52.146742Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:52.146764Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 grpc read failed 2024-11-21T17:47:52.146771Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 grpc closed 2024-11-21T17:47:52.146789Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_2774322868340510279_v1 is DEAD 2024-11-21T17:47:52.147910Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:52.147919Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session consumer-1_5_2_2774322868340510279_v1 2024-11-21T17:47:52.147927Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439790757719725943:2518] destroyed 2024-11-21T17:47:52.147949Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer-1_5_2_2774322868340510279_v1 2024-11-21T17:47:52.147965Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439790757719725940:2515] disconnected; active server actors: 1 2024-11-21T17:47:52.147969Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439790757719725940:2515] client consumer-1 disconnected session consumer-1_5_2_2774322868340510279_v1 2024-11-21T17:47:52.148455Z :INFO: [/Root] SessionId [test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T17:47:52.148467Z :INFO: [/Root] SessionId [test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:47:52.148476Z :DEBUG: [/Root] SessionId [test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:47:52.148607Z :INFO: [/Root] SessionId [test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:52.148612Z :DEBUG: [/Root] SessionId [test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:47:52.149291Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0 grpc read done: success: 0 data: 2024-11-21T17:47:52.149305Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0 grpc read failed 2024-11-21T17:47:52.149312Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0 grpc closed 2024-11-21T17:47:52.149316Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|49ef0669-1c782825-ee4c85bf-ee1c45b1_0 is DEAD 2024-11-21T17:47:52.149559Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:52.151155Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:52.151177Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe 
[5:7439790749129791132:2465] destroyed 2024-11-21T17:47:52.151197Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26455, MsgBus: 28758 2024-11-21T17:47:49.065682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790750670289610:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:49.065708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d6/r3tmp/tmpPy6sNZ/pdisk_1.dat 2024-11-21T17:47:49.122243Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26455, node 1 2024-11-21T17:47:49.136487Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:49.136500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:49.136502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:49.136544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28758 2024-11-21T17:47:49.166706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:49.166738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:49.167835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:49.185131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.193342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:47:49.216763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.235194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.245560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.358258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790750670291145:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.358281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.385127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.391068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.398139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.453330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.461487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.468157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:49.476557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790750670291661:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.476583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.476587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790750670291666:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.477137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:49.481383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790750670291668:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:49.659446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 10504, MsgBus: 64506 2024-11-21T17:47:50.171197Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790756428233882:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:50.171259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d6/r3tmp/tmpHs6AqU/pdisk_1.dat 2024-11-21T17:47:50.182912Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10504, node 2 2024-11-21T17:47:50.196414Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:50.196427Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:50.196429Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:50.196468Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64506 TClient is connected to server localhost:64506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:50.274715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:50.274749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:50.275518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.275753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:47:50.282232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:50.290743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:50.308047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:50.318830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:50.463635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790756428235423:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.463672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.472710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.481536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.536710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.546292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.554499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.569973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.629241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790756428235942:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.629262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.629400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790756428235947:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.630151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:50.637370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790756428235949:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } waiting... 2024-11-21T17:47:50.897247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.727806Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790760723206368:2717], TxId: 281474976715731, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTgzMzI1MWYtYjY1MzVmNjEtYmU2ZmRjY2ItNTBiOGVkN2U=. TraceId : 01jd7xct0jez5czh2jhgmgvr5h. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T17:47:51.727929Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439790760723206369:2718], TxId: 281474976715731, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTgzMzI1MWYtYjY1MzVmNjEtYmU2ZmRjY2ItNTBiOGVkN2U=. TraceId : 01jd7xct0jez5czh2jhgmgvr5h. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439790760723206365:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:47:51.728220Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTgzMzI1MWYtYjY1MzVmNjEtYmU2ZmRjY2ItNTBiOGVkN2U=, ActorId: [2:7439790756428236259:2454], ActorState: ExecuteState, TraceId: 01jd7xct0jez5czh2jhgmgvr5h, Create QueryResponse for error on request, msg: >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> TVPatchTests::PatchPartPutError [GOOD] >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService >> KqpIndexes::MultipleModifications [GOOD] >> KqpIndexes::InnerJoinWithNonIndexWherePredicate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2024-11-21T17:47:52.738929Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T17:47:52.739105Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T17:47:52.739117Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T17:47:52.739150Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2024-11-21T17:47:52.739159Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T17:47:52.739181Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2024-11-21T17:47:52.739192Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2024-11-21T17:47:52.739200Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2024-11-21T17:47:52.739218Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2024-11-21T17:47:52.739224Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# 
[1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2024-11-21T17:47:52.739234Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] >> KqpMultishardIndex::CheckPushTopSort [GOOD] >> TxUsage::WriteToTopic_Demo_7 [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit [GOOD] |78.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |78.0%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |78.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> TxUsage::WriteToTopic_Demo_8 >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 22974, MsgBus: 4146 2024-11-21T17:47:51.192410Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790762123145799:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:51.192556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d0/r3tmp/tmpg6dR1B/pdisk_1.dat 2024-11-21T17:47:51.245886Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22974, node 1 2024-11-21T17:47:51.252566Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:51.252579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:51.252580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:51.252609Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4146 TClient is connected to server localhost:4146 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:51.293370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:51.293400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:51.294524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:51.322004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.328424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.398876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:51.419546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.431650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.482702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790762123147346:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.482732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.513919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.569932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.582606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.589193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.596250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.603493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.612092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790762123147852:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.612123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.612135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790762123147857:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.612706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:51.616282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790762123147859:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:51.773258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.798946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:47:51.808885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64485, MsgBus: 24757 2024-11-21T17:47:52.307385Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790764620604680:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:52.307746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d0/r3tmp/tmp8tdWrB/pdisk_1.dat 2024-11-21T17:47:52.320521Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64485, node 2 2024-11-21T17:47:52.325021Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:52.325037Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:52.325039Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:52.325084Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24757 TClient is connected to server localhost:24757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:52.410025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:52.410060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:52.410407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:52.411077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:52.419402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.437667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.457439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.472413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.652442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790764620606234:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.654373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.655444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.686473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.698269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.717084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.730112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.742544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.767840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790764620606737:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.767869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.767944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790764620606742:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.768906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:52.772067Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:47:52.772322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790764620606744:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:52.972890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.079484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:47:53.089398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.340454Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 3369, MsgBus: 30855 2024-11-21T17:47:51.332981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790759783209955:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:51.333149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021cb/r3tmp/tmpARb5p9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3369, node 1 2024-11-21T17:47:51.399960Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:51.400825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:51.400834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:51.400836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:51.400870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30855 2024-11-21T17:47:51.434116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:51.434145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:51.435225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:51.461177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:51.468941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.492830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.516052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.528455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.647473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790759783211499:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.647501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.680909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.687297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.694213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.750131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.757808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.772369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.787547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790759783212015:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.787563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790759783212020:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.787571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.788359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:51.791429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790759783212022:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } waiting... 2024-11-21T17:47:52.008666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.105687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715675:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 5132, MsgBus: 14284 2024-11-21T17:47:52.453656Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790766915592599:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:52.453675Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021cb/r3tmp/tmp5oiy3D/pdisk_1.dat 2024-11-21T17:47:52.464966Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5132, node 2 2024-11-21T17:47:52.476503Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:52.476516Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:52.476528Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:52.476571Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14284 TClient is connected to server localhost:14284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:52.552654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:52.552683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:52.556900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:52.557337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.560930Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:52.567429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.585238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:52.618665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.635415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.812488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790766915594138:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.812520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.817987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.825262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.838279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.850442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.865011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.878290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.893184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790766915594639:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.893213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.893269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790766915594644:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.894300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:52.897780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790766915594646:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } waiting... 2024-11-21T17:47:53.107675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpOlapSparsed::SwitchingMultiColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 19916, MsgBus: 29793 2024-11-21T17:47:49.686503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790754043838880:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:49.686603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d4/r3tmp/tmpLAQXsx/pdisk_1.dat 2024-11-21T17:47:49.734683Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19916, node 1 2024-11-21T17:47:49.749346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:49.749366Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:49.749368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:49.749402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29793 TClient is connected to server localhost:29793 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:47:49.785908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:49.785938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:49.787407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:49.815462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:49.818150Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:49.826323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.888774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.907322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.917287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:49.968593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790754043840280:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:49.968618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.007558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.017008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.029608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.043673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.056875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.112855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.128842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790758338808092:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.128870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.128887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790758338808097:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:50.129557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:50.132026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790758338808099:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:50.327040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.334892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.343676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:47:50.518236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 waiting... [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. Trying to start YDB, gRPC: 4663, MsgBus: 14308 2024-11-21T17:47:52.537575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790765819619594:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:52.537631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021d4/r3tmp/tmpZDfb28/pdisk_1.dat 2024-11-21T17:47:52.606299Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4663, node 1 2024-11-21T17:47:52.623356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:52.623368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:52.623370Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:52.623406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14308 2024-11-21T17:47:52.636513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:52.636538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:52.638137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14308 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:52.676548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.682980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.755527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.782581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.796675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.913110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790765819620985:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.913169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.918101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.924019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.981355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.042335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.053052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.071769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.085014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790770114588802:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.085042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.085090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790770114588807:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.085997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:53.094081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790770114588809:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:53.321370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] >> TTablesWithReboots::AlterAndForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapSparsed::SwitchingMultiColumn [GOOD] Test command err: Trying to start YDB, gRPC: 8429, MsgBus: 26808 2024-11-21T17:46:14.942138Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790342960932709:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.942154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e3a/r3tmp/tmpKqyslK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8429, node 1 2024-11-21T17:46:14.994557Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:14.996506Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.996516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.996517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.996541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26808 TClient is connected to server localhost:26808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:15.043567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:15.043596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:15.044731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:15.072140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:15.084931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:15.093019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:15.093074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:15.093102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:15.093123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:15.093134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:15.093147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:15.093161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:15.093176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:15.093199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:15.093217Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:15.093232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:15.093251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:15.095207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:15.095224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:15.095239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:15.095251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:15.095261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:15.095270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:15.095285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:15.095295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:15.095310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:15.095322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:15.095335Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:15.095347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:15.095594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:15.095606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:15.095614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:15.095620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:15.095633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:15.095639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:15.095645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:15.095648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:15.095654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:15.095660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:15.095665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890 ... 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.569717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.569836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.569839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.606042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.606125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.670035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.670036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.670090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.670097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.706380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.706472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.770390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.770391Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.770447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.770460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.806763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.806837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.870743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.870745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.870792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.870806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.907088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.907155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.971066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.971068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 
2024-11-21T17:47:45.971115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:45.971119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.007455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.007546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.071443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.071444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.071512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.071520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.107815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; 2024-11-21T17:47:46.107887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:802;event=skip_compaction;reason=disabled; Timing: checkTable took 4 seconds WAIT_COMPACTION: 9 2024-11-21T17:47:46.172149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790347255900662:2290];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T17:47:46.172172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790347255900661:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T17:47:46.701867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790347255900660:2288];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=3; 2024-11-21T17:47:46.712914Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;task_id=b77748a0-a83011ef-99a3ecf8-f15658b5;fline=with_appended.cpp:80;portions=9,;task_id=b77748a0-a83011ef-99a3ecf8-f15658b5; 2024-11-21T17:47:46.731182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=b77748a0-a83011ef-87f2e9e8-431b99c7;fline=with_appended.cpp:80;portions=9,;task_id=b77748a0-a83011ef-87f2e9e8-431b99c7; WAIT_COMPACTION: 12 2024-11-21T17:47:47.255974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=b7c81d48-a83011ef-84761772-7fe07f97;fline=with_appended.cpp:80;portions=9,;task_id=b7c81d48-a83011ef-84761772-7fe07f97; WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 WAIT_COMPACTION: 12 Timing: wait took 6 seconds ==================================== QUERY: SELECT count(*) as count, FROM `/Root/olapStore/olapTable` RESULT: 2024-11-21T17:47:52.323170Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211272000, txId: 18446744073709551615] shutting down count: 14000 2024-11-21T17:47:52.930313Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211272281, txId: 18446744073709551615] shutting down Timing: Executing query took 1 seconds Timing: checkTable took 1 seconds Timing: wait took 0 seconds Timing: Fill took 20 seconds ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 20402, MsgBus: 6564 2024-11-21T17:47:51.663006Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790758844140836:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:51.663284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021c6/r3tmp/tmpsoDKn6/pdisk_1.dat 2024-11-21T17:47:51.713745Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20402, node 1 2024-11-21T17:47:51.723556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:51.723567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:51.723569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:51.723601Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6564 TClient is connected to server localhost:6564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:51.764051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:51.764079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:51.765238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:51.792100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.794152Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:51.797834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.813423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.830363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.847178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.962169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790758844142393:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.962220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.018581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.026148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.038742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.051897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.067335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.081370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.103649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790763139110202:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.103677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.103834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790763139110207:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.104874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:52.109791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790763139110209:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } waiting... 2024-11-21T17:47:52.311554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.510041Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xctpr9y7cvfdz31w1kqw2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ5YTg3MjYtMzQyNGZiYy00ZGNjZDdiZC1jNzhlZDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:52.511529Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWQ5YTg3MjYtMzQyNGZiYy00ZGNjZDdiZC1jNzhlZDMxYw==, ActorId: [1:7439790763139111210:2512], ActorState: ExecuteState, TraceId: 01jd7xctpr9y7cvfdz31w1kqw2, Create QueryResponse for error on request, msg: 2024-11-21T17:47:52.623932Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xctt84gck03k8zbb97vap, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ5YTg3MjYtMzQyNGZiYy00ZGNjZDdiZC1jNzhlZDMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:52.624025Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWQ5YTg3MjYtMzQyNGZiYy00ZGNjZDdiZC1jNzhlZDMxYw==, ActorId: [1:7439790763139111210:2512], ActorState: ExecuteState, TraceId: 01jd7xctt84gck03k8zbb97vap, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 7916, MsgBus: 21006 2024-11-21T17:47:52.986838Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790763338142540:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021c6/r3tmp/tmpm9j8hO/pdisk_1.dat 2024-11-21T17:47:52.992366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 7916, node 2 2024-11-21T17:47:53.002939Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:53.003049Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:53.003062Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:53.003063Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:53.003105Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21006 TClient is connected to server localhost:21006 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:53.086578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:53.086619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:53.087668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:53.088329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.091111Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:53.097446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:53.108101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.129471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.141136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.329441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790767633111232:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.329462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.336331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.368952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.392777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.406427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.449717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.485658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.513615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790767633111752:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.513642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.513747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790767633111757:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.514691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:53.517021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790767633111759:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:53.694701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.935714Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xcw2jak4cne9yhs12sh9h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzk4M2Y0MzgtODA3MjRiMTgtYzc5ZTUwNmItNjhhYzYwMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:53.935781Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Nzk4M2Y0MzgtODA3MjRiMTgtYzc5ZTUwNmItNjhhYzYwMzU=, ActorId: [2:7439790767633112754:2512], ActorState: ExecuteState, TraceId: 01jd7xcw2jak4cne9yhs12sh9h, Create QueryResponse for error on request, msg: 2024-11-21T17:47:54.068958Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xcw7ad1n5r0pvf90nxhmw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Nzk4M2Y0MzgtODA3MjRiMTgtYzc5ZTUwNmItNjhhYzYwMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T17:47:54.069032Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Nzk4M2Y0MzgtODA3MjRiMTgtYzc5ZTUwNmItNjhhYzYwMzU=, ActorId: [2:7439790767633112754:2512], ActorState: ExecuteState, TraceId: 01jd7xcw7ad1n5r0pvf90nxhmw, Create QueryResponse for error on request, msg: >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2024-11-21T17:47:53.205215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790768080394511:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:53.205278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f20/r3tmp/tmpBEjE6z/pdisk_1.dat 2024-11-21T17:47:53.292353Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:53.308676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:53.308702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:53.312787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14048, node 1 2024-11-21T17:47:53.353331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:53.353349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:53.353352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:53.353399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6696 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:53.395005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.395732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:53.395739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.396466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:53.396530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:53.396534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:47:53.397207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:53.397210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:47:53.397491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.398266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211273443, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:53.398272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:53.398373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:47:53.398759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:53.398801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:53.398813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:47:53.398822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:47:53.398832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:47:53.398842Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:47:53.399206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:47:53.399221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:47:53.399224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:53.399240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2024-11-21T17:47:53.400998Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:53.577220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790768080395300:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.577261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.600832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.600974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:47:53.601118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:53.601131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.602023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:47:53.602114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:53.602171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:53.602222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:47:53.602303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:47:53.602417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:47:53.602445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:47:53.602456Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:47:53.602543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:47:53.602552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:47:53.602554Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:47:53.604801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:47:53.604832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:47:53.605254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:47:53.657863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:47:53.657885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:47:53.657921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:47:53.658619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:47:53.659772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211273709, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:53.659792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211273709 2024-11-21T17:47:53.659827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:47:53.660350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:53.660438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:53.660463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:47:53.660819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:47:53.660836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:47:53.660841Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [Owner ... 17:47:53.674723Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:53.674745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:47:53.675030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.675044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.675048Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:47:53.675084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.675091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.675092Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:47:53.675106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.675112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.675114Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:47:53.675127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.675134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.675136Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T17:47:53.675146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.675153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.675154Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T17:47:53.675987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211273723, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:53.676001Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211273723, at schemeshard: 72057594046644480 2024-11-21T17:47:53.676029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T17:47:53.676049Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211273723, at schemeshard: 72057594046644480 2024-11-21T17:47:53.676062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T17:47:53.676074Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211273723, at schemeshard: 72057594046644480 2024-11-21T17:47:53.676083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T17:47:53.676093Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732211273723 2024-11-21T17:47:53.676103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T17:47:53.676570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:53.676663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:53.676672Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T17:47:53.676684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T17:47:53.676710Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T17:47:53.676714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T17:47:53.676722Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:47:53.676726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T17:47:53.676734Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T17:47:53.676737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T17:47:53.676742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:47:53.676750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T17:47:53.676754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T17:47:53.676756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T17:47:53.676760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T17:47:53.677545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.677575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.677579Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:47:53.677613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.677632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.677634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:47:53.677647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.677649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.677650Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:47:53.677660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.677662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.677663Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:47:53.677673Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:47:53.677688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:47:53.677689Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T17:47:53.677693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T17:47:53.678380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790768080395474:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:47:53.767164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:47:53.767214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:47:53.767993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:47:53.785517Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xcvy89mnj7d2vyr5fns4j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFkMTIzOWItYzIzYjY2OWMtN2JiODI3MDMtNDU2ODQzMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:47:53.903521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xcw5f0e5xpxxxzq2ty9h7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjExODQzMWItM2YwODA1YzMtZDc1NzI2MTQtYzc0ZmY3MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 5227, MsgBus: 29158 2024-11-21T17:47:51.365216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790760739251923:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:51.365547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021cc/r3tmp/tmpCG0iWA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5227, node 1 2024-11-21T17:47:51.432330Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:51.437538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:51.437554Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:51.437556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:51.437591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29158 2024-11-21T17:47:51.467903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:51.467937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:51.470144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:51.506983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.512445Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:51.515376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:51.531963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.549909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.560575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.661529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790760739253469:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.661550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.691713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.697753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.708001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.763056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.771747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.786272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.794218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790760739253986:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.794241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.794274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790760739253991:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.794931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:51.798253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790760739253993:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:52.011348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.023159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. Trying to start YDB, gRPC: 9235, MsgBus: 23206 2024-11-21T17:47:53.868030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790770939561580:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:53.868281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021cc/r3tmp/tmpxojC0Q/pdisk_1.dat 2024-11-21T17:47:53.916713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9235, node 1 2024-11-21T17:47:53.944387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:53.944401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:53.944403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:53.944435Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23206 2024-11-21T17:47:53.972456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:53.972492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:53.975108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:54.008187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:54.019985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:54.033136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:54.059516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.101835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:54.113441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:54.209697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790775234530414:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:54.209755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:54.215875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.226262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.239292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.249662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.263699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.277511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.287525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790775234530918:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:54.287551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790775234530923:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:54.287552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:54.288379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:54.297992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790775234530925:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:54.497718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:54.504854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TTablesWithReboots::DropCopyWithRebootsAtCommit >> TTablesWithReboots::CreateDroppedTableWithReboots >> TTablesWithReboots::CopyTableAndDropWithReboots |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimpleDropTableWithReboots >> TTablesWithReboots::TwiceRmDirWithReboots |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> BasicUsage::ReadWithRestarts [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight |78.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |78.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |78.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::InnerJoinWithNonIndexWherePredicate Test command err: Trying to start YDB, gRPC: 9841, MsgBus: 14248 2024-11-21T17:47:52.123722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790766226124269:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:52.123805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021c2/r3tmp/tmplh0U0q/pdisk_1.dat 2024-11-21T17:47:52.190491Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9841, node 1 2024-11-21T17:47:52.204957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:52.204968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:52.204970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:52.205008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14248 2024-11-21T17:47:52.223163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:52.223196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:52.226909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14248 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:52.258595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.268965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:52.284833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.304726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.317066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.457883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790766226125676:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.457916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.527614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.534222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.593485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.607709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.624243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.638083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.656930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790766226126206:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.656957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.657103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790766226126211:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.658015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:52.661362Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T17:47:52.661546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790766226126213:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:52.889380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1795, MsgBus: 1570 2024-11-21T17:47:53.261379Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790771291008637:2083];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021c2/r3tmp/tmpRGcD1Y/pdisk_1.dat 2024-11-21T17:47:53.263602Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:53.284724Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1795, node 2 2024-11-21T17:47:53.296417Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:53.296428Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:53.296431Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:53.296474Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1570 TClient is connected to server localhost:1570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:53.360806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:53.360829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:53.361894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:53.365334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.366282Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:53.367684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:53.427086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.449131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.469699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:53.590140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790771291010143:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.590166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.593547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.648321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.703000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.710060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.717664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.773341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.786444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790771291010674:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.786479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.786564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790771291010679:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:53.787363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:53.793852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790771291010681:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:53.968108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.987382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.997934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. >> TxUsage::WriteToTopic_Demo_37 [GOOD] >> Describe::Location [GOOD] >> Describe::DescribePartitionPermissions >> TTablesWithReboots::CreateWithRebootsAtCommit >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateForceDrop-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:45.822430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:45.822458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:45.822464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:45.822470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:45.822489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:45.822493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:45.822504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T17:47:45.822597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:45.832211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:45.832251Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:45.834515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:45.834620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:45.834668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:45.837767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:45.837875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:45.838017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:45.838291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:45.839042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:45.839376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:45.839388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:45.839403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:45.839410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:45.839417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:45.839464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:45.841035Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:45.859250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:45.859375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:45.859457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:45.859511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:45.859520Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:45.860576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:45.860616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:45.860688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:45.860702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:45.860708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:45.860714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:45.861324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:45.861337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:45.861343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:45.861769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:45.861779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:45.861786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:45.861794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:45.862509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:45.862988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:45.863049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:45.863274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:45.863307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:45.863316Z node 
1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:45.863380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:45.863387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:45.863423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:45.863437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:45.863839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:45.863849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:45.863900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:45.863905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:45.864000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:45.864008Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:45.864020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:45.864024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:45.864030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:45.864035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:45.864040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:45.864044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:45.864056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:45.864062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:45.864066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
4815Z node 41 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:56.324845Z node 41 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 134 -> 135 2024-11-21T17:47:56.324872Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:56.324880Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:47:56.325232Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:56.325241Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:56.325273Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:47:56.325297Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.325304Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:47:56.325312Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:47:56.325369Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.325376Z node 41 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T17:47:56.325380Z node 41 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 135 -> 240 2024-11-21T17:47:56.325503Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:56.325512Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:56.325516Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:56.325521Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:47:56.325525Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:47:56.325747Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:47:56.325759Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 
2024-11-21T17:47:56.325763Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:47:56.325768Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:47:56.325772Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:47:56.325783Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:47:56.326092Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.326103Z node 41 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:47:56.326116Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:47:56.326120Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:56.326126Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:47:56.326132Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:47:56.326138Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:47:56.326142Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:47:56.326152Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:47:56.326234Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:47:56.326243Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:47:56.326259Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:47:56.326321Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:47:56.326327Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:47:56.326338Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:56.326419Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:56.326627Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:47:56.327019Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:47:56.327033Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
TestWaitNotification wait txId: 1002 2024-11-21T17:47:56.327081Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:47:56.327090Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 TestWaitNotification wait txId: 1003 2024-11-21T17:47:56.327107Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:47:56.327110Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:47:56.327189Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:47:56.327213Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:47:56.327219Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [41:341:2333] 2024-11-21T17:47:56.327239Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:47:56.327252Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:47:56.327255Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:341:2333] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T17:47:56.327320Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:56.327352Z node 41 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 46us result status StatusPathDoesNotExist 2024-11-21T17:47:56.327391Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:47:56.327433Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:47:56.327451Z node 41 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2024-11-21T17:47:56.327527Z node 41 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_38 >> TNodeBrokerTest::TestRandomActions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] Test command err: 2024-11-21T17:47:53.027473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790768692468031:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:53.027633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f34/r3tmp/tmpAC2bEj/pdisk_1.dat 2024-11-21T17:47:53.098220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9226, node 1 2024-11-21T17:47:53.110586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:53.110599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:53.110601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:53.110633Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16605 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:53.128072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:53.128111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:53.130455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:53.174037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.175458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:53.175477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.176684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:53.176732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:53.176737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:47:53.177524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:53.177535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:47:53.178001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:53.178981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211273226, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:53.178992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:53.179062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 waiting... 
2024-11-21T17:47:53.179498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:53.179545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:53.179555Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:47:53.179565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:47:53.179573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:47:53.179582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:47:53.180027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:47:53.180036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:47:53.180041Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:53.180054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T17:47:53.180431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:55.291555Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790779727709445:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:55.291586Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f34/r3tmp/tmp3Br09e/pdisk_1.dat 2024-11-21T17:47:55.306900Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27715, node 4 2024-11-21T17:47:55.323328Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:55.323346Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:55.323347Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:55.323389Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12792 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:55.391433Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:55.391466Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:55.393071Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:55.394187Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:55.394305Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:47:55.394316Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:55.394818Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:47:55.394869Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:47:55.394877Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:47:55.395247Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:47:55.395254Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:47:55.395339Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:55.395621Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:47:55.396369Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211275445, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:47:55.396381Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:47:55.396443Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:47:55.396766Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:47:55.396808Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:47:55.396821Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:47:55.396831Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:47:55.396843Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:47:55.396857Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:47:55.396953Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:47:55.396964Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:47:55.396968Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:47:55.396976Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |78.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2024-11-21T17:47:07.436458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:07.436479Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:07.440046Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:47:07.440415Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:47:07.440493Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.440498Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts 
new tx 2024-11-21T17:47:07.440503Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2024-11-21T17:47:07.440629Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:47:07.441153Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2024-11-21T17:47:07.441160Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.441163Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.441165Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.441200Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2024-11-21T17:47:07.441229Z node 1 :NODE_BROKER DEBUG: Using default config. 2024-11-21T17:47:07.441243Z node 1 :NODE_BROKER DEBUG: Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.441248Z node 1 :NODE_BROKER DEBUG: Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.473205Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2024-11-21T17:47:07.473245Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2024-11-21T17:47:07.473251Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2024-11-21T17:47:07.473260Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.483567Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:569:2205], Recipient [1:533:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.483859Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:522:2178], Recipient [1:533:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2024-11-21T17:47:07.483868Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2024-11-21T17:47:07.483882Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2024-11-21T17:47:07.483934Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:571:2207], Recipient [1:533:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.483960Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:522:2178], Recipient [1:533:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2024-11-21T17:47:07.483963Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:07.483968Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1024 2024-11-21T17:47:07.483972Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:07.483974Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) is now active 2024-11-21T17:47:07.483977Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) enqueue tx 2024-11-21T17:47:07.483980Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) starts new tx 2024-11-21T17:47:07.483994Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 2024-11-21T17:47:07.483999Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2024-11-21T17:47:07.484017Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:07.484030Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: WRONG_REQUEST Reason: 
"Unknown node" } NodeId: 1024 } 2024-11-21T17:47:07.484035Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) completed tx 2024-11-21T17:47:07.484037Z node 1 :NODE_BROKER TRACE: TTxProcessor(1024) unlink from parent 2024-11-21T17:47:07.484039Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1024 2024-11-21T17:47:07.484041Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T17:47:07.484082Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:573:2209], Recipient [1:533:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.484099Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:522:2178], Recipient [1:533:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host14" Port: 13 ResolveHost: "host14" Address: "host14" Location { DataCenter: "13" Module: "13" Rack: "13" Unit: "13" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:07.484102Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.484107Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host14" Port: 13 ResolveHost: "host14" Address: "host14" Location { DataCenter: "13" Module: "13" Rack: "13" Unit: "13" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:07.484673Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.484692Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host14" Port: 13 ResolveHost: "host14" Address: "host14" Location { DataCenter: "13" Module: "13" Rack: "13" Unit: "13" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:07.484709Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:574:2184], Recipient [1:533:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.484714Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.484720Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.484723Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.484732Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.484737Z node 1 :NODE_BROKER DEBUG: Registration request from host14:13 (not fixed) tenant: dc-1 2024-11-21T17:47:07.484815Z node 1 :NODE_BROKER DEBUG: Adding node #1024 host14:13 to database resolvehost=host14 address=host14 dc=13 location=DC=13/M=13/R=13/U=13/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2024-11-21T17:47:07.484842Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=2 2024-11-21T17:47:07.495718Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.495739Z node 1 :NODE_BROKER DEBUG: Added node #1024 host14:13 2024-11-21T17:47:07.495743Z node 1 
:NODE_BROKER DEBUG: Update current epoch version from 1 to 2 2024-11-21T17:47:07.495747Z node 1 :NODE_BROKER DEBUG: Add node #1024 host14:13 to epoch cache 2024-11-21T17:47:07.495801Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host14" Port: 13 ResolveHost: "host14" Address: "host14" Location { DataCenter: "13" Module: "13" Rack: "13" Unit: "13" } Expire: 7200024000 Name: "slot-0" } 2024-11-21T17:47:07.495809Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.495903Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:586:2215], Recipient [1:533:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:07.495925Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:522:2178], Recipient [1:533:2184]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host6" Port: 5 ResolveHost: "host6" Address: "host6" Location { DataCenter: "5" Module: "5" Rack: "5" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:07.495929Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:07.495935Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host6" Port: 5 ResolveHost: "host6" Address: "host6" Location { DataCenter: "5" Module: "5" Rack: "5" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:07.496014Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:07.496025Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host6" Port: 5 ResolveHost: "host6" Address: "host6" Location { DataCenter: "5" Module: "5" Rack: "5" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:07.496035Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:587:2184], Recipient [1:533:2184]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.496038Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:07.496040Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:07.496043Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:07.496051Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:07.496054Z node 1 :NODE_BROKER DEBUG: Registration request from host6:5 (not fixed) tenant: dc-1 2024-11-21T17:47:07.496073Z node 1 :NODE_BROKER DEBUG: Adding node #1025 host6:5 to database resolvehost=host6 address=host6 dc=5 location=DC=5/M=5/R=5/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2024-11-21T17:47:07.496110Z node 1 :NODE_BROKER DEBUG: Update epoch version in database version=3 2024-11-21T17:47:07.506881Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:07.506900Z node 
1 :NODE_BROKER DEBUG: Added node #1025 host6:5 2024-11-21T17:47:07.506905Z node 1 :NODE_BROKER DEBUG: Update current epoch version from 2 to 3 2024-11-21T17:47:07.506908Z node 1 :NODE_BROKER DEBUG: Add node #1025 host6:5 to epoch cache 2024-11-21T17:47:07.506958Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host6" Port: 5 ResolveHost: "host6" Address: "host6" Location { DataCenter: "5" Module: "5" Rack: "5" Unit: "5" } Expire: 7200024000 Name: "slot-1" } 2024-11-21T17:47:07.506968Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:07.507058Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:592:2220], Recipient [1:533:2184]: NKikimr::TEvTablet ... now locked by children 2024-11-21T17:47:55.910202Z node 1 :NODE_BROKER TRACE: TTxProcessor(1032) is now active 2024-11-21T17:47:55.910205Z node 1 :NODE_BROKER TRACE: TTxProcessor(1032) enqueue tx 2024-11-21T17:47:55.910208Z node 1 :NODE_BROKER TRACE: TTxProcessor(1032) starts new tx 2024-11-21T17:47:55.910226Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1032 2024-11-21T17:47:55.910233Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Node has expired 2024-11-21T17:47:55.910249Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:55.910263Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: WRONG_REQUEST Reason: "Node has expired" } NodeId: 1032 } 2024-11-21T17:47:55.910268Z node 1 :NODE_BROKER TRACE: TTxProcessor(1032) completed tx 2024-11-21T17:47:55.910271Z node 1 :NODE_BROKER TRACE: TTxProcessor(1032) unlink from parent 2024-11-21T17:47:55.910274Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1032 2024-11-21T17:47:55.910278Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T17:47:55.910700Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22746:18131], Recipient [1:22589:18015]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:55.910731Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:522:2178], Recipient [1:22589:18015]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host6" Port: 5 ResolveHost: "host6" Address: "host6" Location { DataCenter: "5" Module: "5" Rack: "5" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:55.910735Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:55.910741Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host6" Port: 5 ResolveHost: "host6" Address: "host6" Location { DataCenter: "5" Module: "5" Rack: "5" Unit: "5" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:55.910821Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:55.910830Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host6" Port: 5 
ResolveHost: "host6" Address: "host6" Location { DataCenter: "5" Module: "5" Rack: "5" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:55.910839Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:22747:18015], Recipient [1:22589:18015]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:55.910842Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:55.910845Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:55.910847Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:55.910852Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:55.910857Z node 1 :NODE_BROKER DEBUG: Registration request from host6:5 (not fixed) tenant: dc-1 2024-11-21T17:47:55.910864Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2024-11-21T17:47:55.910871Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:55.910876Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } 2024-11-21T17:47:55.910879Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx 2024-11-21T17:47:55.911215Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22750:18134], Recipient [1:22589:18015]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:55.911233Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:522:2178], Recipient [1:22589:18015]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1033 } 2024-11-21T17:47:55.911237Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:55.911241Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1033 2024-11-21T17:47:55.911244Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:55.911247Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) is now active 2024-11-21T17:47:55.911250Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) enqueue tx 2024-11-21T17:47:55.911252Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) starts new tx 2024-11-21T17:47:55.911257Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1033 2024-11-21T17:47:55.911263Z node 1 :NODE_BROKER DEBUG: Update node #1033 host5:4 lease in database lease=3 expire=1970-01-07T18:00:00.024000Z 2024-11-21T17:47:55.922416Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:55.922459Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1033 Expire: 583200024000 Epoch { Id: 161 Version: 483 Start: 576000024000 End: 579600024000 NextEnd: 583200024000 } } 2024-11-21T17:47:55.922474Z node 1 :NODE_BROKER DEBUG: Extended lease of #1033 host5:4 up to Wed, 07 Jan 1970 18:00:00 UTC (lease 3) 2024-11-21T17:47:55.922478Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) completed tx 2024-11-21T17:47:55.922481Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) unlink from parent 2024-11-21T17:47:55.922484Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1033 2024-11-21T17:47:55.922486Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T17:47:55.923004Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22754:18138], 
Recipient [1:22589:18015]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:55.923017Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:522:2178], Recipient [1:22589:18015]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1033 } 2024-11-21T17:47:55.923021Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2024-11-21T17:47:55.923024Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) creating sub-processor 1033 2024-11-21T17:47:55.923028Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now locked by children 2024-11-21T17:47:55.923030Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) is now active 2024-11-21T17:47:55.923032Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) enqueue tx 2024-11-21T17:47:55.923035Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) starts new tx 2024-11-21T17:47:55.923043Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1033 2024-11-21T17:47:55.923048Z node 1 :NODE_BROKER DEBUG: Update node #1033 host5:4 lease in database lease=4 expire=1970-01-07T18:00:00.024000Z 2024-11-21T17:47:55.935219Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2024-11-21T17:47:55.935262Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1033 Expire: 583200024000 Epoch { Id: 161 Version: 483 Start: 576000024000 End: 579600024000 NextEnd: 583200024000 } } 2024-11-21T17:47:55.935280Z node 1 :NODE_BROKER DEBUG: Extended lease of #1033 host5:4 up to Wed, 07 Jan 1970 18:00:00 UTC (lease 4) 2024-11-21T17:47:55.935284Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) completed tx 2024-11-21T17:47:55.935286Z node 1 :NODE_BROKER TRACE: TTxProcessor(1033) unlink from parent 2024-11-21T17:47:55.935289Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) removing sub-processor 1033 2024-11-21T17:47:55.935291Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) is now active 2024-11-21T17:47:55.935803Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22758:18142], Recipient [1:22589:18015]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:55.935822Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:522:2178], Recipient [1:22589:18015]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1032 } 2024-11-21T17:47:55.935827Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2024-11-21T17:47:55.935838Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2024-11-21T17:47:55.936159Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:22760:18144], Recipient [1:22589:18015]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:55.936181Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:522:2178], Recipient [1:22589:18015]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: "7" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2024-11-21T17:47:55.936184Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2024-11-21T17:47:55.936189Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: 
"7" Unit: "7" } FixedNodeId: false Path: "dc-1" 2024-11-21T17:47:55.936285Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T17:47:55.936299Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: "7" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2024-11-21T17:47:55.936312Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:22761:18015], Recipient [1:22589:18015]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:55.936315Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2024-11-21T17:47:55.936318Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) enqueue tx 2024-11-21T17:47:55.936320Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) starts new tx 2024-11-21T17:47:55.936331Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2024-11-21T17:47:55.936334Z node 1 :NODE_BROKER DEBUG: Registration request from host8:7 (not fixed) tenant: dc-1 2024-11-21T17:47:55.936357Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2024-11-21T17:47:55.936378Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host8" Port: 7 ResolveHost: "host8" Address: "host8" Location { DataCenter: "7" Module: "7" Rack: "7" Unit: "7" } Expire: 583200024000 Name: "slot-3" } 2024-11-21T17:47:55.936382Z node 1 :NODE_BROKER TRACE: TTxProcessor(root) completed tx |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TxUsage::WriteToTopic_Demo_27 [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_28 >> TProxyActorTest::TestCreateSemaphoreInterrupted >> KqpIndexes::UpdateOnReadColumns [GOOD] >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit >> LocalPartition::DiscoveryServiceBadNodeId [GOOD] >> LocalPartition::WithoutPartition |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 6779, MsgBus: 1425 2024-11-21T17:47:51.538554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790759152104321:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:51.538653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/0021c7/r3tmp/tmpCZRp3X/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6779, node 1 2024-11-21T17:47:51.593553Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:47:51.597693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:51.597704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:51.597705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:51.597740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1425 TClient is connected to server localhost:1425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:51.638082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:51.638108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:51.639225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:51.639356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.651496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.712703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.730312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.742406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:51.893839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790759152105720:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.893886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:51.931840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.943344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.954039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.974910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.984213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:51.997082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:52.016691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790763447073528:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.016718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.016868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790763447073533:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.017763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:52.022796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790763447073535:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:52.265710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13279, MsgBus: 63074 2024-11-21T17:47:52.672422Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790763678103477:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021c7/r3tmp/tmpdCGkBa/pdisk_1.dat 2024-11-21T17:47:52.681677Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 13279, node 2 2024-11-21T17:47:52.691969Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:52.691982Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:52.691984Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:52.692019Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:52.692023Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63074 2024-11-21T17:47:52.774208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:52.774241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:52.775296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:52.785967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.788782Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:52.791756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:52.807632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.842596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.868103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:52.973452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790763678104848:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:52.973479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_ ... VICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439790781364321535:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:56.573000Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:56.576442Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.588657Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.643797Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.657999Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.671830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.685495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.694003Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439790781364322044:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:56.694023Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:56.694044Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439790781364322049:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:56.694787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:56.698902Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439790781364322051:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:56.854105Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.861595Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:56.875666Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10778, MsgBus: 29784 2024-11-21T17:47:57.334939Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439790784528354277:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:57.334972Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021c7/r3tmp/tmph8MiIf/pdisk_1.dat 2024-11-21T17:47:57.347962Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10778, node 6 2024-11-21T17:47:57.355511Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:57.355524Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:57.355526Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:57.355560Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29784 TClient is connected to server localhost:29784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:57.435117Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:57.435154Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:57.436136Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:57.437854Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:57.439670Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:47:57.444288Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:57.453985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:57.474261Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:57.484217Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:57.607747Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439790784528355813:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:57.607775Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:57.612580Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.618831Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.630276Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.637754Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.693552Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.700600Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.716164Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439790784528356329:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:57.716203Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:57.716206Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439790784528356334:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:57.716870Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:57.720494Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439790784528356336:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:57.899959Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.906455Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:47:57.918116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: 2024-11-21T17:45:08.960076Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790060673069611:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:08.960111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020ea/r3tmp/tmpDyLb7U/pdisk_1.dat 2024-11-21T17:45:09.529345Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:09.533114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:09.533132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:09.538459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11292, node 1 2024-11-21T17:45:09.604399Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:09.604410Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:09.604412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:09.604443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21330 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:45:09.776370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.777343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:45:09.777365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:45:09.780519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:45:09.780572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:45:09.780576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:45:09.784499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:45:09.784507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:45:09.784924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:45:09.785591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211109832, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:45:09.785595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:45:09.785648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:45:09.785955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:45:09.785987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:45:09.785995Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:45:09.786003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:45:09.786011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:45:09.786022Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T17:45:09.786500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:45:09.786510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:45:09.786513Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:45:09.786523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T17:45:09.788579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:45:13.964365Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790060673069611:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:45:13.964400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:45:24.501107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:45:24.501124Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:09.532621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2024-11-21T17:46:09.532666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2024-11-21T17:46:09.532690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 2024-11-21T17:47:09.532916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2024-11-21T17:47:09.532977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2024-11-21T17:47:09.533011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2024-11-21T17:47:33.785760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790683665252121:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:33.785936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:33.790961Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790683698885887:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:33.810136Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:33.810576Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:33.811513Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000c69/r3tmp/tmpV9RRXz/pdisk_1.dat 2024-11-21T17:47:33.839466Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16962, node 1 2024-11-21T17:47:33.862500Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000c69/r3tmp/yandexYFqP8J.tmp 2024-11-21T17:47:33.862516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000c69/r3tmp/yandexYFqP8J.tmp 2024-11-21T17:47:33.862575Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000c69/r3tmp/yandexYFqP8J.tmp 2024-11-21T17:47:33.862608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:33.867603Z INFO: TTestServer started on Port 4179 GrpcPort 16962 TClient is connected to server localhost:4179 PQClient connected to localhost:16962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:33.886937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:33.886970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:33.889332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:33.892654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:33.911603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:33.914676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:33.914699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:33.915735Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:47:33.916100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:47:33.988419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:34.103768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790687960220445:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.103795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790687960220454:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.103802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.104578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:34.104632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790687960220488:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.104653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:34.109824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790687960220459:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:34.137497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.194825Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790687960220618:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:34.194950Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDgxYzFkNzQtZGQ2ZDQzNzQtZTRmOGI0MmEtZDk3M2Q3MjA=, ActorId: [1:7439790687960220442:2303], ActorState: ExecuteState, TraceId: 01jd7xc8tq07mb4fbpd31js8ea, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:34.195475Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:34.199509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:34.230674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:47:34.281145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd7xc8zcb6z64wb34bh0ttb1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJiODYzMy1jMjhiOTM0LTMyNzRlN2I3LTIxNGM5YmI5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790687960220902:3024] 2024-11-21T17:47:38.788285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790683665252121:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:38.788320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:47:38.789589Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790683698885887:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:38.789625Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:47:39.371693Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790683665252437:2188], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:39.371792Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790683665252437:2188], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2024-11-21T17:47:39.371824Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790683665252437:2188], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439790683665252787:2426] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211253941 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:47:39.371851Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439790683665252437:2188], cacheItem# { Subscriber: { Subscriber: [1:7439790683665252787:2426] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211253941 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, ... 
},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"50"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"500"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"5000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.partition_throttled_milliseconds","bin":"999999"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.write.uncompressed_bytes"},"value":0,"kind":"RATE"}]} ===Request counters with query: /counters/counters=datastreams/database=%2FRoot/cloud_id=somecloud/folder_id=somefolder/database_id=root/topic=account2%2Ftopic2/consumer=some@random@consumer/json counters: {"sensors":[{"labels":{"name":"api.grpc.topic.stream_read.bytes"},"value":0,"kind":"RATE"},{"labels":{"name":"api.grpc.topic.stream_read.messages"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.bytes"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"100"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"1000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"10000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"180000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"200"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"2000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"30000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"500"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"5000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"60000"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.lag_milliseconds","bin":"999999"},"value":0,"kind":"RATE"},{"labels":{"name":"topic.read.messages"},"value":0,"kind":"RATE"}]} ===Request counters with query: /counters/counters=pqproxy/subsystem=userAgents/json counters: {"sensors":[{"labels":{"sensor":"BytesReadByUserAgent","consumer":"some@random@consumer","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":396,"kind":"RATE"},{"labels":{"topic":"\/Root\/account2\/topic2","sensor":"BytesWrittenByUserAgent","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":460,"kind":"RATE"}]} ===Request counters with query: /counters/counters=pqproxy/subsystem=userAgents/json counters: {"sensors":[{"labels":{"sensor":"BytesReadByUserAgent","consumer":"some@random@consumer","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":396,"kind":"RATE"},{"labels":{"topic":"\/Root\/account2\/topic2","sensor":"BytesWrittenByUserAgent","sdk_build_info":"ydb-cpp-sdk\/2.6.2","protocol":"topic","user_agent":"test-client\/v0.1","host":""},"value":460,"kind":"RATE"}]} 2024-11-21T17:47:58.530453Z :INFO: [/Root] [/Root] [61c41575-89b9481d-8ee63e0f-ec0a4f2] Closing read session. 
Close timeout: 0.000000s 2024-11-21T17:47:58.530468Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:account2/topic2:4:2:0:0 -:account2/topic2:3:3:0:0 -:account2/topic2:2:1:0:0 -:account2/topic2:1:4:0:0 -:account2/topic2:0:5:3:0 2024-11-21T17:47:58.530475Z :INFO: [/Root] [/Root] [61c41575-89b9481d-8ee63e0f-ec0a4f2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 40 MessagesRead: 4 BytesReadCompressed: 40 BytesInflightUncompressed: 30 BytesInflightCompressed: 0 BytesInflightTotal: 30 MessagesInflight: 3 } 2024-11-21T17:47:58.530495Z :NOTICE: [/Root] [/Root] [61c41575-89b9481d-8ee63e0f-ec0a4f2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:47:58.530504Z :DEBUG: [/Root] [/Root] [61c41575-89b9481d-8ee63e0f-ec0a4f2] [] Returning serverBytesSize = 0 to budget 2024-11-21T17:47:58.530530Z :DEBUG: [/Root] [/Root] [61c41575-89b9481d-8ee63e0f-ec0a4f2] [] Abort session to cluster 2024-11-21T17:47:58.530663Z :NOTICE: [/Root] [/Root] [61c41575-89b9481d-8ee63e0f-ec0a4f2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:47:58.530811Z :INFO: [/Root] SessionId [123|4d68de62-9f33c749-28f3de4a-101cc77f_0] MessageGroupId [123] Write session: close. Timeout 0.000000s 2024-11-21T17:47:58.530817Z :INFO: [/Root] SessionId [123|4d68de62-9f33c749-28f3de4a-101cc77f_0] MessageGroupId [123] Write session will now close 2024-11-21T17:47:58.530822Z :DEBUG: [/Root] SessionId [123|4d68de62-9f33c749-28f3de4a-101cc77f_0] MessageGroupId [123] Write session: aborting 2024-11-21T17:47:58.530858Z :INFO: [/Root] SessionId [123|4d68de62-9f33c749-28f3de4a-101cc77f_0] MessageGroupId [123] Write session: gracefully shut down, all writes complete 2024-11-21T17:47:58.530862Z :DEBUG: [/Root] SessionId [123|4d68de62-9f33c749-28f3de4a-101cc77f_0] MessageGroupId [123] Write session: destroy 2024-11-21T17:47:58.530857Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11665436092457284701_v1 grpc read done: success# 0, data# { } 2024-11-21T17:47:58.530867Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11665436092457284701_v1 grpc read failed 2024-11-21T17:47:58.530871Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11665436092457284701_v1 grpc closed 2024-11-21T17:47:58.530888Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_11665436092457284701_v1 is DEAD 2024-11-21T17:47:58.530979Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:58.530987Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.530996Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7439790790573779155:2462] destroyed 2024-11-21T17:47:58.530999Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:58.531001Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531003Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7439790790573779152:2461] destroyed 2024-11-21T17:47:58.531012Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531014Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531063Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:58.531067Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:58.531075Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531082Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531084Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7439790790573779157:2463] destroyed 2024-11-21T17:47:58.531088Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7439790790573779151:2460] destroyed 2024-11-21T17:47:58.531092Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] 
Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:58.531095Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531117Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [7:7439790790573779145:2456] disconnected; active server actors: 1 2024-11-21T17:47:58.531098Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7439790790573779148:2459] destroyed 2024-11-21T17:47:58.531125Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [7:7439790790573779145:2456] client some@random@consumer disconnected session some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531099Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531103Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531105Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11665436092457284701_v1 2024-11-21T17:47:58.531151Z node 7 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 123|4d68de62-9f33c749-28f3de4a-101cc77f_0 grpc read done: success: 0 data: 2024-11-21T17:47:58.531155Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|4d68de62-9f33c749-28f3de4a-101cc77f_0 grpc read failed 2024-11-21T17:47:58.531158Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|4d68de62-9f33c749-28f3de4a-101cc77f_0 grpc closed 2024-11-21T17:47:58.531160Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|4d68de62-9f33c749-28f3de4a-101cc77f_0 is DEAD 2024-11-21T17:47:58.531328Z node 7 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:47:58.531411Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:47:58.531429Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7439790790573779139:2452] destroyed 2024-11-21T17:47:58.531439Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:47:58.843758Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7439790764803973084:2114], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:47:58.843807Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7439790764803973084:2114], cacheItem# { Subscriber: { Subscriber: [7:7439790769098941702:3045] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:47:58.843877Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7439790790573779176:3761], recipient# [7:7439790790573779175:2464], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TSequence::CreateSequence >> TProxyActorTest::TestAttachSession >> TSequence::CreateSequenceParallel >> TProxyActorTest::TestCreateSemaphore >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> 
TSequence::CreateSequencesWithIndexedTable |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 26482, MsgBus: 15293 2024-11-21T17:47:11.280414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790588600634331:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:11.280435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001734/r3tmp/tmp9zCZlJ/pdisk_1.dat 2024-11-21T17:47:11.330226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26482, node 1 2024-11-21T17:47:11.344818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:11.344830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:11.344831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:47:11.344862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15293 2024-11-21T17:47:11.380765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:11.380793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:15293 2024-11-21T17:47:11.381889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:11.412516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:11.424745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.497884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.524348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.538487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:11.620924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588600635862:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.620951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.649312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.657874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.670681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.682331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.688877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.696267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:11.718817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588600636365:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.718857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.718914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790588600636370:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:11.719577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:11.723022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790588600636372:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:47:11.967517Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232010, txId: 281474976715672] shutting down 2024-11-21T17:47:12.013931Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232059, txId: 281474976715675] shutting down 2024-11-21T17:47:12.052937Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232094, txId: 281474976715678] shutting down 2024-11-21T17:47:12.093708Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232136, txId: 281474976715681] shutting down 2024-11-21T17:47:12.134965Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232178, txId: 281474976715684] shutting down 2024-11-21T17:47:12.176855Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232220, txId: 281474976715687] shutting down 2024-11-21T17:47:12.216422Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232255, txId: 281474976715690] shutting down 2024-11-21T17:47:12.253954Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232297, txId: 281474976715693] shutting down 2024-11-21T17:47:12.294603Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232339, txId: 281474976715696] shutting down 2024-11-21T17:47:12.336740Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232381, txId: 281474976715699] shutting down 2024-11-21T17:47:12.375391Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232416, txId: 281474976715702] shutting down 2024-11-21T17:47:12.416812Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232458, txId: 281474976715705] shutting down 2024-11-21T17:47:12.454678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232500, txId: 281474976715708] shutting down 2024-11-21T17:47:12.488659Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232535, txId: 281474976715711] shutting down 2024-11-21T17:47:12.525409Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232570, txId: 281474976715714] shutting down 2024-11-21T17:47:12.565629Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232605, txId: 281474976715717] shutting down 2024-11-21T17:47:12.606076Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232647, txId: 281474976715720] shutting down 2024-11-21T17:47:12.655920Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232696, txId: 281474976715723] shutting down 2024-11-21T17:47:12.699055Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232738, txId: 281474976715726] shutting down 2024-11-21T17:47:12.737482Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732211232780, txId: 281474976715729] shutting down 2024-11-21T17:47:12.780447Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232822, txId: 281474976715732] shutting down 2024-11-21T17:47:12.822898Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232864, txId: 281474976715735] shutting down 2024-11-21T17:47:12.863249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232906, txId: 281474976715738] shutting down 2024-11-21T17:47:12.909730Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232955, txId: 281474976715741] shutting down 2024-11-21T17:47:12.949471Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211232990, txId: 281474976715744] shutting down 2024-11-21T17:47:12.984263Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233025, txId: 281474976715747] shutting down 2024-11-21T17:47:13.017326Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211233060, txId: 281474976715750] shut ... ting down 2024-11-21T17:47:58.942443Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211278980, txId: 281474976718597] shutting down 2024-11-21T17:47:58.986108Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279022, txId: 281474976718600] shutting down 2024-11-21T17:47:59.026973Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279064, txId: 281474976718603] shutting down 2024-11-21T17:47:59.071116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279113, txId: 281474976718606] shutting down 2024-11-21T17:47:59.118078Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279155, txId: 281474976718609] shutting down 2024-11-21T17:47:59.173892Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279211, txId: 281474976718612] shutting down 2024-11-21T17:47:59.231178Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279267, txId: 281474976718615] shutting down 2024-11-21T17:47:59.280193Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279316, txId: 281474976718618] shutting down 2024-11-21T17:47:59.330562Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279365, txId: 281474976718621] shutting down 2024-11-21T17:47:59.384953Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279421, txId: 281474976718624] shutting down 2024-11-21T17:47:59.433653Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279470, txId: 281474976718627] shutting down 2024-11-21T17:47:59.486915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279526, txId: 281474976718630] shutting down 2024-11-21T17:47:59.539483Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732211279575, txId: 281474976718633] shutting down 2024-11-21T17:47:59.592468Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279631, txId: 281474976718636] shutting down 2024-11-21T17:47:59.635812Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279673, txId: 281474976718639] shutting down 2024-11-21T17:47:59.685313Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279722, txId: 281474976718642] shutting down 2024-11-21T17:47:59.727452Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279771, txId: 281474976718645] shutting down 2024-11-21T17:47:59.770346Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279813, txId: 281474976718648] shutting down 2024-11-21T17:47:59.813461Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279855, txId: 281474976718651] shutting down 2024-11-21T17:47:59.859839Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279897, txId: 281474976718654] shutting down 2024-11-21T17:47:59.903045Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279939, txId: 281474976718657] shutting down 2024-11-21T17:47:59.943299Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211279981, txId: 281474976718660] shutting down 2024-11-21T17:47:59.988355Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211280030, txId: 281474976718663] shutting down 2024-11-21T17:48:00.041502Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211280079, txId: 281474976718666] shutting down 2024-11-21T17:48:00.097157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211280135, txId: 281474976718669] shutting down Trying to start YDB, gRPC: 25506, MsgBus: 5197 2024-11-21T17:48:00.458757Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790799300424307:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:00.458791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001734/r3tmp/tmpcLEAaV/pdisk_1.dat 2024-11-21T17:48:00.470464Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25506, node 2 2024-11-21T17:48:00.478864Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:00.478895Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:00.478897Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:00.478936Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5197 TClient is connected to server localhost:5197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:00.559457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:00.559517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:00.560583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:00.562357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:00.564572Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:00.574571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:00.584833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:00.604905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:00.629067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:00.724520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790799300425836:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:00.724547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:00.729795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:00.736932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:00.745706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:00.759855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:00.773905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:00.787851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:00.803227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790799300426341:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:00.803251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:00.803281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790799300426346:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:00.803896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:00.807494Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790799300426348:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:01.090582Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211281129, txId: 281474976715671] shutting down >> TTablesWithReboots::CopyWithRebootsAtCommit >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex |78.1%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T17:46:59.020590Z :WriteRAW INFO: Random seed for debugging is 1732211219020583 2024-11-21T17:46:59.120335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790536623295978:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:59.120427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:59.122987Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790538230713519:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:59.123014Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d4b/r3tmp/tmphtbqbd/pdisk_1.dat 2024-11-21T17:46:59.146766Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:59.152393Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:59.179714Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27164, node 1 2024-11-21T17:46:59.195911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000d4b/r3tmp/yandexxcfDa1.tmp 2024-11-21T17:46:59.195922Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000d4b/r3tmp/yandexxcfDa1.tmp 2024-11-21T17:46:59.195982Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000d4b/r3tmp/yandexxcfDa1.tmp 2024-11-21T17:46:59.196021Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:59.200861Z INFO: TTestServer started on Port 22866 GrpcPort 27164 TClient is connected to server localhost:22866 PQClient connected to localhost:27164 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:59.220093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:59.220118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:59.221845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:59.222920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... 2024-11-21T17:46:59.247176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:59.247200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:59.248725Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:59.248946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:46:59.420616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790538230713677:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:59.420641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790538230713659:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:59.420650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:59.421892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T17:46:59.426276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790538230713681:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T17:46:59.491510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:59.494087Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790536623296799:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:59.494202Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjYyODZkNWUtY2EyNjU4NmEtMmZiZTY1MDAtZjBiYTJkYmU=, ActorId: [1:7439790536623296758:2299], ActorState: ExecuteState, TraceId: 01jd7xb6zw9r2hnvey74yf8g6n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:59.494728Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:59.531308Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790538230713761:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:59.531421Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjFlNzNkMmMtNjRlOWU1ZTktNGViMjI2NDItZDNiZDU0MjE=, ActorId: [2:7439790538230713650:2277], ActorState: ExecuteState, TraceId: 01jd7xb6yw67rd6rfm7rs8q1e5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:59.531588Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:59.561203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:59.627916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27164", true, true, 1000); 2024-11-21T17:46:59.656541Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd7xb75tcwwken52szyb7zhx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU2OTA2MDYtZGViYWJiMzQtY2U1MjdjYjYtMzk4NGVjMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790536623297177:2922] 2024-11-21T17:47:04.120286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790536623295978:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:04.120319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:47:04.123366Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790538230713519:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:04.123392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:05.674353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:27164 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:47:05.684895Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:27164 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: ... Y INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:41848 2024-11-21T17:48:01.268788Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:41848 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:48:01.268791Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:48:01.269190Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:48:01.269224Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:48:01.269226Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:48:01.269227Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:48:01.269258Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:48:01.269776Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:48:01.284966Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:48:01.285072Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439790803361549264:2469] connected; active server actors: 1 2024-11-21T17:48:01.285091Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:48:01.285095Z node 15 :PQ_PARTITION_CHOOSER DEBUG: 
TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:48:01.285163Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439790803361549264:2469] disconnected; active server actors: 1 2024-11-21T17:48:01.285165Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439790803361549264:2469] disconnected no session 2024-11-21T17:48:01.299688Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:01.299711Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7439790803361549282:2469], now have 1 active actors on pipe 2024-11-21T17:48:01.299825Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:48:01.299839Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:48:01.299881Z node 16 :PERSQUEUE INFO: new Cookie src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:48:01.299917Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:48:01.299949Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:48:01.300148Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:48:01.300156Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:48:01.300174Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:48:01.299399Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:48:01.299416Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:48:01.299419Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790803361549234:2469] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:48:01.299436Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:48:01.299733Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2024-11-21T17:48:01.300305Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0 2024-11-21T17:48:01.300749Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211281300 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:01.300785Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. 
Init response: session_id: "src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:48:01.300962Z :INFO: [] MessageGroupId [src] SessionId [src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0] Write session: close. Timeout = 0 ms 2024-11-21T17:48:01.300974Z :INFO: [] MessageGroupId [src] SessionId [src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0] Write session will now close 2024-11-21T17:48:01.300980Z :DEBUG: [] MessageGroupId [src] SessionId [src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0] Write session: aborting 2024-11-21T17:48:01.301156Z :INFO: [] MessageGroupId [src] SessionId [src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:48:01.301160Z :DEBUG: [] MessageGroupId [src] SessionId [src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0] Write session: destroy 2024-11-21T17:48:01.301410Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0 grpc read done: success: 0 data: 2024-11-21T17:48:01.301421Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0 grpc read failed 2024-11-21T17:48:01.301500Z node 15 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0 2024-11-21T17:48:01.301515Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|20c9cfc1-d6dd56de-39468b1d-6e13a766_0 is DEAD 2024-11-21T17:48:01.301628Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:01.301734Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:01.301751Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7439790803361549282:2469] destroyed 2024-11-21T17:48:01.301761Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:48:01.307896Z :INFO: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Starting read session 2024-11-21T17:48:01.307909Z :DEBUG: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Starting cluster discovery 2024-11-21T17:48:01.307948Z :INFO: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18906: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:18906
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:18906. " 2024-11-21T17:48:01.307952Z :DEBUG: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Restart cluster discovery in 0.008395s 2024-11-21T17:48:01.318228Z :DEBUG: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Starting cluster discovery 2024-11-21T17:48:01.318326Z :INFO: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18906: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:18906
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:18906. " 2024-11-21T17:48:01.318331Z :DEBUG: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Restart cluster discovery in 0.016075s 2024-11-21T17:48:01.335385Z :DEBUG: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Starting cluster discovery 2024-11-21T17:48:01.335450Z :INFO: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18906: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:18906
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:18906. " 2024-11-21T17:48:01.335456Z :DEBUG: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Restart cluster discovery in 0.029118s 2024-11-21T17:48:01.365383Z :DEBUG: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Starting cluster discovery 2024-11-21T17:48:01.365473Z :NOTICE: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18906: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:18906
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:18906. " } 2024-11-21T17:48:01.365552Z :NOTICE: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:18906: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:18906
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:18906. " } 2024-11-21T17:48:01.365596Z :INFO: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:01.365607Z :NOTICE: [/Root] [/Root] [43a8d1c-e8cf191-dc90f8aa-6f623fa5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TTablesWithReboots::CopyTableAndDropWithReboots2 >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TRestoreWithRebootsTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Zstd] |78.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> Describe::DescribePartitionPermissions [GOOD] >> LocalPartition::Basic |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight [GOOD] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:00.125382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:00.125399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:00.125402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:00.125404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:00.125414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:00.125416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:00.125422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:00.125472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:00.132136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:00.132152Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:00.134406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:00.134947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:00.134983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:00.136101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:00.136267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:00.136334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:00.136379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:00.137264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:00.137469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:00.137476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:00.137506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:00.137512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:00.137518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:00.137530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.138478Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:00.151140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:00.151217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.151269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:00.151302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:00.151307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.151967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:00.151982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:00.152011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.152017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:00.152020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:00.152022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:00.152329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.152336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:00.152339Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:00.152686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.152694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.152697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:00.152702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:00.153113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:00.153455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:00.153491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:00.153611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:00.153627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:00.153637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:00.153683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:00.153689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:00.153709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:00.153718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:00.154073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:00.154078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:00.154106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:00.154110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:00.154176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.154180Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:00.154188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:00.154190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:00.154194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:00.154197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:00.154200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:00.154202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:00.154210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:00.154213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:00.154216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:00.154431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:00.154442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:00.154446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:00.154451Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:00.154455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:00.154468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:48:01.993298Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:48:01.993300Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:48:01.993302Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:48:01.993305Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:01.993313Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2024-11-21T17:48:01.993315Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:01.993410Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.993414Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2024-11-21T17:48:01.993424Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:335:2315] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T17:48:01.993471Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:48:01.993474Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:48:01.993477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T17:48:01.993481Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:01.993526Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:01.993542Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:01.993545Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2024-11-21T17:48:01.993547Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2024-11-21T17:48:01.993550Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2024-11-21T17:48:01.993608Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.993613Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2024-11-21T17:48:01.993618Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:338:2317] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T17:48:01.993823Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:48:01.993831Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 
2024-11-21T17:48:01.993838Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.993852Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:01.993892Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:48:01.993907Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:01.993909Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2024-11-21T17:48:01.993911Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T17:48:01.993915Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2024-11-21T17:48:01.993924Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:419:2374] message: TxId: 102 2024-11-21T17:48:01.993936Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T17:48:01.993941Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:48:01.993944Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:48:01.993957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:48:01.993960Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2024-11-21T17:48:01.993962Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2024-11-21T17:48:01.993964Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:01.993966Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2024-11-21T17:48:01.993968Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2024-11-21T17:48:01.993972Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:48:01.993975Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2024-11-21T17:48:01.993977Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2024-11-21T17:48:01.993981Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:48:01.994046Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:01.994048Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994071Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2024-11-21T17:48:01.994074Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2024-11-21T17:48:01.994080Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2024-11-21T17:48:01.994083Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:48:01.994090Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:48:01.994179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:01.994182Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:01.994192Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994195Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:01.994197Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:01.994202Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:01.994512Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994536Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994579Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:01.994589Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:419:2374] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T17:48:01.994612Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:48:01.994615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:513:2467] 2024-11-21T17:48:01.994816Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:515:2469], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:01.994825Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:01.994829Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T17:48:01.994912Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2024-11-21T17:48:01.994997Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:592:2546], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:48:01.995003Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:48:01.995013Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:01.995042Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 27us result status StatusPathDoesNotExist 2024-11-21T17:48:01.995068Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSequence::AlterTableSetDefaultFromSequence [GOOD] |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |78.2%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_38 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:00.079299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:00.079327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:00.079332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:00.079336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:00.079350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2024-11-21T17:48:00.079353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:00.079362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:00.079441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:00.089747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:00.089769Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:00.092295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:00.092865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:00.092900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:00.094191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:00.094335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:00.094404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:00.094449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:00.095261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:00.095493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:00.095500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:00.095532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:00.095540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:00.095545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:00.095554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.096559Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:00.109662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:00.109726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.109776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:00.109826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:00.109831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.110457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:00.110474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:00.110507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.110514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:00.110517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:00.110520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:00.110837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.110845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:00.110848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:00.111089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.111095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.111099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:00.111105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:00.111512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:00.111812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:00.111850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:00.111980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:00.112000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:00.112010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T17:48:00.112054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:00.112060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:00.112085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:00.112098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:00.112397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:00.112402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:00.112432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:00.112436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:00.112498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:00.112503Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:00.112511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:00.112514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:00.112517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:00.112521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:00.112524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:00.112526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:00.112534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:00.112538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:00.112540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:00.112741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:00.112749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:00.112752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:00.112755Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:00.112758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:00.112768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... xId: 114 2024-11-21T17:48:02.738001Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 114, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:48:02.738004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:48:02.738017Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2024-11-21T17:48:02.738020Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:02.738345Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:978:2927], Recipient [7:123:2149]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 304 } } 2024-11-21T17:48:02.738354Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:48:02.738365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 304 } } 2024-11-21T17:48:02.738370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2024-11-21T17:48:02.738387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 304 } } 2024-11-21T17:48:02.738399Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 304 } } 2024-11-21T17:48:02.738403Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:02.738612Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1038:2980], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:02.738619Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:02.738623Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:48:02.738915Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:978:2927], Recipient [7:123:2149]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 
2 2024-11-21T17:48:02.738924Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2024-11-21T17:48:02.738931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T17:48:02.738935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2024-11-21T17:48:02.738948Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T17:48:02.738955Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:02.738961Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T17:48:02.738974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:02.738977Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.738981Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:48:02.738988Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2024-11-21T17:48:02.739014Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:02.739157Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:02.739184Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2024-11-21T17:48:02.739187Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:02.739199Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2024-11-21T17:48:02.739203Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:02.739567Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.739574Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:02.739593Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.739595Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:02.739600Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2024-11-21T17:48:02.739615Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:978:2927] msg type: 269552132 msg: 
NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2024-11-21T17:48:02.739669Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:48:02.739677Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:48:02.739683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.739690Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2024-11-21T17:48:02.739701Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:02.739705Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2024-11-21T17:48:02.739709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2024-11-21T17:48:02.739714Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2024-11-21T17:48:02.739723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:392:2357] message: TxId: 114 2024-11-21T17:48:02.739729Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2024-11-21T17:48:02.739735Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2024-11-21T17:48:02.739738Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2024-11-21T17:48:02.739763Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:48:02.740107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:48:02.740121Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:392:2357] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2024-11-21T17:48:02.740152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2024-11-21T17:48:02.740158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1007:2949] 2024-11-21T17:48:02.740196Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1009:2951], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:02.740201Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:02.740205Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2024-11-21T17:48:02.740425Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1046:2988], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2024-11-21T17:48:02.740430Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:48:02.741027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: 
"/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:02.741072Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.741152Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2024-11-21T17:48:02.741208Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:48:02.741657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:02.741683Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2024-11-21T17:48:02.741688Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_39 |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_8 [GOOD] >> TTablesWithReboots::SimultaneousDropForceDrop >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit [GOOD] |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TxUsage::WriteToTopic_Demo_9 >> TTablesWithReboots::SimpleDropTableWithReboots2 >> TxUsage::WriteToTopic_Demo_23_RestartNo >> TTablesWithReboots::AlterCopyWithReboots >> TTablesWithReboots::CopyTableWithReboots |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> LocalPartition::WithoutPartition [GOOD] >> LocalPartition::WithoutPartitionWithRestart |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::DropTableWithReboots >> TTablesWithReboots::TwiceRmDirWithReboots [GOOD] |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::TwiceRmDirWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] 
sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:56.111679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:56.111699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:56.111704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:56.111708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:56.111717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:56.111720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:56.111728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:56.111807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:56.121847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:56.121871Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:56.124000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:56.124077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:56.124108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:56.126478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:56.126546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:56.126624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.126774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:56.127306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.127513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:56.127520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.127528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:56.127531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:56.127536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:56.127563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:56.128527Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:56.145700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:56.145767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.145814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:56.145853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:56.145862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.146641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.146675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:56.146722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.146734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:56.146739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:56.146744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:56.147291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.147315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:56.147320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:56.147629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.147636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:47:56.147642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.147647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.148279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:56.148716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:56.148786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:56.149041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.149067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:56.149074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.149131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:56.149137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.149164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:56.149177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:56.149652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:56.149666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:56.149706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.149710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:56.149789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.149795Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:56.149806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2024-11-21T17:47:56.149810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.149815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:56.149820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.149824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:56.149827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:56.149838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:56.149844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:56.149848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:48:04.653747Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.653763Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 118 RawX2: 150323857504 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:04.653768Z node 35 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1003:0, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:48:04.653784Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.653790Z node 35 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:04.653796Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:04.653803Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:04.653811Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:48:04.653815Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:48:04.653820Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:04.653824Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:04.653827Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:04.653833Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:04.653838Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2024-11-21T17:48:04.653842Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 
1], 9 2024-11-21T17:48:04.653844Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:48:04.654148Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.654197Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.654540Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.654552Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.654572Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:04.654592Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.654596Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [35:205:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:48:04.654600Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [35:205:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:48:04.654685Z node 35 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.654706Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.654710Z node 35 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:04.654714Z node 35 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:48:04.654717Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:04.654770Z node 35 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.654778Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.654780Z node 35 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:04.654783Z node 35 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:48:04.654786Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 1 2024-11-21T17:48:04.654794Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:48:04.654812Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:04.654816Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:48:04.654823Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:04.655155Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.655327Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:04.655343Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T17:48:04.655385Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:04.655390Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T17:48:04.655403Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:04.655405Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:04.655982Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "Victim" } } TxId: 1004 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:04.656012Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/Victim, pathId: 0, opId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.656029Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1004:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Victim', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2024-11-21T17:48:04.656165Z node 35 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:04.656255Z node 35 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:04.656595Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1004, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Victim\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 1004 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:04.656630Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1004, database: /MyRoot, subject: , status: 
StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Victim', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/Victim 2024-11-21T17:48:04.656654Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:04.656658Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [35:348:2340] 2024-11-21T17:48:04.656683Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:04.656686Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [35:348:2340] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2024-11-21T17:48:04.656758Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Victim" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:04.656779Z node 35 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Victim" took 24us result status StatusPathDoesNotExist 2024-11-21T17:48:04.656808Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Victim\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Victim" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TSubDomainTest::UserAttributes >> TSubDomainTest::CreateTablet >> ColumnShardTiers::TieringUsage [GOOD] >> TxUsage::WriteToTopic_Demo_28 [GOOD] >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> TxUsage::WriteToTopic_Demo_29 >> TSubDomainTest::CreateTabletForUnknownDomain |78.2%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> TSubDomainTest::UserAttributesApplyIf [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2024-11-21T17:45:26.607671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:45:26.608094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:45:26.608110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0021f2/r3tmp/tmpE9TwxN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9879, node 1 TClient is connected to server localhost:30052 2024-11-21T17:45:26.904407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:45:26.940940Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:26.943810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:45:26.943831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:45:26.943835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:45:26.943905Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:45:26.992357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:45:26.992629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:45:27.005033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);EXPECTATION=1;WAITING=1 2024-11-21T17:45:37.162641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:649:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.162672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.299608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2024-11-21T17:45:37.547277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:793:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.547306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.547363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:798:2639], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:45:37.548099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:45:37.715057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:800:2641], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:45:37.880184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:45:37.968536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2024-11-21T17:45:38.361051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.744761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:45:38.843670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:45:39.399041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:45:39.679509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secretAccessKey ( TYPE SECRET) WITH (value = ak);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secretSecretKey ( TYPE SECRET) WITH (value = fakeSecret);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 2024-11-21T17:46:02.110177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:1, at schemeshard: 72057594046644480 2024-11-21T17:46:02.537393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.120654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715726:0, at schemeshard: 72057594046644480 2024-11-21T17:46:03.230499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: 
"root@builtin" } } SecretKey: "SId:secretSecretKey" } `);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier1 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tier2 ( TYPE TIER) WITH (tierConfig = ` Name : "fakeTier" ObjectStorage : { Endpoint: "fake" Bucket: "fake" SecretableAccessKey: { SecretId: { Id: "secretAccessKey" OwnerId: "root@builtin" } } SecretKey: "SId:secretSecretKey" } `);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );EXPECTATION=1;WAITING=1 2024-11-21T17:46:25.047273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715766:0, at schemeshard: 72057594046644480 2024-11-21T17:46:25.618538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2024-11-21T17:46:26.271671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715792:0, at schemeshard: 72057594046644480 2024-11-21T17:46:26.380623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715795:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT tiering1 (TYPE TIERING_RULE) WITH (defaultColumn = timestamp, description = `{ "rules" : [ { "tierName" : "tier1", "durationForEvict" : "10d" }, { "tierName" : "tier2", "durationForEvict" : "20d" } ] }` );EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT tiering2 (TYPE TIERING_RUL ... 
COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1723573746000 at tablet 72075186224037898 2024-11-21T17:48:05.149494Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037898;self_id=[1:5695:6283];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T17:48:05.149497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037898;self_id=[1:5695:6283];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:516;problem=Background activities cannot be started: no index at tablet; 2024-11-21T17:48:05.225394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2024-11-21T17:48:05.225427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=with_appended.cpp:80;portions=16,;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e; 2024-11-21T17:48:05.225508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T17:48:05.225518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=19;portion=13;before_size=160048;after_size=158528;before_rows=2991;after_rows=2990; 2024-11-21T17:48:05.225521Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T17:48:05.225535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T17:48:05.225538Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=19;portion=15;before_size=158528;after_size=156648;before_rows=2990;after_rows=2989; 2024-11-21T17:48:05.225540Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T17:48:05.225550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T17:48:05.225553Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=DEFAULT;path_id=19;portion=14;before_size=156648;after_size=154768;before_rows=2989;after_rows=2988; 2024-11-21T17:48:05.225556Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T17:48:05.225559Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T17:48:05.225562Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=19;portion=16;before_size=154768;after_size=156288;before_rows=2988;after_rows=2989; 2024-11-21T17:48:05.225564Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1520;portion_bytes=1520;portion_raw_bytes=1097; 2024-11-21T17:48:05.225629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=manager.cpp:14;event=unlock;process_id=CS::GENERAL::c2a33e78-a83011ef-9ebdfc4f-e3dbb86e; 2024-11-21T17:48:05.225643Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;fline=granule.cpp:99;event=OnCompactionFinished;info=(granule:19;path_id:19;size:156288;portions_count:6;); 2024-11-21T17:48:05.225647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:48:05.225663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:48:05.225674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=1; 2024-11-21T17:48:05.225685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=3;drop=0;skip=0; 2024-11-21T17:48:05.225692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=manager.cpp:9;event=lock;process_id=CS::CLEANUP::PORTIONS::c2d2a7ee-a83011ef-ad80a50a-7da71f96; 2024-11-21T17:48:05.225705Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:48:05.225709Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:48:05.225722Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;tablet_id=72075186224037896;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:48:05.225739Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037896 Save Batch GenStep: 1:14 Blob count: 1 2024-11-21T17:48:05.225748Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:48:05.225765Z node 1 :TX_COLUMNSHARD DEBUG: 
fline=task.cpp:21;event=free_resources;task_id=16;external_task_id=c2a33e78-a83011ef-9ebdfc4f-e3dbb86e;mem=9361;cpu=0; 2024-11-21T17:48:05.225798Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037896 Save Batch GenStep: 1:15 Blob count: 1 2024-11-21T17:48:05.225817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;local_tx_no=34;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037896;tx_state=complete;commit_tx_id=140737488355339;commit_lock_id=140737488355339;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T17:48:05.225821Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;local_tx_no=34;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037896;tx_state=complete;commit_tx_id=140737488355339;commit_lock_id=140737488355339;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=19;portion=17;before_size=156288;after_size=158168;before_rows=2989;after_rows=2990; 2024-11-21T17:48:05.225823Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;local_tx_no=34;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037896;tx_state=complete;commit_tx_id=140737488355339;commit_lock_id=140737488355339;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1880;portion_bytes=1880;portion_raw_bytes=1081; 2024-11-21T17:48:05.225845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;local_tx_no=34;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037896;tx_state=complete;fline=storage.cpp:66;event=granule_locked;path_id=19; 2024-11-21T17:48:05.225916Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;parent=[1:5681:6277];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=756;external_task_id=c2d2a7ee-a83011ef-ad80a50a-7da71f96;type=CS::TTL;priority=0;; 2024-11-21T17:48:05.226174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;parent=[1:5681:6277];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=17;task=cpu=0;mem=756;external_task_id=c2d2a7ee-a83011ef-ad80a50a-7da71f96;type=CS::TTL;priority=0;; 2024-11-21T17:48:05.226179Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;parent=[1:5681:6277];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=c2d2a7ee-a83011ef-ad80a50a-7da71f96;mem=756;cpu=0; 2024-11-21T17:48:05.226183Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037896;parent=[1:5681:6277];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=c2d2a7ee-a83011ef-ad80a50a-7da71f96;task_id=17;mem=756;cpu=0; 2024-11-21T17:48:05.226192Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:5710:6292];tablet_id=72075186224037896;parent=[1:5681:6277];fline=manager.h:99;event=ask_data;request=request_id=45;19={portions_count=3};; 2024-11-21T17:48:05.226218Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:5710:6292];tablet_id=72075186224037896;parent=[1:5681:6277];fline=columnshard_impl.cpp:1005;background=cleanup;changes_info=type=CS::CLEANUP::PORTIONS;details=(drop 3 
portions(portion_id:13;path_id:19;records_count:1;min_schema_snapshot:(plan_step=14500;tx_id=281474976715885;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;));remove_snapshot:(plan_step=1723573386000;tx_id=18446744073709551615;);)(portion_id:14;path_id:19;records_count:1;min_schema_snapshot:(plan_step=14500;tx_id=281474976715885;);schema_version:1;level:0;column_size:1880;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1723573386000;tx_id=18446744073709551615;);)(portion_id:15;path_id:19;records_count:1;min_schema_snapshot:(plan_step=14500;tx_id=281474976715885;);schema_version:1;level:0;column_size:1880;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1723573386000;tx_id=18446744073709551615;);));; Cleaning waiting... Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037896 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037896 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037897 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037897 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037899 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037899 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 72075186224037898 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier2' stopped at tablet 72075186224037898 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2024-11-21T17:48:05.653236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790821113326880:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:05.653254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000bb4/r3tmp/tmpm0Grjo/pdisk_1.dat 2024-11-21T17:48:05.709723Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16264 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:48:05.727108Z node 1 :TX_PROXY DEBUG: actor# [1:7439790821113327105:2100] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:05.727157Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790821113327368:2245] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:05.727222Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790821113327127:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:05.727237Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790821113327127:2113], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:05.727286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:05.727535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326827:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790821113327373:2246] 2024-11-21T17:48:05.727542Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326830:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790821113327374:2246] 2024-11-21T17:48:05.727554Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790821113326827:2049] Subscribe: subscriber# [1:7439790821113327373:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:05.727557Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790821113326830:2052] Subscribe: subscriber# [1:7439790821113327374:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:05.727563Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326833:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790821113327375:2246] 2024-11-21T17:48:05.727565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790821113326833:2055] Subscribe: subscriber# [1:7439790821113327375:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:05.727574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327373:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821113326827:2049] 2024-11-21T17:48:05.727581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327374:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821113326830:2052] 2024-11-21T17:48:05.727583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327375:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821113326833:2055] 2024-11-21T17:48:05.727583Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326827:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790821113327373:2246] 2024-11-21T17:48:05.727586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821113327370:2246] 2024-11-21T17:48:05.727588Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326830:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790821113327374:2246] 2024-11-21T17:48:05.727590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326833:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790821113327375:2246] 2024-11-21T17:48:05.727591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821113327371:2246] 2024-11-21T17:48:05.727600Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790821113327369:2246][/dc-1] Set up state: owner# [1:7439790821113327127:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:05.727636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821113327372:2246] 2024-11-21T17:48:05.727645Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790821113327369:2246][/dc-1] Path was already updated: owner# [1:7439790821113327127:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:05.727650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327373:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790821113327370:2246], cookie# 1 2024-11-21T17:48:05.727651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327374:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790821113327371:2246], cookie# 1 2024-11-21T17:48:05.727653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327375:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790821113327372:2246], cookie# 1 2024-11-21T17:48:05.727656Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326827:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790821113327373:2246], cookie# 1 2024-11-21T17:48:05.727660Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326830:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790821113327374:2246], cookie# 1 2024-11-21T17:48:05.727662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821113326833:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790821113327375:2246], cookie# 1 2024-11-21T17:48:05.727668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327373:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821113326827:2049], cookie# 1 2024-11-21T17:48:05.727670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439790821113327374:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821113326830:2052], cookie# 1 2024-11-21T17:48:05.727671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790821113327375:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821113326833:2055], cookie# 1 2024-11-21T17:48:05.727674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821113327370:2246], cookie# 1 2024-11-21T17:48:05.727677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:05.727679Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821113327371:2246], cookie# 1 2024-11-21T17:48:05.727682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:05.727684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821113327372:2246], cookie# 1 2024-11-21T17:48:05.727685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790821113327369:2246][/dc-1] Unexpected sync response: sender# [1:7439790821113327372:2246], cookie# 1 2024-11-21T17:48:05.735128Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790821113327127:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:05.735219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790821113327127:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... d: 2 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:06.162530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2024-11-21T17:48:06.162538Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439790826981622143:2111], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211286183 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [2:7439790826981622451:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211286183 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [2:7439790826981622451:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211286183 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } TClient::Ls request: /dc-1/USER_0 2024-11-21T17:48:06.162902Z node 2 :TX_PROXY DEBUG: actor# [2:7439790826981622121:2098] Handle TEvNavigate describe path /dc-1/USER_0 2024-11-21T17:48:06.162923Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790826981622520:2346] HANDLE EvNavigateScheme /dc-1/USER_0 2024-11-21T17:48:06.162946Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439790826981622143:2111], request# { 
ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:06.162974Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790826981622451:2293][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7439790826981622143:2111], cookie# 10 2024-11-21T17:48:06.162987Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790826981622455:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439790826981622452:2293], cookie# 10 2024-11-21T17:48:06.162999Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790826981622456:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439790826981622453:2293], cookie# 10 2024-11-21T17:48:06.163007Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790826981622457:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439790826981622454:2293], cookie# 10 2024-11-21T17:48:06.163008Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439790826981621848:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439790826981622455:2293], cookie# 10 2024-11-21T17:48:06.163011Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439790826981621851:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439790826981622456:2293], cookie# 10 2024-11-21T17:48:06.163015Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439790826981621854:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7439790826981622457:2293], cookie# 10 2024-11-21T17:48:06.163018Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790826981622455:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439790826981621848:2049], cookie# 10 2024-11-21T17:48:06.163020Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790826981622456:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439790826981621851:2052], cookie# 10 2024-11-21T17:48:06.163023Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790826981622457:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439790826981621854:2055], cookie# 10 2024-11-21T17:48:06.163031Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790826981622451:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439790826981622452:2293], cookie# 10 2024-11-21T17:48:06.163036Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790826981622451:2293][/dc-1/USER_0] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:06.163040Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790826981622451:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439790826981622453:2293], cookie# 10 2024-11-21T17:48:06.163047Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790826981622451:2293][/dc-1/USER_0] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:06.163054Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][2:7439790826981622451:2293][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7439790826981622454:2293], cookie# 10 2024-11-21T17:48:06.163061Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790826981622451:2293][/dc-1/USER_0] Unexpected sync response: sender# [2:7439790826981622454:2293], cookie# 10 2024-11-21T17:48:06.163066Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439790826981622143:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2024-11-21T17:48:06.163077Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439790826981622143:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7439790826981622451:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211286183 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:06.163108Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439790826981622143:2111], cacheItem# { Subscriber: { Subscriber: [2:7439790826981622451:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211286183 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2024-11-21T17:48:06.163152Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439790826981622521:2347], recipient# [2:7439790826981622520:2346], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:48:06.163170Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790826981622520:2346] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:48:06.163197Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790826981622520:2346] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2024-11-21T17:48:06.163345Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790826981622520:2346] Handle TEvDescribeSchemeResult Forward to# [2:7439790826981622519:2345] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211286183 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211286183 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... (TRUNCATED) >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain |78.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::LsLs >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> TSubDomainTest::StartAndStopTenanNode >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] >> LocalPartition::Basic [GOOD] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit >> TSubDomainTest::CreateTableInsideSubDomain >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> BasicUsage::ReadSessionCorrectClose >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> LocalPartition::DescribeBadPartition >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::LsLs [GOOD] >> KqpScripting::StreamOperationTimeout >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> TSubDomainTest::LsAltered >> KqpScripting::StreamOperationTimeout [GOOD] >> BasicUsage::RecreateObserver >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> TSubDomainTest::LsAltered [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] |78.3%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped |78.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |78.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TxUsage::WriteToTopic_Demo_39 [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_29 [GOOD] >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true [GOOD] >> TxUsage::WriteToTopic_Demo_40 >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit >> TxUsage::WriteToTopic_Demo_30 |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |78.3%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |78.3%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |78.3%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2024-11-21T17:48:08.000881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790833514920519:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:08.001074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b5c/r3tmp/tmpWWelzj/pdisk_1.dat 2024-11-21T17:48:08.047954Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13808 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:48:08.068450Z node 1 :TX_PROXY DEBUG: actor# [1:7439790833514920733:2134] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:08.068471Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790833514921113:2392] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:08.068512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790833514920756:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:08.068521Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790833514920756:2148], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:08.068626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:08.068920Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953115:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790833514921120:2395] 2024-11-21T17:48:08.068934Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953118:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790833514921121:2395] 2024-11-21T17:48:08.068943Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790829219953115:2050] Subscribe: subscriber# [1:7439790833514921120:2395], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.068950Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790829219953118:2053] Subscribe: subscriber# [1:7439790833514921121:2395], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.068954Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953121:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790833514921122:2395] 2024-11-21T17:48:08.068958Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790829219953121:2056] Subscribe: subscriber# [1:7439790833514921122:2395], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.068988Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921120:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829219953115:2050] 2024-11-21T17:48:08.068996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921122:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829219953121:2056] 2024-11-21T17:48:08.068999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921121:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829219953118:2053] 2024-11-21T17:48:08.069004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790833514921117:2395] 2024-11-21T17:48:08.069007Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953115:2050] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790833514921120:2395] 2024-11-21T17:48:08.069010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790833514921119:2395] 2024-11-21T17:48:08.069011Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953121:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790833514921122:2395] 2024-11-21T17:48:08.069014Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953118:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790833514921121:2395] 2024-11-21T17:48:08.069020Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790833514921116:2395][/dc-1] Set up state: owner# [1:7439790833514920756:2148], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.069063Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790833514921118:2395] 2024-11-21T17:48:08.069076Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790833514921116:2395][/dc-1] Path was already updated: owner# [1:7439790833514920756:2148], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.069082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921120:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790833514921117:2395], cookie# 1 2024-11-21T17:48:08.069085Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921121:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790833514921118:2395], cookie# 1 2024-11-21T17:48:08.069088Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921122:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790833514921119:2395], cookie# 1 2024-11-21T17:48:08.069092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953115:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790833514921120:2395], cookie# 1 2024-11-21T17:48:08.069096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953118:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790833514921121:2395], cookie# 1 2024-11-21T17:48:08.069099Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829219953121:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790833514921122:2395], cookie# 1 2024-11-21T17:48:08.069104Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921120:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829219953115:2050], cookie# 1 2024-11-21T17:48:08.069106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439790833514921121:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829219953118:2053], cookie# 1 2024-11-21T17:48:08.069109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790833514921122:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829219953121:2056], cookie# 1 2024-11-21T17:48:08.069113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790833514921117:2395], cookie# 1 2024-11-21T17:48:08.069117Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:08.069120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790833514921118:2395], cookie# 1 2024-11-21T17:48:08.069123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:08.069127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790833514921119:2395], cookie# 1 2024-11-21T17:48:08.069128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790833514921116:2395][/dc-1] Unexpected sync response: sender# [1:7439790833514921119:2395], cookie# 1 2024-11-21T17:48:08.076447Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790833514920756:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:08.076532Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790833514920756:2148], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... 4037911711:2521], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.996194Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790834037910834:2053] Subscribe: subscriber# [3:7439790834037911713:2521], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.996197Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910837:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7439790834037911712:2520] 2024-11-21T17:48:08.996198Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790834037910837:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2024-11-21T17:48:08.996199Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790834037911711:2521][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790834037910831:2050] 2024-11-21T17:48:08.996202Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790834037910837:2056] Subscribe: subscriber# [3:7439790834037911712:2520], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.996203Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790834037911713:2521][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790834037910834:2053] 2024-11-21T17:48:08.996206Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910837:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7439790834037911714:2521] 2024-11-21T17:48:08.996207Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790834037910837:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T17:48:08.996208Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790834037911701:2521][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790834037911706:2521] 2024-11-21T17:48:08.996211Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790834037910837:2056] Subscribe: subscriber# [3:7439790834037911714:2521], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.996212Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790834037911701:2521][/dc-1/.metadata/workload_manager/running_requests] 
Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790834037911707:2521] 2024-11-21T17:48:08.996216Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439790834037911701:2521][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7439790834037911173:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.996218Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790834037911709:2520][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790834037910831:2050] 2024-11-21T17:48:08.996221Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790834037911710:2520][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790834037910834:2053] 2024-11-21T17:48:08.996241Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790834037911712:2520][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790834037910837:2056] 2024-11-21T17:48:08.996245Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790834037911714:2521][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790834037910837:2056] 2024-11-21T17:48:08.996248Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790834037911701:2521][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790834037911708:2521] 2024-11-21T17:48:08.996249Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790834037911700:2520][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790834037911703:2520] 2024-11-21T17:48:08.996253Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439790834037911701:2521][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7439790834037911173:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.996256Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790834037911700:2520][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790834037911704:2520] 2024-11-21T17:48:08.996257Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910831:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790834037911711:2521] 2024-11-21T17:48:08.996260Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910831:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790834037911709:2520] 2024-11-21T17:48:08.996261Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439790834037911700:2520][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7439790834037911173:2147], state# { Deleted: 1 Strong: 1 
Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.996264Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910834:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790834037911713:2521] 2024-11-21T17:48:08.996264Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790834037911700:2520][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790834037911705:2520] 2024-11-21T17:48:08.996266Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910834:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790834037911710:2520] 2024-11-21T17:48:08.996268Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439790834037911700:2520][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7439790834037911173:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.996271Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910837:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790834037911712:2520] 2024-11-21T17:48:08.996273Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439790834037911173:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T17:48:08.996279Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790834037910837:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790834037911714:2521] 2024-11-21T17:48:08.996281Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439790834037911173:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7439790834037911701:2521] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:08.996295Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790834037911173:2147], cacheItem# { Subscriber: { Subscriber: [3:7439790834037911701:2521] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:08.996307Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439790834037911173:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T17:48:08.996317Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439790834037911173:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: 
[3:7439790834037911700:2520] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:08.996327Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790834037911173:2147], cacheItem# { Subscriber: { Subscriber: [3:7439790834037911700:2520] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:08.996349Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439790834037911715:2523], recipient# [3:7439790834037911699:2279], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] Test command err: 2024-11-21T17:48:05.787312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790821640563542:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:05.787405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:48:05.792326Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790822663042639:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b9c/r3tmp/tmpxU2P7e/pdisk_1.dat 2024-11-21T17:48:05.818040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:48:05.819720Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:05.841691Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:05.886400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:05.886430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:05.913761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2024-11-21T17:48:05.913789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:05.914297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:05.914336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:06.036520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24327 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:48:06.038294Z node 1 :TX_PROXY DEBUG: actor# [1:7439790821640563618:2139] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:06.038356Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790825935531325:2429] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:06.038433Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790821640563722:2194], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:06.038448Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790821640563722:2194], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:06.038470Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:48:06.038481Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:48:06.038589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:06.038964Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563287:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790825935531331:2430] 2024-11-21T17:48:06.038981Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790821640563287:2053] Subscribe: subscriber# [1:7439790825935531331:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.039016Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531331:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821640563287:2053] 2024-11-21T17:48:06.039021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825935531328:2430] 2024-11-21T17:48:06.039042Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563287:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790825935531331:2430] 2024-11-21T17:48:06.039073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:06.039125Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563290:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790825935531332:2430] 2024-11-21T17:48:06.039129Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790821640563290:2056] Subscribe: 
subscriber# [1:7439790825935531332:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.039133Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563293:2059] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790825935531333:2430] 2024-11-21T17:48:06.039136Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790821640563293:2059] Subscribe: subscriber# [1:7439790825935531333:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.039176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531332:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821640563290:2056] 2024-11-21T17:48:06.039179Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531333:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790821640563293:2059] 2024-11-21T17:48:06.039183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825935531329:2430] 2024-11-21T17:48:06.039193Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790825935531326:2430][/dc-1] Set up state: owner# [1:7439790821640563722:2194], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.039224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825935531330:2430] 2024-11-21T17:48:06.039229Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790825935531326:2430][/dc-1] Path was already updated: owner# [1:7439790821640563722:2194], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.039235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531331:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825935531328:2430], cookie# 1 2024-11-21T17:48:06.039238Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531332:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825935531329:2430], cookie# 1 2024-11-21T17:48:06.039241Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531333:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825935531330:2430], cookie# 1 2024-11-21T17:48:06.040219Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563290:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790825935531332:2430] 2024-11-21T17:48:06.040256Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563290:2056] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825935531332:2430], cookie# 1 2024-11-21T17:48:06.040262Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563293:2059] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790825935531333:2430] 2024-11-21T17:48:06.040267Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563293:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825935531333:2430], cookie# 1 2024-11-21T17:48:06.040272Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790821640563287:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825935531331:2430], cookie# 1 2024-11-21T17:48:06.040351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:06.040561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531332:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821640563290:2056], cookie# 1 2024-11-21T17:48:06.040571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531333:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821640563293:2059], cookie# 1 2024-11-21T17:48:06.040575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825935531331:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790821640563287:2053], cookie# 1 2024-11-21T17:48:06.040583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825935531329:2430], cookie# 1 2024-11-21T17:48:06.040588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:06.040593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825935531330:2430], cookie# 1 2024-11-21T17:48:06.040603Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:06.040608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825935531328:2430], cookie# 1 2024-11-21T17:48:06.040615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825935531326:2430][/dc-1] Unexpected sync response: sender# [1:7439790825935531328:2430], cookie# 1 2024-11-21T17:48:06.047639Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790821640563722:2194], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChange ... 
72057594046644480 }: sender# [6:7439790830486396799:2819] 2024-11-21T17:48:07.318828Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395574:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T17:48:07.318831Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395574:2050] Subscribe: subscriber# [6:7439790830486396799:2819], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:07.318834Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439790830486395577:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [6:7439790830486396794:2818] 2024-11-21T17:48:07.318835Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395577:2053] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2024-11-21T17:48:07.318837Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395577:2053] Subscribe: subscriber# [6:7439790830486396794:2818], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:07.318841Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439790830486395577:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [6:7439790830486396800:2819] 2024-11-21T17:48:07.318843Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395577:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T17:48:07.318845Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395577:2053] Subscribe: subscriber# [6:7439790830486396800:2819], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:07.318848Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439790830486395580:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [6:7439790830486396801:2819] 2024-11-21T17:48:07.318850Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395580:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T17:48:07.318852Z node 6 :SCHEME_BOARD_REPLICA INFO: [6:7439790830486395580:2056] Subscribe: subscriber# [6:7439790830486396801:2819], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:07.318855Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][6:7439790830486396793:2818][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7439790830486395574:2050] 2024-11-21T17:48:07.318858Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][6:7439790830486396794:2818][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7439790830486395577:2053] 2024-11-21T17:48:07.318872Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7439790830486396787:2818][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7439790830486396790:2818] 2024-11-21T17:48:07.318881Z node 6 
:SCHEME_BOARD_SUBSCRIBER NOTICE: [main][6:7439790830486396787:2818][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [6:7439790830486395916:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:07.318886Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7439790830486396787:2818][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7439790830486396791:2818] 2024-11-21T17:48:07.318889Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: [main][6:7439790830486396787:2818][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [6:7439790830486395916:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:07.318891Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][6:7439790830486396799:2819][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7439790830486395574:2050] 2024-11-21T17:48:07.318893Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][6:7439790830486396800:2819][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7439790830486395577:2053] 2024-11-21T17:48:07.318895Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][6:7439790830486396801:2819][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7439790830486395580:2056] 2024-11-21T17:48:07.318900Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7439790830486396788:2819][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7439790830486396796:2819] 2024-11-21T17:48:07.318902Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7439790830486396788:2819][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7439790830486396797:2819] 2024-11-21T17:48:07.318905Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][6:7439790830486396788:2819][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [6:7439790830486395916:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:07.318906Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7439790830486396788:2819][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7439790830486396798:2819] 2024-11-21T17:48:07.318909Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: [main][6:7439790830486396788:2819][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7439790830486395916:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:07.318911Z node 6 :SCHEME_BOARD_REPLICA DEBUG: 
[6:7439790830486395574:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7439790830486396793:2818] 2024-11-21T17:48:07.318912Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439790830486395574:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7439790830486396799:2819] 2024-11-21T17:48:07.318914Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439790830486395577:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7439790830486396794:2818] 2024-11-21T17:48:07.318915Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439790830486395577:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7439790830486396800:2819] 2024-11-21T17:48:07.318919Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [6:7439790830486395916:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T17:48:07.318922Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [6:7439790830486395916:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7439790830486396787:2818] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:07.318933Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7439790830486395916:2147], cacheItem# { Subscriber: { Subscriber: [6:7439790830486396787:2818] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:07.318935Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [6:7439790830486395916:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T17:48:07.318938Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [6:7439790830486395916:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7439790830486396788:2819] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:07.318942Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7439790830486395916:2147], cacheItem# { Subscriber: { Subscriber: [6:7439790830486396788:2819] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true 
ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:07.318954Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7439790830486396802:2821], recipient# [6:7439790830486396778:2282], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:07.318961Z node 6 :SCHEME_BOARD_REPLICA DEBUG: [6:7439790830486395580:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7439790830486396801:2819] >> TxUsage::WriteToTopic_Demo_9 [GOOD] >> TxUsage::WriteToTopic_Demo_42 >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TTablesWithReboots::SimpleDropTableWithReboots [GOOD] >> TSubDomainTest::FailIfAffectedSetNotInterior >> TxUsage::WriteToTopic_Demo_11 [GOOD] >> TTablesWithReboots::AlterAndForceDrop [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> TxUsage::WriteToTopic_Demo_12 >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases |78.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |78.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TSubDomainTest::Boot >> TxUsage::WriteToTopic_Demo_30 [GOOD] |78.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterAndForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:54.807066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, 
compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:54.807091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:54.807097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:54.807102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:54.807112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:54.807117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:54.807126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:54.807208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:54.818700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:54.818723Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:54.821999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:54.822114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:54.822146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:54.828727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:54.828794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:54.828879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:54.829065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:54.829845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:54.830135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:54.830143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:54.830151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:54.830155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:54.830160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:54.830201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:54.831576Z node 1 
:HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:54.846471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:54.846543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:54.846591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:54.846626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:54.846631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:54.847356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:54.847378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:54.847419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:54.847427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:54.847430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:54.847434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:54.847764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:54.847771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:54.847774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:54.848016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:54.848022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:54.848026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:54.848030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:54.848466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:54.849048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:54.849086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:54.849248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:54.849268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:54.849273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:54.849311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:54.849315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:54.849335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:54.849343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:54.849647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:54.849653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:54.849683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:54.849687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:54.849746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:54.849750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:54.849758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:54.849760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:54.849764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:54.849767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:54.849770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:54.849772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:54.849780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:54.849784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:54.849786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... X_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:48:15.139100Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:15.139102Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T17:48:15.139105Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T17:48:15.139107Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 2 2024-11-21T17:48:15.139152Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:15.139157Z node 83 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1004:0 ProgressState 2024-11-21T17:48:15.139166Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:48:15.139169Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:15.139174Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:48:15.139177Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:15.139181Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:48:15.139184Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:48:15.139198Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:48:15.139201Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2024-11-21T17:48:15.139203Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:48:15.139205Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:48:15.139207Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:48:15.139297Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.139303Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.139305Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:48:15.139310Z node 83 :FLAT_TX_SCHEMESHARD 
INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:48:15.139312Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:15.139407Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.139416Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.139420Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:48:15.139423Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:48:15.139426Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:15.139635Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.139642Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.139645Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:48:15.139647Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:48:15.139649Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:48:15.139655Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:48:15.139839Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:15.139930Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.139945Z node 83 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:48:15.139987Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:15.140027Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409546 2024-11-21T17:48:15.140466Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:15.140471Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:48:15.140481Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:48:15.140489Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:48:15.140494Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:15.140771Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.140833Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:15.141109Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:48:15.141116Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:48:15.141139Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1003 2024-11-21T17:48:15.141174Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:15.141178Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T17:48:15.141187Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:15.141189Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:15.141224Z node 83 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:15.141238Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:15.141241Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [83:477:2452] 2024-11-21T17:48:15.141246Z node 83 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:15.141255Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:15.141257Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [83:477:2452] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted 2024-11-21T17:48:15.141297Z node 83 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2024-11-21T17:48:15.141332Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:15.141350Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 24us result status StatusSuccess 2024-11-21T17:48:15.141397Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2024-11-21T17:48:07.802874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790829113473092:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:07.802954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b6c/r3tmp/tmpD7vlrZ/pdisk_1.dat 2024-11-21T17:48:07.859242Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64707 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:48:07.867699Z node 1 :TX_PROXY DEBUG: actor# [1:7439790829113473171:2100] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:07.867731Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790829113473437:2245] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:07.867790Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790829113473195:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:07.867813Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790829113473195:2113], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:07.867865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:07.868284Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472896:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790829113473443:2246] 2024-11-21T17:48:07.868284Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472893:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790829113473442:2246] 2024-11-21T17:48:07.868307Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790829113472896:2052] Subscribe: subscriber# [1:7439790829113473443:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:07.868308Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790829113472893:2049] Subscribe: subscriber# [1:7439790829113473442:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:07.868324Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472899:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790829113473444:2246] 2024-11-21T17:48:07.868330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473442:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829113472893:2049] 2024-11-21T17:48:07.868333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473443:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829113472896:2052] 2024-11-21T17:48:07.868335Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790829113472899:2055] Subscribe: subscriber# [1:7439790829113473444:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:07.868337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829113473439:2246] 2024-11-21T17:48:07.868342Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829113473440:2246] 2024-11-21T17:48:07.868344Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472893:2049] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790829113473442:2246] 2024-11-21T17:48:07.868349Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790829113473438:2246][/dc-1] Set up state: owner# [1:7439790829113473195:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:07.868349Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472896:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790829113473443:2246] 2024-11-21T17:48:07.868386Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473444:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829113472899:2055] 2024-11-21T17:48:07.868397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473442:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790829113473439:2246], cookie# 1 2024-11-21T17:48:07.868399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473443:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790829113473440:2246], cookie# 1 2024-11-21T17:48:07.868401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473444:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790829113473441:2246], cookie# 1 2024-11-21T17:48:07.868403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790829113473441:2246] 2024-11-21T17:48:07.868414Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790829113473438:2246][/dc-1] Path was already updated: owner# [1:7439790829113473195:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:07.868418Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472899:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790829113473444:2246] 2024-11-21T17:48:07.868421Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472899:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790829113473444:2246], cookie# 1 2024-11-21T17:48:07.868424Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472893:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790829113473442:2246], cookie# 1 2024-11-21T17:48:07.868427Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790829113472896:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790829113473443:2246], cookie# 1 2024-11-21T17:48:07.868432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473444:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829113472899:2055], cookie# 1 2024-11-21T17:48:07.868435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439790829113473442:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829113472893:2049], cookie# 1 2024-11-21T17:48:07.868438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790829113473443:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829113472896:2052], cookie# 1 2024-11-21T17:48:07.868442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829113473441:2246], cookie# 1 2024-11-21T17:48:07.868453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:07.868457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829113473439:2246], cookie# 1 2024-11-21T17:48:07.868460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:07.868470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790829113473440:2246], cookie# 1 2024-11-21T17:48:07.868472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790829113473438:2246][/dc-1] Unexpected sync response: sender# [1:7439790829113473440:2246], cookie# 1 2024-11-21T17:48:07.876404Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790829113473195:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:07.876498Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790829113473195:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... 22] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2024-11-21T17:48:08.736985Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790834404596083:2322] Handle TEvDescribeSchemeResult Forward to# [2:7439790834404596082:2321] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211288276 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211288276 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) TClient::Ls request: /dc-1 2024-11-21T17:48:08.737410Z node 2 :TX_PROXY DEBUG: actor# [2:7439790834404595668:2098] Handle TEvNavigate describe path /dc-1 2024-11-21T17:48:08.737419Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790834404596086:2325] HANDLE EvNavigateScheme /dc-1 2024-11-21T17:48:08.737432Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439790834404595690:2111], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:08.737444Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790834404595935:2246][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7439790834404595690:2111], cookie# 4 2024-11-21T17:48:08.737459Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790834404595939:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439790834404595936:2246], cookie# 4 2024-11-21T17:48:08.737472Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790834404595940:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439790834404595937:2246], cookie# 4 2024-11-21T17:48:08.737476Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439790834404595395:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439790834404595939:2246], cookie# 4 2024-11-21T17:48:08.737482Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790834404595941:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439790834404595938:2246], cookie# 4 2024-11-21T17:48:08.737482Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439790834404595398:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439790834404595940:2246], cookie# 4 2024-11-21T17:48:08.737491Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439790834404595401:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439790834404595941:2246], cookie# 4 2024-11-21T17:48:08.737493Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790834404595939:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439790834404595395:2049], cookie# 4 2024-11-21T17:48:08.737497Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790834404595940:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439790834404595398:2052], cookie# 4 2024-11-21T17:48:08.737499Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439790834404595941:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439790834404595401:2055], cookie# 4 2024-11-21T17:48:08.737508Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790834404595935:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439790834404595936:2246], cookie# 4 2024-11-21T17:48:08.737512Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790834404595935:2246][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:08.737515Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790834404595935:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: 
sender# [2:7439790834404595937:2246], cookie# 4 2024-11-21T17:48:08.737522Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790834404595935:2246][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:08.737530Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790834404595935:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439790834404595938:2246], cookie# 4 2024-11-21T17:48:08.737537Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439790834404595935:2246][/dc-1] Unexpected sync response: sender# [2:7439790834404595938:2246], cookie# 4 2024-11-21T17:48:08.737540Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439790834404595690:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2024-11-21T17:48:08.737556Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439790834404595690:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7439790834404595935:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211288255 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:08.737571Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439790834404595690:2111], cacheItem# { Subscriber: { Subscriber: [2:7439790834404595935:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211288255 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2024-11-21T17:48:08.737610Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439790834404596087:2326], recipient# [2:7439790834404596086:2325], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:48:08.737626Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790834404596086:2325] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:48:08.737649Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790834404596086:2325] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2024-11-21T17:48:08.737736Z node 2 :TX_PROXY DEBUG: Actor# [2:7439790834404596086:2325] Handle TEvDescribeSchemeResult Forward to# [2:7439790834404596085:2324] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211288255 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211288255 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211288276 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594... 
(TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::SchemeLimits-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:47.630073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:47.630093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.630097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:47.630101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:47.630114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:47.630117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:47.630127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.630192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:47.638389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:47.638407Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.640281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:47.640363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:47.640394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:47.642139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:47.642185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:47.642264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.642399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T17:47:47.643113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.643386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.643397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.643410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:47.643416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.643421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:47.643457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:47.644679Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.660640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:47.660710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.660755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:47.660793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:47.660800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.661281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.661299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:47.661332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.661340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:47.661345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:47.661349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:47.661639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.661647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:47.661651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:47.661910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.661918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.661923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.661929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.662451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:47.662794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:47.662836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:47.662999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.663020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:47.663026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.663070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:47.663076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.663101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:47.663111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.663438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.663446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.663478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.663483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:47.663548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.663554Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:47.663563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:47.663567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.663572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:47.663577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.663581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:47.663584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:47.663594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:47.663599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:47.663603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... , path id: 1 2024-11-21T17:48:12.553045Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1005, path id: 2 2024-11-21T17:48:12.553054Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72075186233409546 2024-11-21T17:48:12.553060Z node 96 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#1005:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T17:48:12.553068Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1005 ready parts: 1/1 2024-11-21T17:48:12.553092Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1005 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2024-11-21T17:48:12.553253Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T17:48:12.553264Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T17:48:12.553271Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2024-11-21T17:48:12.553276Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 5 2024-11-21T17:48:12.553281Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T17:48:12.553427Z node 96 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T17:48:12.553438Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72075186233409546, cookie: 1005 2024-11-21T17:48:12.553442Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1005 2024-11-21T17:48:12.553446Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1005, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 2 2024-11-21T17:48:12.553449Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T17:48:12.553460Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:48:12.553886Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1005 msg type: 269090816 2024-11-21T17:48:12.553920Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72075186233409547 2024-11-21T17:48:12.554032Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 2024-11-21T17:48:12.554321Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestModificationResults wait txId: 1006 2024-11-21T17:48:12.554892Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "B" } } TxId: 1006 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2024-11-21T17:48:12.554928Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_0/B, operationId: 1006:0, at schemeshard: 72075186233409546 2024-11-21T17:48:12.554957Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72075186233409546, LocalPathId: 1], parent name: MyRoot/USER_0, child name: B, child id: [OwnerId: 72075186233409546, LocalPathId: 3], at schemeshard: 72075186233409546 2024-11-21T17:48:12.554969Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 0 2024-11-21T17:48:12.554979Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2024-11-21T17:48:12.555030Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T17:48:12.555039Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 1 2024-11-21T17:48:12.555389Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72075186233409546 PathId: 3, at schemeshard: 72075186233409546 2024-11-21T17:48:12.555413Z node 
96 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1006, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/B 2024-11-21T17:48:12.555444Z node 96 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T17:48:12.555449Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T17:48:12.555471Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 1006, path id: [OwnerId: 72075186233409546, LocalPathId: 3] 2024-11-21T17:48:12.555487Z node 96 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T17:48:12.555491Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1006, path id: 1 2024-11-21T17:48:12.555495Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [96:529:2466], at schemeshard: 72075186233409546, txId: 1006, path id: 3 2024-11-21T17:48:12.555553Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72075186233409546 2024-11-21T17:48:12.555559Z node 96 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#1006:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T17:48:12.555566Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2024-11-21T17:48:12.555588Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409547 message:Transaction { AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409547 2024-11-21T17:48:12.555677Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T17:48:12.555687Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T17:48:12.555691Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 2024-11-21T17:48:12.555695Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 6 2024-11-21T17:48:12.555699Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2024-11-21T17:48:12.555748Z node 96 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T17:48:12.555758Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72075186233409546, cookie: 1006 2024-11-21T17:48:12.555761Z node 96 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 1006 
2024-11-21T17:48:12.555765Z node 96 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 1006, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 2 2024-11-21T17:48:12.555769Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 2 2024-11-21T17:48:12.555778Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T17:48:12.556165Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72075186233409546 to tablet: 72075186233409547 cookie: 0:1006 msg type: 269090816 2024-11-21T17:48:12.556188Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72075186233409547 2024-11-21T17:48:12.556546Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 2024-11-21T17:48:12.556569Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 1007 2024-11-21T17:48:12.557005Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpMkDir MkDir { Name: "C" } } TxId: 1007 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2024-11-21T17:48:12.557038Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_0/C, operationId: 1007:0, at schemeshard: 72075186233409546 2024-11-21T17:48:12.557053Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1007:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, at schemeshard: 72075186233409546 2024-11-21T17:48:12.557491Z node 96 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1007, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_0/C\', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154" TxId: 1007 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T17:48:12.557516Z node 96 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1007, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_0/C', error: paths count limit exceeded, limit: 2, paths: 2, delta: 1, source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, operation: CREATE DIRECTORY, path: /MyRoot/USER_0/C TestModificationResult got TxId: 1007, wait until txId: 1007 >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] >> TxUsage::WriteToTopic_Demo_31 >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots [GOOD] >> BasicUsage::RecreateObserver [GOOD] >> TSubDomainTest::GenericCases [GOOD] >> TxUsage::WriteToTopic_Demo_40 [GOOD] >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> TxUsage::WriteToTopic_Demo_20_RestartNo >> LocalPartition::DescribeBadPartition [GOOD] >> TxUsage::WriteToTopic_Demo_41 >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> 
TSubDomainTest::ConsistentCopyTable >> LocalPartition::DescribeHang >> TSubDomainTest::ConsistentCopyTable [GOOD] |78.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimpleDropTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:55.674654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:55.674678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.674684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:55.674689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:55.674701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:55.674705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:55.674715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.674800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:55.683973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:55.683995Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:55.685906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:55.685998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:55.686024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:55.688370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:55.688444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:55.688530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2024-11-21T17:47:55.688687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.689309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.689586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.689595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.689607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:55.689614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.689620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:55.689665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:55.690852Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:55.705912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:55.705974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.706029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:55.706068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:55.706073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.706681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.706701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:55.706734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.706741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:55.706744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to 
create, do next state 2024-11-21T17:47:55.706747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:55.707109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.707123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:55.707128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:55.707424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.707431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.707436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.707440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.707870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:55.708254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:55.708302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:55.708468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.708487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:55.708494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.708542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:55.708548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.708574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:55.708585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.708935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.708941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.708973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.708976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:55.709050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.709056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:55.709063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:55.709066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.709070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:55.709073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.709077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:55.709081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:55.709091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:55.709096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:55.709101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ARD INFO: TDropTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109298Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109309Z node 79 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1003:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:48:15.109314Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:48:15.109321Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T17:48:15.109330Z node 79 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:48:15.109352Z node 79 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2024-11-21T17:48:15.109365Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:15.109374Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:48:15.109420Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109495Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 
72057594046678944 2024-11-21T17:48:15.109780Z node 79 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109790Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:15.109812Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:15.109832Z node 79 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109836Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [79:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:48:15.109840Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [79:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:48:15.109912Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109918Z node 79 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:15.109931Z node 79 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109935Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:15.109939Z node 79 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:48:15.110055Z node 79 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:15.110065Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:15.110068Z node 79 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:15.110072Z node 79 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:48:15.110077Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:15.110236Z node 79 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:15.110246Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:15.110250Z node 79 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:15.110253Z node 79 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:48:15.110257Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:48:15.110266Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:48:15.110824Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:15.110835Z node 79 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:15.110893Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:48:15.110915Z node 79 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:15.110918Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:15.110922Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:15.110925Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:15.110927Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:15.110929Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:15.110943Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:15.111069Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:15.111338Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:15.112090Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 339302418699 } TabletId: 72075186233409546 State: 4 2024-11-21T17:48:15.112101Z node 79 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:15.112388Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:15.112447Z node 79 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:48:15.112819Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:15.112857Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:48:15.112913Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:15.112917Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 
72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:48:15.112925Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:15.113391Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:48:15.113401Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:48:15.113437Z node 79 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:48:15.113468Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:15.113471Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:48:15.113503Z node 79 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:15.113514Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:15.113516Z node 79 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [79:474:2449] TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted 2024-11-21T17:48:15.113543Z node 79 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2024-11-21T17:48:15.113577Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:15.113597Z node 79 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 25us result status StatusPathDoesNotExist 2024-11-21T17:48:15.113623Z node 79 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2024-11-21T17:48:15.114046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790863135582176:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:15.114233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b58/r3tmp/tmpmQWFRa/pdisk_1.dat 2024-11-21T17:48:15.166059Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61624 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:48:15.186247Z node 1 :TX_PROXY DEBUG: actor# [1:7439790863135582396:2137] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:15.186272Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790863135582772:2394] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:15.186318Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790863135582419:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:15.186328Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790863135582419:2150], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:15.186395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:15.186683Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582068:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790863135582777:2395] 2024-11-21T17:48:15.186694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582071:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790863135582778:2395] 2024-11-21T17:48:15.186702Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790863135582068:2051] Subscribe: subscriber# [1:7439790863135582777:2395], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:15.186707Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790863135582071:2054] Subscribe: subscriber# [1:7439790863135582778:2395], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:15.186711Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582074:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790863135582779:2395] 2024-11-21T17:48:15.186713Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790863135582074:2057] Subscribe: subscriber# [1:7439790863135582779:2395], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:15.186733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582777:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790863135582068:2051] 2024-11-21T17:48:15.186742Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582779:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790863135582074:2057] 2024-11-21T17:48:15.186745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582778:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790863135582071:2054] 
2024-11-21T17:48:15.186751Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790863135582774:2395] 2024-11-21T17:48:15.186757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790863135582776:2395] 2024-11-21T17:48:15.186767Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790863135582773:2395][/dc-1] Set up state: owner# [1:7439790863135582419:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:15.186804Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790863135582775:2395] 2024-11-21T17:48:15.186813Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790863135582773:2395][/dc-1] Path was already updated: owner# [1:7439790863135582419:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:15.186820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582777:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790863135582774:2395], cookie# 1 2024-11-21T17:48:15.186823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582778:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790863135582775:2395], cookie# 1 2024-11-21T17:48:15.186831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582779:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790863135582776:2395], cookie# 1 2024-11-21T17:48:15.186837Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582068:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790863135582777:2395] 2024-11-21T17:48:15.186841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582068:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790863135582777:2395], cookie# 1 2024-11-21T17:48:15.186850Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582074:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790863135582779:2395] 2024-11-21T17:48:15.186852Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582074:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790863135582779:2395], cookie# 1 2024-11-21T17:48:15.186853Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582071:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790863135582778:2395] 2024-11-21T17:48:15.186855Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790863135582071:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: 
sender# [1:7439790863135582778:2395], cookie# 1 2024-11-21T17:48:15.186949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582777:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790863135582068:2051], cookie# 1 2024-11-21T17:48:15.186957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582779:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790863135582074:2057], cookie# 1 2024-11-21T17:48:15.186958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790863135582778:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790863135582071:2054], cookie# 1 2024-11-21T17:48:15.186962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790863135582774:2395], cookie# 1 2024-11-21T17:48:15.186967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:15.186969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790863135582776:2395], cookie# 1 2024-11-21T17:48:15.186971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:15.186974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790863135582775:2395], cookie# 1 2024-11-21T17:48:15.186976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790863135582773:2395][/dc-1] Unexpected sync response: sender# [1:7439790863135582775:2395], cookie# 1 2024-11-21T17:48:15.193791Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790863135582419:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:15.193868Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790863135582419:2150], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... : DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/dir/dir_0/table TableId: [72057594046644480:7:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_0/dir/dir_1/table TableId: [72057594046644480:8:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:48:18.712246Z node 4 :TX_PROXY DEBUG: actor# [4:7439790871640743545:2134] Handle TEvProposeTransaction 2024-11-21T17:48:18.712256Z node 4 :TX_PROXY DEBUG: actor# [4:7439790871640743545:2134] TxId# 281474976715668 ProcessProposeTransaction 2024-11-21T17:48:18.712263Z node 4 :TX_PROXY DEBUG: actor# [4:7439790871640743545:2134] Cookie# 0 userReqId# "" txid# 281474976715668 SEND to# [4:7439790875935712040:2999] DataReq marker# P0 2024-11-21T17:48:18.712276Z node 4 :TX_PROXY TRACE: StateWaitInit, received event# 269811712, Sender [4:7439790871640743545:2134], Recipient [4:7439790875935712040:2999]: NKikimr::TEvTxProxyReq::TEvMakeRequest 2024-11-21T17:48:18.712283Z node 4 :TX_PROXY TRACE: StateWaitInit, processing event TEvTxProxyReq::TEvMakeRequest 2024-11-21T17:48:18.712289Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] Cookie# 0 txid# 281474976715668 HANDLE TDataReq marker# P1 2024-11-21T17:48:18.712335Z node 4 :TX_PROXY DEBUG: Actor [4:7439790875935712040:2999] txid 281474976715668 disallow followers cause of operation 2 read target mode 0 2024-11-21T17:48:18.712342Z node 4 :TX_PROXY DEBUG: Actor [4:7439790875935712040:2999] txid 281474976715668 disallow followers cause of operation 2 read target mode 0 2024-11-21T17:48:18.712346Z node 4 :TX_PROXY DEBUG: 
Actor# [4:7439790875935712040:2999] txid# 281474976715668 SEND to# [4:7439790871640743598:2147] TSchemeCache with 2 scheme entries. DataReq marker# P2 2024-11-21T17:48:18.712365Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [4:7439790871640743598:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) }] } 2024-11-21T17:48:18.712379Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [4:7439790871640743598:2147], cacheItem# { Subscriber: { Subscriber: [4:7439790875935711996:2986] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211298750 PathId: [OwnerId: 72057594046644480, LocalPathId: 8] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:18.712393Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [4:7439790871640743598:2147], cacheItem# { Subscriber: { Subscriber: [4:7439790875935711879:2881] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211298700 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:18.712446Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439790875935712042:3001], recipient# [4:7439790875935712040:2999], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) }] } 2024-11-21T17:48:18.712462Z node 4 :TX_PROXY TRACE: StateWaitResolve, received event# 269746178, Sender 
[4:7439790875935712042:3001], Recipient [4:7439790875935712040:2999]: NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult 2024-11-21T17:48:18.712468Z node 4 :TX_PROXY TRACE: StateWaitResolve, processing event TEvTxProxySchemeCache::TEvResolveKeySetResult 2024-11-21T17:48:18.712471Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T17:48:18.712571Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037892 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T17:48:18.712600Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037894 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T17:48:18.714155Z node 4 :TX_PROXY TRACE: StateWaitPrepare, received event# 269550080, Sender [5:7439790874754070114:2306], Recipient [4:7439790875935712040:2999] 2024-11-21T17:48:18.714165Z node 4 :TX_PROXY TRACE: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:48:18.714173Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037894 read size 0 out readset size 0 marker# P6 2024-11-21T17:48:18.714177Z node 4 :TX_PROXY TRACE: StateWaitPrepare, received event# 269550080, Sender [5:7439790874754069933:2292], Recipient [4:7439790875935712040:2999] 2024-11-21T17:48:18.714178Z node 4 :TX_PROXY TRACE: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:48:18.714181Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037892 read size 0 out readset size 0 marker# P6 2024-11-21T17:48:18.714187Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 SEND EvProposeTransaction to# 72075186224037888 Coordinator marker# P7 2024-11-21T17:48:18.714530Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [5:7439790874754069727:2270], Recipient [4:7439790875935712040:2999] 2024-11-21T17:48:18.714539Z node 4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T17:48:18.714558Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P11 Status# 16 2024-11-21T17:48:18.751597Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [5:7439790874754069727:2270], Recipient [4:7439790875935712040:2999] 2024-11-21T17:48:18.751614Z node 4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T17:48:18.751625Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P10 Status# 17 2024-11-21T17:48:18.753098Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [5:7439790874754069933:2292], Recipient [4:7439790875935712040:2999] 2024-11-21T17:48:18.753106Z node 4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:48:18.753129Z node 4 :TX_PROXY DEBUG: Actor# 
[4:7439790875935712040:2999] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2024-11-21T17:48:18.753144Z node 4 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [5:7439790874754070114:2306], Recipient [4:7439790875935712040:2999] 2024-11-21T17:48:18.753150Z node 4 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:48:18.753153Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037894 marker# P12 2024-11-21T17:48:18.753226Z node 4 :TX_PROXY DEBUG: Actor# [4:7439790875935712040:2999] txid# 281474976715668 MergeResult ExecComplete TDataReq marker# P17 2024-11-21T17:48:18.753255Z node 4 :TX_PROXY INFO: Actor# [4:7439790875935712040:2999] txid# 281474976715668 RESPONSE Status# ExecComplete prepare time: 0.001896s execute time: 0.039069s total time: 0.040965s marker# P13 2024-11-21T17:48:18.758731Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439790871640743253:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439790874754069680:2099] 2024-11-21T17:48:18.758740Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439790871640743256:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439790874754069681:2099] 2024-11-21T17:48:18.758744Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439790871640743253:2050] Unsubscribe: subscriber# [5:7439790874754069680:2099], path# /dc-1/USER_0 2024-11-21T17:48:18.758750Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439790871640743259:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439790874754069682:2099] 2024-11-21T17:48:18.758750Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439790871640743256:2053] Unsubscribe: subscriber# [5:7439790874754069681:2099], path# /dc-1/USER_0 2024-11-21T17:48:18.758752Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7439790871640743259:2056] Unsubscribe: subscriber# [5:7439790874754069682:2099], path# /dc-1/USER_0 2024-11-21T17:48:18.758789Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2024-11-21T17:48:18.758959Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 2910, MsgBus: 20305 2024-11-21T17:47:08.641974Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790576081377641:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:08.642307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001767/r3tmp/tmplV4MWm/pdisk_1.dat 2024-11-21T17:47:08.689528Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2910, node 1 2024-11-21T17:47:08.706025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:47:08.706038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:47:08.706039Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2024-11-21T17:47:08.706068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20305 TClient is connected to server localhost:20305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:47:08.742218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:08.742237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:08.743392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:08.749505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.760318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.773365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.791114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.801941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:08.958126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790576081379181:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.958162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:08.985949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:47:08.991489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.001153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.008224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.014897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.021731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:47:09.030519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580376346981:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.030546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.030549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790580376346986:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:09.031154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:47:09.035355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790580376346988:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:47:09.245429Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229287, txId: 281474976710672] shutting down 2024-11-21T17:47:09.280891Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229322, txId: 281474976710675] shutting down 2024-11-21T17:47:09.314481Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229357, txId: 281474976710678] shutting down 2024-11-21T17:47:09.349670Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229392, txId: 281474976710681] shutting down 2024-11-21T17:47:09.385298Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229427, txId: 281474976710684] shutting down 2024-11-21T17:47:09.420806Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229462, txId: 281474976710687] shutting down 2024-11-21T17:47:09.460657Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229504, txId: 281474976710690] shutting down 2024-11-21T17:47:09.497404Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229539, txId: 281474976710693] shutting down 2024-11-21T17:47:09.537748Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229581, txId: 281474976710696] shutting down 2024-11-21T17:47:09.572340Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229616, txId: 281474976710699] shutting down 2024-11-21T17:47:09.610498Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229651, txId: 281474976710702] shutting down 2024-11-21T17:47:09.644940Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229686, txId: 281474976710705] shutting down 2024-11-21T17:47:09.683886Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229728, txId: 281474976710708] shutting down 2024-11-21T17:47:09.720515Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229763, txId: 281474976710711] shutting down 2024-11-21T17:47:09.756390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229798, txId: 281474976710714] shutting down 2024-11-21T17:47:09.794573Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229840, txId: 281474976710717] shutting down 2024-11-21T17:47:09.828785Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229875, txId: 281474976710720] shutting down 2024-11-21T17:47:09.866659Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229910, txId: 281474976710723] shutting down 2024-11-21T17:47:09.906207Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211229952, txId: 281474976710726] shutting down 2024-11-21T17:47:09.944092Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1732211229987, txId: 281474976710729] shutting down 2024-11-21T17:47:09.979250Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230022, txId: 281474976710732] shutting down 2024-11-21T17:47:10.012352Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230057, txId: 281474976710735] shutting down 2024-11-21T17:47:10.046529Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230092, txId: 281474976710738] shutting down 2024-11-21T17:47:10.098532Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230141, txId: 281474976710741] shutting down 2024-11-21T17:47:10.137994Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230183, txId: 281474976710744] shutting down 2024-11-21T17:47:10.179974Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230225, txId: 281474976710747] shutting down 2024-11-21T17:47:10.216854Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211230260, txId: 281474976710750] shutti ... T17:48:06.686161Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211286680, txId: 281474976713591] shutting down 2024-11-21T17:48:06.782089Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211286792, txId: 281474976713594] shutting down 2024-11-21T17:48:06.865267Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211286869, txId: 281474976713597] shutting down 2024-11-21T17:48:06.940784Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211286953, txId: 281474976713600] shutting down 2024-11-21T17:48:07.027126Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287030, txId: 281474976713603] shutting down 2024-11-21T17:48:07.114101Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287121, txId: 281474976713606] shutting down 2024-11-21T17:48:07.199449Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287212, txId: 281474976713609] shutting down 2024-11-21T17:48:07.295938Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287296, txId: 281474976713612] shutting down 2024-11-21T17:48:07.400417Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287394, txId: 281474976713615] shutting down 2024-11-21T17:48:07.489074Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287499, txId: 281474976713618] shutting down 2024-11-21T17:48:07.563856Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287576, txId: 281474976713621] shutting down 2024-11-21T17:48:07.656412Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287660, txId: 281474976713624] shutting down 2024-11-21T17:48:07.748060Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287758, txId: 
281474976713627] shutting down 2024-11-21T17:48:07.828451Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287835, txId: 281474976713630] shutting down 2024-11-21T17:48:07.905672Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211287919, txId: 281474976713633] shutting down 2024-11-21T17:48:07.985786Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288003, txId: 281474976713636] shutting down 2024-11-21T17:48:08.053692Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288073, txId: 281474976713639] shutting down 2024-11-21T17:48:08.132193Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288150, txId: 281474976713642] shutting down 2024-11-21T17:48:08.216272Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288220, txId: 281474976713645] shutting down 2024-11-21T17:48:08.316414Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288318, txId: 281474976713648] shutting down 2024-11-21T17:48:08.437988Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288430, txId: 281474976713651] shutting down 2024-11-21T17:48:08.523920Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288528, txId: 281474976713654] shutting down 2024-11-21T17:48:08.617678Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288619, txId: 281474976713657] shutting down 2024-11-21T17:48:08.699861Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288710, txId: 281474976713660] shutting down 2024-11-21T17:48:08.773921Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288787, txId: 281474976713663] shutting down 2024-11-21T17:48:08.862280Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288864, txId: 281474976713666] shutting down 2024-11-21T17:48:08.947579Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211288955, txId: 281474976713669] shutting down Trying to start YDB, gRPC: 17621, MsgBus: 21966 2024-11-21T17:48:09.290755Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790838822660316:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:09.290960Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001767/r3tmp/tmpe45Fh6/pdisk_1.dat 2024-11-21T17:48:09.297604Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17621, node 2 2024-11-21T17:48:09.307143Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:09.307156Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:09.307158Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2024-11-21T17:48:09.307191Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21966 TClient is connected to server localhost:21966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:09.390783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:09.390840Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:09.391888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:09.393090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:09.402457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:09.410904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:09.426522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:09.437066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:09.564942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790838822661844:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:09.564979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:09.570371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:09.576783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:09.586607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:09.601294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:09.616346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:09.628721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:09.644838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790838822662357:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:09.644868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790838822662362:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:09.644878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:09.645715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:09.648702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790838822662364:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::CreateIndexedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:20.131092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:20.131109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.131112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:20.131116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:20.131119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:20.131121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:20.131127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.131180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:20.139079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:20.139093Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.140778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:20.140864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:20.140893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:20.143473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:20.143546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:20.143651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.143892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2024-11-21T17:47:20.144795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.145100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.145114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.145127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:20.145135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.145141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:20.145208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:20.146801Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.167006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:20.167074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.167121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:20.167178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:20.167184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.167750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.167774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:20.167831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.167839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:20.167843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:20.167848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:20.168273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.168284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:20.168288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:20.168712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.168727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.168732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.168738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.169303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:20.169713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:20.169748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:20.169902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.169925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:20.169932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.169987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:20.169994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.170031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:20.170042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:20.170431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.170438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.170463Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.170468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:20.170525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.170531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:20.170540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:20.170544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.170549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:20.170554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.170559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:20.170562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:20.170571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:20.170575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:20.170579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:17.902715Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:48:17.902739Z node 222 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" took 25us result status StatusSuccess 2024-11-21T17:48:17.902804Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValues/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:17.902876Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:48:17.902904Z node 222 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" took 30us result status StatusSuccess 2024-11-21T17:48:17.903002Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1" PathDescription { Self { Name: "UserDefinedIndexByValue0CoveringValue1" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 9 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndexByValue0CoveringValue1" LocalPathId: 9 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "value1" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 
2147483648 MinPartitionsCount: 1 } } } } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:17.903104Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:48:17.903142Z node 222 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" took 42us result status StatusSuccess 2024-11-21T17:48:17.903222Z node 222 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB/Table1/UserDefinedIndexByValue0CoveringValue1/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 9 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2024-11-21T17:44:59.913280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:44:59.913314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:44:59.913319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:44:59.913324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:44:59.913338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:44:59.913342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:44:59.913352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:44:59.914048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:44:59.934405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:44:59.934431Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:44:59.938558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:44:59.938774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:44:59.938793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:44:59.940033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:44:59.940193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:44:59.940269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:59.940341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:44:59.940978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:44:59.941211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:44:59.941218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:44:59.941227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:44:59.941232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:44:59.941237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:44:59.941248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:44:59.993509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:44:59.993615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:59.993678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:44:59.993720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:44:59.993728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:59.995212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:59.995238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:44:59.995281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:59.995290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:44:59.995295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:44:59.995299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:44:59.996558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:59.996571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046578944 2024-11-21T17:44:59.996575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:44:59.997116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:59.997125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:44:59.997130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:44:59.997155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:44:59.998214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:44:59.998600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:44:59.998803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:44:59.998977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:44:59.998983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:44:59.998987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.205541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.205601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:00.205613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:00.205815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:00.205824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:00.205963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:00.205976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:00.212325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:00.212346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:00.212393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:00.212399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:00.212478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.212487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:00.212503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:00.212507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:00.212513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:00.212519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:00.212524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:00.212527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:00.212539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:00.212544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:00.212548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:00.213245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:00.213259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 
72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:00.213263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:00.213267Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:00.213272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:00.213301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:00.213306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T1 ... update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:47:53.178820Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046578944 2024-11-21T17:47:53.178877Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046578944 2024-11-21T17:47:53.178898Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046578944 2024-11-21T17:47:53.711846Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2024-11-21T17:47:53.711900Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2024-11-21T17:47:53.711926Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2024-11-21T17:47:53.837822Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T17:47:53.838091Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T17:47:53.840466Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:47:53.840556Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 
2024-11-21T17:47:57.601128Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T17:47:57.601333Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T17:47:57.603315Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:47:57.603421Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:01.367203Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T17:48:01.367410Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T17:48:01.369208Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:01.369280Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:05.194908Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T17:48:05.195063Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T17:48:05.196501Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) 
cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:05.196583Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:08.793405Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T17:48:08.793599Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T17:48:08.795357Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:08.795445Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:12.567773Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T17:48:12.567935Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T17:48:12.569433Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:12.569495Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:15.703650Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046578944 2024-11-21T17:48:15.703693Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046578944 2024-11-21T17:48:15.703710Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046578944 2024-11-21T17:48:16.230939Z node 100 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2024-11-21T17:48:16.230984Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2024-11-21T17:48:16.231004Z node 100 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2024-11-21T17:48:16.355610Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" } } } 2024-11-21T17:48:16.355760Z node 100 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd-2" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd-2" NumGroups: 1000 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046578944 X2: 3 } } } } 2024-11-21T17:48:16.357240Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T17:48:16.357276Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2024-11-21T17:48:06.260134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790824258140447:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:06.260154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b89/r3tmp/tmpUcj3NW/pdisk_1.dat 2024-11-21T17:48:06.318500Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25351 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:48:06.343717Z node 1 :TX_PROXY DEBUG: actor# [1:7439790824258140668:2136] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:06.343735Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790824258141071:2409] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:06.343779Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790824258140710:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:06.343787Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790824258140710:2154], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:06.343835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:06.344190Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140341:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790824258141076:2410] 2024-11-21T17:48:06.344212Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790824258140341:2050] Subscribe: subscriber# [1:7439790824258141076:2410], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.344224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140344:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790824258141077:2410] 2024-11-21T17:48:06.344240Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790824258140344:2053] Subscribe: subscriber# [1:7439790824258141077:2410], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.344245Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140347:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790824258141078:2410] 2024-11-21T17:48:06.344248Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790824258140347:2056] Subscribe: subscriber# [1:7439790824258141078:2410], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.344259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141076:2410][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824258140341:2050] 2024-11-21T17:48:06.344263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141077:2410][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824258140344:2053] 2024-11-21T17:48:06.344265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141078:2410][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824258140347:2056] 2024-11-21T17:48:06.344270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824258141073:2410] 2024-11-21T17:48:06.344275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439790824258141072:2410][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824258141074:2410] 2024-11-21T17:48:06.344283Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790824258141072:2410][/dc-1] Set up state: owner# [1:7439790824258140710:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.344320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824258141075:2410] 2024-11-21T17:48:06.344326Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790824258141072:2410][/dc-1] Path was already updated: owner# [1:7439790824258140710:2154], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.344332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141076:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824258141073:2410], cookie# 1 2024-11-21T17:48:06.344335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141077:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824258141074:2410], cookie# 1 2024-11-21T17:48:06.344337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141078:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824258141075:2410], cookie# 1 2024-11-21T17:48:06.344342Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140341:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790824258141076:2410] 2024-11-21T17:48:06.344345Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140341:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824258141076:2410], cookie# 1 2024-11-21T17:48:06.344348Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140344:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790824258141077:2410] 2024-11-21T17:48:06.344350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140344:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824258141077:2410], cookie# 1 2024-11-21T17:48:06.344353Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140347:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790824258141078:2410] 2024-11-21T17:48:06.344355Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824258140347:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824258141078:2410], cookie# 1 2024-11-21T17:48:06.348165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141076:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824258140341:2050], cookie# 1 2024-11-21T17:48:06.348185Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141077:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824258140344:2053], cookie# 1 2024-11-21T17:48:06.348188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824258141078:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824258140347:2056], cookie# 1 2024-11-21T17:48:06.348195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824258141073:2410], cookie# 1 2024-11-21T17:48:06.348201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:06.348204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824258141074:2410], cookie# 1 2024-11-21T17:48:06.348208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:06.348212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824258141075:2410], cookie# 1 2024-11-21T17:48:06.348214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824258141072:2410][/dc-1] Unexpected sync response: sender# [1:7439790824258141075:2410], cookie# 1 2024-11-21T17:48:06.351355Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790824258140710:2154], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:06.351441Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790824258140710:2154], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790828187043751:2056] 2024-11-21T17:48:08.051385Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790832482012152:2723][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790828187043748:2053] 2024-11-21T17:48:08.051387Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043745:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790832482012151:2723] 2024-11-21T17:48:08.051389Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790832482012147:2723][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790832482012148:2723] 2024-11-21T17:48:08.051390Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043751:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790832482012154:2723] 2024-11-21T17:48:08.051396Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790832482012147:2723][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790832482012150:2723] 2024-11-21T17:48:08.051400Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790832482012153:2724][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:08.051400Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439790832482012147:2723][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7439790828187044088:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.051403Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790832482012147:2723][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7439790832482012149:2723] 2024-11-21T17:48:08.051407Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439790832482012147:2723][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7439790828187044088:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.051410Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043748:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790832482012152:2723] 2024-11-21T17:48:08.051415Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: 
HandleNotify: self# [3:7439790828187044088:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T17:48:08.051421Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439790828187044088:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7439790832482012147:2723] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:08.051430Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790828187044088:2147], cacheItem# { Subscriber: { Subscriber: [3:7439790832482012147:2723] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:08.051433Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043745:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7439790832482012158:2724] 2024-11-21T17:48:08.051436Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790828187043745:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T17:48:08.051439Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043748:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7439790832482012159:2724] 2024-11-21T17:48:08.051439Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790828187043745:2050] Subscribe: subscriber# [3:7439790832482012158:2724], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.051441Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790828187043748:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T17:48:08.051443Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790828187043748:2053] Subscribe: subscriber# [3:7439790832482012159:2724], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.051444Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043751:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7439790832482012160:2724] 2024-11-21T17:48:08.051446Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790828187043751:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2024-11-21T17:48:08.051447Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790832482012158:2724][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# 
[3:7439790828187043745:2050] 2024-11-21T17:48:08.051449Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790828187043751:2056] Subscribe: subscriber# [3:7439790832482012160:2724], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:08.051450Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790832482012159:2724][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790828187043748:2053] 2024-11-21T17:48:08.051454Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043745:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790832482012158:2724] 2024-11-21T17:48:08.051454Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439790832482012160:2724][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790828187043751:2056] 2024-11-21T17:48:08.051456Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043748:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790832482012159:2724] 2024-11-21T17:48:08.051459Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790828187043751:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439790832482012160:2724] 2024-11-21T17:48:08.051459Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790832482012153:2724][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790832482012155:2724] 2024-11-21T17:48:08.051464Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790832482012153:2724][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790832482012156:2724] 2024-11-21T17:48:08.051468Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439790832482012153:2724][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7439790828187044088:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.051471Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439790832482012153:2724][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439790832482012157:2724] 2024-11-21T17:48:08.051473Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439790828187044088:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T17:48:08.051474Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439790832482012153:2724][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7439790828187044088:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:08.051477Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439790828187044088:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 
}, by path# { Subscriber: { Subscriber: [3:7439790832482012153:2724] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:08.051483Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790828187044088:2147], cacheItem# { Subscriber: { Subscriber: [3:7439790832482012153:2724] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:08.051492Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439790832482012161:2725], recipient# [3:7439790832482012145:2281], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2024-11-21T17:46:41.239358Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1732211201239350 2024-11-21T17:46:41.333292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790460334521470:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:41.333822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:41.337033Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790460926201931:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001dfc/r3tmp/tmpvEwbeh/pdisk_1.dat 2024-11-21T17:46:41.368034Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:41.369124Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:41.371563Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:41.404011Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5410, node 1 2024-11-21T17:46:41.418970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001dfc/r3tmp/yandexNgcVWr.tmp 2024-11-21T17:46:41.418986Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001dfc/r3tmp/yandexNgcVWr.tmp 2024-11-21T17:46:41.419052Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001dfc/r3tmp/yandexNgcVWr.tmp 2024-11-21T17:46:41.419108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:41.423670Z INFO: TTestServer started on Port 28920 GrpcPort 5410 TClient is connected to server localhost:28920 2024-11-21T17:46:41.433902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.434262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting PQClient connected to localhost:5410 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:41.435998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:41.470059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:41.473047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:41.473070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:41.475065Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:41.475400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... 2024-11-21T17:46:41.678927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790460334522381:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.678947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790460334522370:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.678961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.679541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:41.681445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790460334522413:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.681514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:41.684117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790460334522384:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T17:46:41.702026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.702345Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790460926202094:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.702423Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTNkMTAzOTUtNDI3YWU2MWUtYWJkMDc1MGItMTY3ODhiZQ==, ActorId: [2:7439790460926202067:2277], ActorState: ExecuteState, TraceId: 01jd7xanmv97ncs6d7h8ex4gb7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.702801Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.758525Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790460334522552:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:41.758606Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmYyZGNkNGUtZmUyM2MxY2YtZDhhMzRjNDctNDFiMmU5NWI=, ActorId: [1:7439790460334522367:2299], ActorState: ExecuteState, TraceId: 01jd7xanme8c94gb54t0q5dqt3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:41.758740Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:41.764000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:41.830475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:5410", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:41.871945Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd7xansvax0s7f0dmkf8n7c8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY0NzRhMGUtZWY1ZTU1MjktNmJhN2FkMzMtNzBmNTE3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790460334522854:2935] 2024-11-21T17:46:46.334504Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790460334521470:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:46.334530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:46.336903Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790460926201931:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:46.337576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:47.061237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:5410 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:47.149410Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQu ... ] server connected, pipe [3:7439790875374815109:2516], now have 1 active actors on pipe 2024-11-21T17:48:18.605620Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:48:18.605634Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:48:18.605644Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Created session shared/user_3_3_12311438828826164541_v1 on pipe: [3:7439790875374815109:2516] 2024-11-21T17:48:18.605677Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_3_3_12311438828826164541_v1:1 with generation 1 2024-11-21T17:48:18.605736Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user session is set to 0 (startOffset 0) session shared/user_3_3_12311438828826164541_v1 2024-11-21T17:48:18.605810Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:48:18.606974Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2024-11-21T17:48:18.606991Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:48:18.607138Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_12311438828826164541_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1732211295554 CreateTimestampMS: 1732211295554 SizeLag: 0 WriteTimestampEstimateMS: 1732211298565 } Cookie: 18446744073709551615 } 2024-11-21T17:48:18.607157Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12311438828826164541_v1 INIT DONE TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2024-11-21T17:48:18.607187Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_12311438828826164541_v1 sending to client partition status >>> Got event: StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc2 Database path: /Root Database id: account-dc2 CommittedOffset: 0 EndOffset: 0 } 2024-11-21T17:48:18.607506Z :INFO: [/Root] [/Root] [29e4f33e-52a8c198-1d5e8cc9-5dfe2c95] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:18.607518Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:48:18.607525Z :INFO: [/Root] [/Root] [29e4f33e-52a8c198-1d5e8cc9-5dfe2c95] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:18.607542Z :NOTICE: [/Root] [/Root] [29e4f33e-52a8c198-1d5e8cc9-5dfe2c95] Aborting read session. 
Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:48:18.607547Z :DEBUG: [/Root] [/Root] [29e4f33e-52a8c198-1d5e8cc9-5dfe2c95] [] Abort session to cluster 2024-11-21T17:48:18.607642Z :INFO: [/Root] [/Root] [b18670f0-b2875e0a-c404eaea-3144b1fd] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:18.607649Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2024-11-21T17:48:18.607654Z :INFO: [/Root] [/Root] [b18670f0-b2875e0a-c404eaea-3144b1fd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:18.607660Z :NOTICE: [/Root] [/Root] [b18670f0-b2875e0a-c404eaea-3144b1fd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:48:18.607663Z :DEBUG: [/Root] [/Root] [b18670f0-b2875e0a-c404eaea-3144b1fd] [] Abort session to cluster 2024-11-21T17:48:18.607680Z :INFO: [/Root] [/Root] [d17b0b8c-a02d8569-789a3566-b41f9276] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:18.607686Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:48:18.607689Z :INFO: [/Root] [/Root] [d17b0b8c-a02d8569-789a3566-b41f9276] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:18.607694Z :NOTICE: [/Root] [/Root] [d17b0b8c-a02d8569-789a3566-b41f9276] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:48:18.607697Z :DEBUG: [/Root] [/Root] [d17b0b8c-a02d8569-789a3566-b41f9276] [] Abort session to cluster 2024-11-21T17:48:18.607715Z :INFO: [/Root] [/Root] [d17b0b8c-a02d8569-789a3566-b41f9276] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:18.607717Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:48:18.607719Z :INFO: [/Root] [/Root] [d17b0b8c-a02d8569-789a3566-b41f9276] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:18.607724Z :NOTICE: [/Root] [/Root] [d17b0b8c-a02d8569-789a3566-b41f9276] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:48:18.607743Z :INFO: [/Root] [/Root] [b18670f0-b2875e0a-c404eaea-3144b1fd] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:18.607746Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2024-11-21T17:48:18.607749Z :INFO: [/Root] [/Root] [b18670f0-b2875e0a-c404eaea-3144b1fd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:18.607753Z :NOTICE: [/Root] [/Root] [b18670f0-b2875e0a-c404eaea-3144b1fd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:48:18.607762Z :INFO: [/Root] [/Root] [29e4f33e-52a8c198-1d5e8cc9-5dfe2c95] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:18.607765Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:48:18.607768Z :INFO: [/Root] [/Root] [29e4f33e-52a8c198-1d5e8cc9-5dfe2c95] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:18.607771Z :NOTICE: [/Root] [/Root] [29e4f33e-52a8c198-1d5e8cc9-5dfe2c95] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:48:18.607802Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_2329708101195171204_v1 grpc read done: success# 0, data# { } 2024-11-21T17:48:18.607810Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2329708101195171204_v1 grpc read failed 2024-11-21T17:48:18.607814Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2329708101195171204_v1 grpc closed 2024-11-21T17:48:18.607818Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_2329708101195171204_v1 is DEAD 2024-11-21T17:48:18.607913Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790875374815100:2506] disconnected; active server actors: 1 2024-11-21T17:48:18.607924Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790875374815100:2506] client user disconnected session shared/user_3_1_2329708101195171204_v1 2024-11-21T17:48:18.607929Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T17:48:18.607936Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2024-11-21T17:48:18.607939Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2024-11-21T17:48:18.607942Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000005s 2024-11-21T17:48:18.608021Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_12311438828826164541_v1 grpc read done: success# 0, data# { } 2024-11-21T17:48:18.608031Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12311438828826164541_v1 grpc read failed 2024-11-21T17:48:18.608034Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12311438828826164541_v1 grpc closed 2024-11-21T17:48:18.608038Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12311438828826164541_v1 is DEAD 2024-11-21T17:48:18.608201Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_126573817642559547_v1 grpc read done: success# 0, data# { } 2024-11-21T17:48:18.608209Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_126573817642559547_v1 grpc read failed 2024-11-21T17:48:18.608211Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_126573817642559547_v1 grpc closed 2024-11-21T17:48:18.608214Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_126573817642559547_v1 is DEAD 2024-11-21T17:48:18.608387Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790875374815102:2508] disconnected; active server actors: 1 2024-11-21T17:48:18.608395Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790875374815102:2508] client user disconnected session shared/user_3_3_12311438828826164541_v1 2024-11-21T17:48:18.608401Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] 
consumer user rebalancing was scheduled 2024-11-21T17:48:18.608405Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790875374815103:2507] disconnected; active server actors: 1 2024-11-21T17:48:18.608407Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439790875374815103:2507] client user disconnected session shared/user_3_2_126573817642559547_v1 2024-11-21T17:48:18.608488Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:18.608504Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_12311438828826164541_v1 2024-11-21T17:48:18.608521Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790875374815109:2516] destroyed 2024-11-21T17:48:18.608539Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_3_12311438828826164541_v1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T17:46:55.253466Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732211215253457 2024-11-21T17:46:55.352284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790518347342974:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:55.352353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:55.377084Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790518865730854:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e2b/r3tmp/tmp3UzpAW/pdisk_1.dat 2024-11-21T17:46:55.392785Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:55.392501Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:55.393777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:55.424067Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62316, node 1 2024-11-21T17:46:55.445315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000e2b/r3tmp/yandexppqAWA.tmp 2024-11-21T17:46:55.445334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000e2b/r3tmp/yandexppqAWA.tmp 2024-11-21T17:46:55.445394Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000e2b/r3tmp/yandexppqAWA.tmp 2024-11-21T17:46:55.445437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:55.451389Z INFO: TTestServer started on Port 8157 GrpcPort 62316 2024-11-21T17:46:55.451624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:55.451653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:55.457155Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8157 PQClient connected to localhost:62316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:55.491051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:46:55.505071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:55.505096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:55.506903Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:55.507427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:46:55.712622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790518865730949:2277], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:55.712642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790518865730997:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:55.712653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:55.713957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:46:55.718955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790518865731002:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:46:55.768165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T17:46:55.770128Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790518347343783:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:55.770648Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2NiZmVjOTUtOGIwNDY3Ni0zNDUxNzc4NC03ODM1OTc2MA==, ActorId: [1:7439790518347343733:2298], ActorState: ExecuteState, TraceId: 01jd7xb3bhejswpgg1a1tzbhp9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:55.771085Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:55.776833Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790518865731037:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:55.776929Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjUzOTIzMjctZTBlZDllYjMtOWIxZTcxZTMtOTJmMWI3N2Y=, ActorId: [2:7439790518865730947:2276], ActorState: ExecuteState, TraceId: 01jd7xb3ax9qjxv1pqajg26g42, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:55.777070Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:55.847012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:55.867436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:62316", true, true, 1000); 2024-11-21T17:46:55.957204Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd7xb3j043x19t9t9pek08ka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMxYTU1NS01ZjlhMGI0MC1lMTZmMGI4Yi01MWQ5NTUwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790518347344155:2900] 2024-11-21T17:47:00.351635Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790518347342974:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:00.351663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:47:00.369638Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790518865730854:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:00.369683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:00.965742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:62316 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:47:00.980626Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:62316 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" ... essionId: ydb://session/3?node_id=16&id=OGRiZjQ2YTEtMTMxMzJjYTctMjI0NGNhODAtMmJmMTY4MWI=, ActorId: [16:7439790832125175782:2472], ActorState: ExecuteState, TraceId: 01jd7xda2a1wgcnezafcf67zw5, Create QueryResponse for error on request, msg: 2024-11-21T17:48:08.331409Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xda376rr81vb8mf0phxnn" } } YdbStatus: UNAVAILABLE ConsumedRu: 18 } 2024-11-21T17:48:08.853432Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2024-11-21T17:48:08.854545Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:9710" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2024-11-21T17:48:08.854556Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Start write session. 
Will connect to endpoint: localhost:9710 2024-11-21T17:48:08.854861Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:48:08.854897Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T17:48:08.854875Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T17:48:08.855062Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T17:48:08.855085Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:36320 2024-11-21T17:48:08.855092Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:36320 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:48:08.855095Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:48:08.855539Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T17:48:08.855585Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:48:08.855590Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:48:08.855591Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:48:08.855595Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790831844085950:2559] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:48:08.855967Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439790831844085950:2559] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:48:09.017774Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720703. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T17:48:09.017821Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439790831844085963:2561] TxId: 281474976720703. Ctx: { TraceId: 01jd7xdarrewpa9rbsfzhj3q52, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NTQwYjk1NTctNjk3NmIzYWItMTBiODE1NTctZjQ1MDE2NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T17:48:09.017915Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NTQwYjk1NTctNjk3NmIzYWItMTBiODE1NTctZjQ1MDE2NzU=, ActorId: [15:7439790831844085951:2561], ActorState: ExecuteState, TraceId: 01jd7xdarrewpa9rbsfzhj3q52, Create QueryResponse for error on request, msg: 2024-11-21T17:48:09.018434Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7439790831844085950:2559] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NTQwYjk1NTctNjk3NmIzYWItMTBiODE1NTctZjQ1MDE2NzU=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xdarrewpa9rbsg1zv7t10" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T17:48:09.018477Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NTQwYjk1NTctNjk3NmIzYWItMTBiODE1NTctZjQ1MDE2NzU=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xdarrewpa9rbsg1zv7t10" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T17:48:09.018703Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2024-11-21T17:48:09.018948Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NTQwYjk1NTctNjk3NmIzYWItMTBiODE1NTctZjQ1MDE2NzU=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xdarrewpa9rbsg1zv7t10" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T17:48:09.018957Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session will restart in 2.000000s 2024-11-21T17:48:09.018977Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session: Do CDS request 2024-11-21T17:48:09.018983Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Do schedule cds request after 2000 ms 2024-11-21T17:48:09.425332Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720705. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:48:09.425378Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439790836139053325:2563] TxId: 281474976720705. Ctx: { TraceId: 01jd7xdb4t1xytnc7j3jzxtkqp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MmZjMjBmNjYtZjdkMTA0ZDYtNGVmMGE2MzItODZhODMyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:48:09.425511Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MmZjMjBmNjYtZjdkMTA0ZDYtNGVmMGE2MzItODZhODMyZDY=, ActorId: [15:7439790836139053305:2563], ActorState: ExecuteState, TraceId: 01jd7xdb4t1xytnc7j3jzxtkqp, Create QueryResponse for error on request, msg: 2024-11-21T17:48:09.425800Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xdb5m9jsdszwtf56sd35e" } } YdbStatus: UNAVAILABLE ConsumedRu: 16 } 2024-11-21T17:48:09.526225Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715688. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:48:09.526337Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439790836420143188:2481] TxId: 281474976715688. Ctx: { TraceId: 01jd7xdb7m7d3th25y4bdsppm0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NjFiYThjMWYtMWU2Mzk1YTYtNWI2MjI5YzEtNmNjNjk0ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:48:09.526573Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=NjFiYThjMWYtMWU2Mzk1YTYtNWI2MjI5YzEtNmNjNjk0ZTI=, ActorId: [16:7439790836420143175:2481], ActorState: ExecuteState, TraceId: 01jd7xdb7m7d3th25y4bdsppm0, Create QueryResponse for error on request, msg: 2024-11-21T17:48:09.526969Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xdb8ndg97s7e6xs2nz183" } } YdbStatus: UNAVAILABLE ConsumedRu: 21 } 2024-11-21T17:48:09.854085Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session: close. Timeout = 0 ms 2024-11-21T17:48:09.854103Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session will now close 2024-11-21T17:48:09.854183Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session: aborting 2024-11-21T17:48:09.854405Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T17:48:09.854411Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4e8e6ed5-27015a23-883d7982-906645e4_0] Write session: destroy 2024-11-21T17:48:09.993559Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720707. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:48:09.993614Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439790836139053417:2572] TxId: 281474976720707. Ctx: { TraceId: 01jd7xdbq8bj9tmtmvqvsrmkfe, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YzY3NWM2NDEtYjhlMDhkNWMtY2I4YmFkNzktZTQzZmU2ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:48:09.993737Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=YzY3NWM2NDEtYjhlMDhkNWMtY2I4YmFkNzktZTQzZmU2ODY=, ActorId: [15:7439790836139053414:2572], ActorState: ExecuteState, TraceId: 01jd7xdbq8bj9tmtmvqvsrmkfe, Create QueryResponse for error on request, msg: 2024-11-21T17:48:09.994094Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xdbq8bj9tmtmvqwybq56e" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] Test command err: 2024-11-21T17:48:06.433801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790824025279024:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:06.433856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b77/r3tmp/tmpyf1n2O/pdisk_1.dat 2024-11-21T17:48:06.496270Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:06.536722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:06.536763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:28370 WaitRootIsUp 'dc-1'... 
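For reference, the three YQL statements that the partition chooser (TTableHelper, logged by PQ_PARTITION_CHOOSER shortly before the UNAVAILABLE errors above the "-------" separator) prepares against /Root/PQ/SourceIdMeta2 are reproduced here with only the layout changed; in that run they never complete because the KQP executer cannot resolve tablet 72075186224037891. The comments are added annotations, not part of the logged text.

--!syntax_v1
-- SelectQuery: look up an existing partition binding for a source id.
DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
SELECT Partition, CreateTime, AccessTime, SeqNo
FROM `/Root/PQ/SourceIdMeta2`
WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId;

--!syntax_v1
-- UpdateQuery: record or refresh the chosen partition for the source id.
DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo)
VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

--!syntax_v1
-- UpdateAccessTimeQuery: touch the access time of an existing binding.
DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;
UPDATE `/Root/PQ/SourceIdMeta2`
SET AccessTime = $AccessTime
WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition;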
TClient::Ls request: dc-1 2024-11-21T17:48:06.545147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:06.548434Z node 1 :TX_PROXY DEBUG: actor# [1:7439790824025279243:2137] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:06.548450Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790824025279652:2417] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:06.548500Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790824025279289:2159], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:06.548511Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790824025279289:2159], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:06.548562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:06.549090Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278914:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790824025279657:2418] 2024-11-21T17:48:06.549114Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790824025278914:2051] Subscribe: subscriber# [1:7439790824025279657:2418], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.549131Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278917:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790824025279658:2418] 2024-11-21T17:48:06.549134Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790824025278917:2054] Subscribe: subscriber# [1:7439790824025279658:2418], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.549171Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278920:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790824025279659:2418] 2024-11-21T17:48:06.549174Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790824025278920:2057] Subscribe: subscriber# [1:7439790824025279659:2418], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.549221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279657:2418][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824025278914:2051] 2024-11-21T17:48:06.549226Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279658:2418][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824025278917:2054] 2024-11-21T17:48:06.549229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279659:2418][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824025278920:2057] 2024-11-21T17:48:06.549411Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7439790824025279654:2418] 2024-11-21T17:48:06.549420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824025279655:2418] 2024-11-21T17:48:06.549432Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790824025279653:2418][/dc-1] Set up state: owner# [1:7439790824025279289:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.549526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790824025279656:2418] 2024-11-21T17:48:06.549533Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790824025279653:2418][/dc-1] Path was already updated: owner# [1:7439790824025279289:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.549541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279657:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824025279654:2418], cookie# 1 2024-11-21T17:48:06.549544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279658:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824025279655:2418], cookie# 1 2024-11-21T17:48:06.549547Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279659:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824025279656:2418], cookie# 1 2024-11-21T17:48:06.549570Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278914:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790824025279657:2418] 2024-11-21T17:48:06.549574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278914:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824025279657:2418], cookie# 1 2024-11-21T17:48:06.549607Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278917:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790824025279658:2418] 2024-11-21T17:48:06.549611Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278917:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824025279658:2418], cookie# 1 2024-11-21T17:48:06.549615Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278920:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790824025279659:2418] 2024-11-21T17:48:06.549638Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790824025278920:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790824025279659:2418], cookie# 1 2024-11-21T17:48:06.552923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279657:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: 
sender# [1:7439790824025278914:2051], cookie# 1 2024-11-21T17:48:06.552940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279658:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824025278917:2054], cookie# 1 2024-11-21T17:48:06.552942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790824025279659:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824025278920:2057], cookie# 1 2024-11-21T17:48:06.552949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824025279654:2418], cookie# 1 2024-11-21T17:48:06.552956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:06.552964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824025279655:2418], cookie# 1 2024-11-21T17:48:06.552967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:06.552972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790824025279656:2418], cookie# 1 2024-11-21T17:48:06.552974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790824025279653:2418][/dc-1] Unexpected sync response: sender# [1:7439790824025279656:2418], cookie# 1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2024-11-21T17:48:06.559829Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790824025279289:2159], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { ... TICE: [main][11:7439790873864695245:2178][/dc-1/USER_0/.metadata/initialization/migrations] Set up state: owner# [11:7439790869569727791:2102], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:17.297472Z node 11 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][11:7439790873864695245:2178][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [11:7439790873864695248:2178] 2024-11-21T17:48:17.297480Z node 11 :SCHEME_BOARD_SUBSCRIBER INFO: [main][11:7439790873864695245:2178][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [11:7439790869569727791:2102], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:17.297480Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [11:7439790869569727791:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 0 } 2024-11-21T17:48:17.297492Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [11:7439790869569727791:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [11:7439790873864695245:2178] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:17.297506Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7439790869569727791:2102], cacheItem# { Subscriber: { Subscriber: [11:7439790873864695245:2178] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 
2024-11-21T17:48:17.297529Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7439790873864695252:2179], recipient# [11:7439790873864695244:2281], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:17.297621Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:17.301821Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7439790866513381609:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:17.301860Z node 13 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [13:7439790866513381609:2102], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2024-11-21T17:48:17.301944Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439790870808349061:2181][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:17.302023Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439790870808349061:2181][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [13:7439790870808349062:2181] 2024-11-21T17:48:17.302052Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439790870808349061:2181][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [13:7439790870808349063:2181] 2024-11-21T17:48:17.302064Z node 13 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][13:7439790870808349061:2181][/dc-1/USER_0/.metadata/initialization/migrations] Set up state: owner# [13:7439790866513381609:2102], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:17.302074Z node 13 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][13:7439790870808349061:2181][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [13:7439790870808349064:2181] 2024-11-21T17:48:17.302083Z node 13 :SCHEME_BOARD_SUBSCRIBER INFO: [main][13:7439790870808349061:2181][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [13:7439790866513381609:2102], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:17.302093Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [13:7439790866513381609:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 0 } 
2024-11-21T17:48:17.302112Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [13:7439790866513381609:2102], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [13:7439790870808349061:2181] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:17.302132Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7439790866513381609:2102], cacheItem# { Subscriber: { Subscriber: [13:7439790870808349061:2181] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:17.302154Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7439790870808349068:2182], recipient# [13:7439790870808349060:2280], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:17.302240Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:17.585981Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7439790869569727791:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:17.586032Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7439790869569727791:2102], cacheItem# { Subscriber: { Subscriber: [11:7439790869569727841:2114] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:17.586071Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7439790873864695254:2180], recipient# [11:7439790873864695253:2282], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:17.602375Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7439790866513381609:2102], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:17.602419Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7439790866513381609:2102], cacheItem# { Subscriber: { Subscriber: [13:7439790866513381654:2114] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:48:17.602450Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7439790870808349070:2183], recipient# [13:7439790870808349069:2281], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2024-11-21T17:48:17.838606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790872497501184:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:17.838750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b51/r3tmp/tmp1LE6sY/pdisk_1.dat 2024-11-21T17:48:17.885314Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30153 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:48:17.899371Z node 1 :TX_PROXY DEBUG: actor# [1:7439790872497501422:2100] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:17.899397Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790872497501685:2247] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:17.899442Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790872497501511:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:17.899450Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790872497501511:2140], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:17.899553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:17.899886Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501147:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790872497501691:2248] 2024-11-21T17:48:17.899896Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501144:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790872497501690:2248] 2024-11-21T17:48:17.899910Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790872497501147:2052] Subscribe: subscriber# [1:7439790872497501691:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:17.899913Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790872497501144:2049] Subscribe: subscriber# [1:7439790872497501690:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:17.899925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501150:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790872497501692:2248] 2024-11-21T17:48:17.899928Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790872497501150:2055] Subscribe: subscriber# [1:7439790872497501692:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:17.899932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501691:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790872497501147:2052] 2024-11-21T17:48:17.899942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501690:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790872497501144:2049] 2024-11-21T17:48:17.899944Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501147:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790872497501691:2248] 2024-11-21T17:48:17.899951Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501144:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790872497501690:2248] 2024-11-21T17:48:17.899952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501692:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7439790872497501150:2055] 2024-11-21T17:48:17.899955Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501150:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790872497501692:2248] 2024-11-21T17:48:17.899961Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790872497501688:2248] 2024-11-21T17:48:17.899965Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790872497501687:2248] 2024-11-21T17:48:17.899976Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790872497501686:2248][/dc-1] Set up state: owner# [1:7439790872497501511:2140], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:17.900018Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790872497501689:2248] 2024-11-21T17:48:17.900029Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790872497501686:2248][/dc-1] Path was already updated: owner# [1:7439790872497501511:2140], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:17.900036Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501690:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790872497501687:2248], cookie# 1 2024-11-21T17:48:17.900044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501691:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790872497501688:2248], cookie# 1 2024-11-21T17:48:17.900047Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501692:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790872497501689:2248], cookie# 1 2024-11-21T17:48:17.900053Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501144:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790872497501690:2248], cookie# 1 2024-11-21T17:48:17.900063Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501147:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790872497501691:2248], cookie# 1 2024-11-21T17:48:17.900067Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790872497501150:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790872497501692:2248], cookie# 1 2024-11-21T17:48:17.900073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501690:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790872497501144:2049], cookie# 1 2024-11-21T17:48:17.900080Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7439790872497501691:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790872497501147:2052], cookie# 1 2024-11-21T17:48:17.900082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790872497501692:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790872497501150:2055], cookie# 1 2024-11-21T17:48:17.900086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790872497501687:2248], cookie# 1 2024-11-21T17:48:17.900090Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:17.900097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790872497501688:2248], cookie# 1 2024-11-21T17:48:17.900106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:17.900109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790872497501689:2248], cookie# 1 2024-11-21T17:48:17.900110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790872497501686:2248][/dc-1] Unexpected sync response: sender# [1:7439790872497501689:2248], cookie# 1 2024-11-21T17:48:17.905620Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790872497501511:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:17.905684Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790872497501511:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... n: 5 Partial: 0 }: sender# [5:7439790881775856588:2054], cookie# 6 2024-11-21T17:48:19.814394Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7439790881775856928:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [5:7439790881775857610:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211299490 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:19.814396Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7439790881775857616:2666][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439790881775856591:2057], cookie# 6 2024-11-21T17:48:19.814399Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439790881775857610:2666][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439790881775857611:2666], cookie# 6 2024-11-21T17:48:19.814398Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7439790881775856928:2148], cacheItem# { Subscriber: { Subscriber: [5:7439790881775857610:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211299490 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 5 IsSync: true Partial: 0 } 2024-11-21T17:48:19.814400Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439790881775857610:2666][/dc-1/USER_1] Sync is in progress: cookie# 6, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:19.814401Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7439790881775856928:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/a/table PathId: Partial: 0 } 2024-11-21T17:48:19.814402Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439790881775857610:2666][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439790881775857612:2666], cookie# 6 2024-11-21T17:48:19.814404Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7439790881775856928:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/a/table PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: 
[5:7439790881775858241:3198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211299700 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2024-11-21T17:48:19.814405Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439790881775857610:2666][/dc-1/USER_1] Sync is done: cookie# 6, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:19.814407Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7439790881775856928:2148], cacheItem# { Subscriber: { Subscriber: [5:7439790881775858241:3198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211299700 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/a/table TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 3 IsSync: true Partial: 0 } 2024-11-21T17:48:19.814408Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439790881775857610:2666][/dc-1/USER_1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [5:7439790881775857613:2666], cookie# 6 2024-11-21T17:48:19.814410Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7439790881775856928:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_1 PathId: Partial: 0 } 2024-11-21T17:48:19.814410Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7439790881775857610:2666][/dc-1/USER_1] Unexpected sync response: sender# [5:7439790881775857613:2666], cookie# 6 2024-11-21T17:48:19.814412Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7439790881775856928:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [5:7439790881775857610:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211299490 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:48:19.814415Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7439790881775856928:2148], cacheItem# { Subscriber: { Subscriber: [5:7439790881775857610:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732211299490 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 3] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 6 IsSync: true Partial: 0 } 2024-11-21T17:48:19.814448Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7439790881775858604:3522], recipient# [5:7439790881775858603:3521], result# { ErrorCount: 0 
DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/table TableId: [72057594046644480:6:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_1 TableId: [72057594046644480:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037892 Coordinators: 72075186224037893 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037894 Mediators: 72075186224037895 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_0/a/table TableId: [72057594046644480:7:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_1 TableId: [72057594046644480:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 3] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037892 Coordinators: 72075186224037893 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037894 Mediators: 72075186224037895 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:48:19.814461Z node 5 :TX_PROXY DEBUG: Actor# [5:7439790881775858603:3521] txid# 281474976715672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:48:19.814469Z node 5 :TX_PROXY ERROR: Access denied for user1@builtin with access SelectRow to path dc-1/USER_0/table 2024-11-21T17:48:19.814474Z node 5 :TX_PROXY DEBUG: Actor# [5:7439790881775858603:3521] txid# 281474976715672 SEND to# [5:7439790881775858602:3520] Source {TEvProposeTransactionStatus Status# 5} 2024-11-21T17:48:19.821375Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439790881775856585:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7439790882309431730:2100] 2024-11-21T17:48:19.821378Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439790881775856588:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [6:7439790879259601543:2099] 2024-11-21T17:48:19.821387Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439790881775856588:2054] Unsubscribe: subscriber# [6:7439790879259601543:2099], path# /dc-1/USER_1 2024-11-21T17:48:19.821392Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439790881775856588:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# 
[7:7439790882309431731:2100] 2024-11-21T17:48:19.821394Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439790881775856585:2051] Unsubscribe: subscriber# [7:7439790882309431730:2100], path# /dc-1/USER_0 2024-11-21T17:48:19.821394Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439790881775856588:2054] Unsubscribe: subscriber# [7:7439790882309431731:2100], path# /dc-1/USER_0 2024-11-21T17:48:19.821397Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439790881775856591:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [6:7439790879259601544:2099] 2024-11-21T17:48:19.821400Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439790881775856585:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [6:7439790879259601542:2099] 2024-11-21T17:48:19.821400Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439790881775856591:2057] Unsubscribe: subscriber# [6:7439790879259601544:2099], path# /dc-1/USER_1 2024-11-21T17:48:19.821402Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439790881775856585:2051] Unsubscribe: subscriber# [6:7439790879259601542:2099], path# /dc-1/USER_1 2024-11-21T17:48:19.821404Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7439790881775856591:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7439790882309431732:2100] 2024-11-21T17:48:19.821405Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7439790881775856591:2057] Unsubscribe: subscriber# [7:7439790882309431732:2100], path# /dc-1/USER_0 2024-11-21T17:48:19.821471Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 6 2024-11-21T17:48:19.821613Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:48:19.821644Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2024-11-21T17:48:19.822060Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionDeadNode >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots |78.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |78.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> TTablesWithReboots::AlterTableFollowersWithReboots >> TTablesWithReboots::CreateTableWithReboots >> TTablesWithReboots::AlterTableConfigWithReboots |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TCdcStreamTests::Basic >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Zstd] [GOOD] >> TCdcStreamTests::VirtualTimestamps >> TxUsage::WriteToTopic_Demo_42 [GOOD] |78.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit [GOOD] |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit >> 
TxUsage::WriteToTopic_Demo_43 >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TxUsage::WriteToTopic_Demo_12 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Zstd] [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:42.712876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:42.712899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:42.712904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:42.712909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:42.712915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:42.712918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:42.712926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:42.713008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:42.721926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:42.721946Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:42.724170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:42.724332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:42.724374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:42.727300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:42.727377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:42.727456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T17:47:42.727622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:42.728205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:42.728490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:42.728501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:42.728514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:42.728520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:42.728526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:42.728565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:42.732571Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:42.749355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:42.749431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.749484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:42.749522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:42.749528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.750297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:42.750325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:42.750383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.750393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:42.750397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T17:47:42.750402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:42.750813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.750824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:42.750829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:42.751171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.751182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.751187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:42.751194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:42.751709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:42.752063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:42.752101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:42.752276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:42.752296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:42.752301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:42.752342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:42.752347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:42.752373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:42.752384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:42.752762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:42.752770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:42.752805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:42.752809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:42.752892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:42.752898Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:42.752909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:42.752913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:42.752919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:42.752924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:42.752929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:42.752932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:42.752944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:42.752949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:42.752953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... _COORDINATOR: Erasing txId 1003 2024-11-21T17:48:21.454628Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:21.454701Z node 143 :DATASHARD_RESTORE DEBUG: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 } } 2024-11-21T17:48:21.454707Z node 143 :DATASHARD_RESTORE NOTICE: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 } 2024-11-21T17:48:21.454718Z node 143 :DATASHARD_RESTORE DEBUG: [Import] [s3:1003] GetObject: key# /data_01.csv, range# 0-22 2024-11-21T17:48:21.454774Z node 143 :DATASHARD_RESTORE DEBUG: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 } } 2024-11-21T17:48:21.454777Z node 143 :DATASHARD_RESTORE NOTICE: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 } 2024-11-21T17:48:21.454784Z node 143 :DATASHARD_RESTORE DEBUG: [Import] [s3:1003] GetObject: key# /data_00.csv.zst, range# 0-22 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:48:21.454845Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:21.454852Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:48:21.454922Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: 
NotifyTxCompletion operation in-flight, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:21.454926Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:48:21.454930Z node 143 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1003, at schemeshard: 72057594046678944 REQUEST: GET /data_01.csv HTTP/1.1 HEADERS: Host: localhost:25345 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 037FBCA0-EDEB-413E-8F98-F9AD6A4F06F5 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_01.csv / 23 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:25345 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A00787DB-2AB0-46D7-BADF-F22414266F03 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2024-11-21T17:48:21.455313Z node 143 :DATASHARD_RESTORE DEBUG: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2024-11-21T17:48:21.455320Z node 143 :DATASHARD_RESTORE TRACE: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2024-11-21T17:48:21.455342Z node 143 :DATASHARD_RESTORE NOTICE: [Import] [s3:1003] Finish: success# 0, error# Value parse error: '(/ q"a1"' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. 
on line: (/ q"a1","value1", writtenBytes# 0, writtenRows# 0 2024-11-21T17:48:21.455351Z node 143 :DATASHARD_RESTORE INFO: [Import] [s3:1003] Upload rows: count# 0, size# 8 2024-11-21T17:48:21.455395Z node 143 :DATASHARD_RESTORE DEBUG: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2024-11-21T17:48:21.455398Z node 143 :DATASHARD_RESTORE TRACE: [Import] [s3:1003] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2024-11-21T17:48:21.455419Z node 143 :DATASHARD_RESTORE INFO: [Import] [s3:1003] Upload rows: count# 1, size# 34 2024-11-21T17:48:21.457683Z node 143 :DATASHARD_RESTORE DEBUG: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 } } 2024-11-21T17:48:21.457696Z node 143 :DATASHARD_RESTORE NOTICE: [Import] [s3:1003] Process download info at 'UploadResponse': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 } 2024-11-21T17:48:21.457701Z node 143 :DATASHARD_RESTORE NOTICE: [Import] [s3:1003] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2024-11-21T17:48:21.460448Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 345 RawX2: 614180325656 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'(\265/\375 \016q\000\000\"a1\"\' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. on line: (\265/\375 \016q\000\000\"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:48:21.460461Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2024-11-21T17:48:21.460480Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 345 RawX2: 614180325656 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'(\265/\375 \016q\000\000\"a1\"\' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. on line: (\265/\375 \016q\000\000\"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:48:21.460495Z node 143 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 345 RawX2: 614180325656 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "Value parse error: \'(\265/\375 \016q\000\000\"a1\"\' 12TBasicStringIcNSt4__y111char_traitsIcEEE is expected. 
on line: (\265/\375 \016q\000\000\"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:48:21.460509Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.460538Z node 143 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.461142Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 614180325655 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2024-11-21T17:48:21.461154Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:21.461167Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 614180325655 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2024-11-21T17:48:21.461177Z node 143 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 343 RawX2: 614180325655 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2024-11-21T17:48:21.461183Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.461186Z node 143 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.461191Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:21.461195Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:21.461200Z node 143 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:48:21.461238Z node 143 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.461301Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.461554Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.461574Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.461581Z node 143 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:48:21.461592Z node 143 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:21.461595Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:21.461601Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:21.461613Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [143:454:2417] message: TxId: 1003 2024-11-21T17:48:21.461618Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:21.461624Z node 143 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:21.461628Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:21.461653Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:48:21.462231Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:21.462240Z node 143 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [143:501:2462] TestWaitNotification: OK eventTxId 1003 >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> TxUsage::WriteToTopic_Demo_13 |78.4%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TConsistentOpsWithReboots::DropIndexedTableWithReboots [GOOD] |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TxUsage::WriteToTopic_Demo_31 [GOOD] >> TOlap::AlterStore >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream |78.4%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TOlap::CreateStore >> TxUsage::WriteToTopic_Demo_32 >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> TOlap::CustomDefaultPresets >> KqpQueryService::Tcl >> KqpQueryService::AlterTempTable >> KqpQueryService::QueryOnClosedSession >> TOlap::CustomDefaultPresets [GOOD] >> TOlap::CreateStore [GOOD] >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::Negative ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropIndexedTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:20.873126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:20.873150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.873155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:20.873160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:20.873166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:20.873170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:20.873180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:20.873261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:20.884335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:20.884357Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.886413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:20.886498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:20.886525Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:20.888744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:20.888808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:20.888870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.889018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:20.889533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.889779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.889788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.889799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:20.889805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.889811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:20.889844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:20.890812Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:20.901905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:20.901984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.902028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:20.902068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:20.902072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.902625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.902644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T17:47:20.902689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.902698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:20.902701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:20.902706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:20.903011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.903020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:20.903024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:20.903263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.903271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.903276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.903282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.903745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:20.904057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:20.904094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:20.904267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:20.904285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:20.904290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.904328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:20.904332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:20.904352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T17:47:20.904360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:20.904635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:20.904641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:20.904668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:20.904671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:20.904722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:20.904726Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:20.904732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:20.904735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.904738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:20.904741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:20.904746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:20.904749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:20.904758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:20.904763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:20.904767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
2.629256Z node 233 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:22.629775Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:22.630021Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:22.630141Z node 233 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:48:22.630183Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:22.630238Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:22.630282Z node 233 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:48:22.630826Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:48:22.630867Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 Forgetting tablet 72075186233409546 2024-11-21T17:48:22.631215Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:22.631221Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:48:22.631232Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:48:22.631237Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:48:22.631242Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 Forgetting tablet 72075186233409547 2024-11-21T17:48:22.631324Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:22.631346Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:22.631357Z node 233 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2024-11-21T17:48:22.631742Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:48:22.631791Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 1 
2024-11-21T17:48:22.631848Z node 233 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2024-11-21T17:48:22.632286Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:48:22.632334Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2024-11-21T17:48:22.633189Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:48:22.633206Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:48:22.633476Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:48:22.633485Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:48:22.633509Z node 233 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2024-11-21T17:48:22.633534Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:22.633540Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2024-11-21T17:48:22.633557Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2024-11-21T17:48:22.633564Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 9], at schemeshard: 72057594046678944 2024-11-21T17:48:22.633570Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:48:22.633574Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2024-11-21T17:48:22.633580Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T17:48:22.633583Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:48:22.633588Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:48:22.633593Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:48:22.633598Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:48:22.633739Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:48:22.633746Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 
2024-11-21T17:48:22.633757Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:48:22.633762Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:48:22.634124Z node 233 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:48:22.634181Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:22.634188Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:22.634257Z node 233 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:22.634279Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:22.634283Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [233:787:2728] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:48:22.634351Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:22.634392Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 53us result status StatusSuccess 2024-11-21T17:48:22.634475Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:22.634529Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:22.634548Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB/Table1" took 20us result status StatusPathDoesNotExist 2024-11-21T17:48:22.634565Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirB/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirB\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirB/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirB" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T17:48:22.634608Z node 233 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:22.634620Z node 233 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:48:22.634626Z node 233 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 >> KqpQueryService::ExecuteQueryWithWorkloadManager ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:23.969551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:23.969567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:23.969570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:23.969573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:23.969577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:23.969579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:23.969584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:23.969639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:23.976759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:23.976774Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:23.979360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:23.979987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:23.980012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:23.981465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:23.981649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:23.981744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:23.981802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:23.982903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.983203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:23.983214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.983255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:23.983262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:23.983269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:23.983280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.984686Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:24.000730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:24.000822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.000873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:24.000917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:24.000926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.001541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:24.001569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:24.001605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.001613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:24.001617Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:24.001622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:24.002025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.002038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:24.002042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:24.002420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.002431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.002438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:24.002444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:24.003049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:24.003448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:24.003492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:24.003641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:24.003665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:24.003675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:24.003727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:24.003734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:24.003761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:24.003774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:24.004172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:24.004181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:24.004220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:24.004242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:24.004319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.004325Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:24.004336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:24.004341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:24.004345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:24.004349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:24.004352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:24.004354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:24.004363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:24.004367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:24.004369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:24.004594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:24.004607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:24.004612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:24.004616Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:24.004620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:24.004634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
efCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:48:24.098337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:48:24.098863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:24.098872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:48:24.098924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:24.098950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:24.098955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:48:24.098960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T17:48:24.099027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.099034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:24.099043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:48:24.099210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:48:24.099221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:48:24.099225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:48:24.099230Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:48:24.099235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:48:24.099367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:48:24.099376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:48:24.099379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:48:24.099382Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:48:24.099385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:48:24.099395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:48:24.099750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:48:24.099768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 2024-11-21T17:48:24.099785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2024-11-21T17:48:24.100007Z node 1 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2024-11-21T17:48:24.100032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.100044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2024-11-21T17:48:24.100177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:24.100438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:24.100725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.111543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T17:48:24.111566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:24.111584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T17:48:24.111592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T17:48:24.111661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2024-11-21T17:48:24.111663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:24.111669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:48:24.112170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.112245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.112267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.112273Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:48:24.112299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:48:24.112302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:48:24.112306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:48:24.112316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 102 2024-11-21T17:48:24.112321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:48:24.112325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:48:24.112328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:48:24.112351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:24.112746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:48:24.112759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:418:2397] TestWaitNotification: OK eventTxId 102 2024-11-21T17:48:24.112904Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:24.112977Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 83us result status StatusSuccess 2024-11-21T17:48:24.113130Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |78.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:23.924991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:23.925015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:23.925019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:23.925023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:23.925027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:23.925030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:23.925036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:23.925098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:23.933981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:23.934001Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:23.937056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:23.937886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:23.937916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:23.939617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T17:48:23.939836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:23.939958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:23.940065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:23.941077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.941323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:23.941333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.941390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:23.941401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:23.941410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:23.941425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.942668Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:23.956786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:23.956859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.956904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:23.956937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:23.956943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.957549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:23.957568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:23.957600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.957607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:23.957610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:23.957613Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:23.957902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.957908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:23.957910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:23.958165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.958171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.958175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:23.958181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:23.958570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:23.958925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:23.958968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:23.959108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:23.959127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:23.959134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:23.959169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:23.959173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:23.959194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:23.959203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:23.959524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:23.959529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T17:48:23.959559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.959563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:23.959623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.959628Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:23.959636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:23.959639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:23.959642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:23.959645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:23.959649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:23.959651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:23.959658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:23.959662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:23.959665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:23.959881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:23.959891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:23.959894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:23.959898Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:23.959901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:23.959911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
n: 72057594037968897 2024-11-21T17:48:24.037608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:48:24.037614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T17:48:24.037630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 2 -> 3 2024-11-21T17:48:24.038036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:24.038061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:48:24.038540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.038590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.038596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState at tabletId# 72057594046678944 2024-11-21T17:48:24.038620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409547 2024-11-21T17:48:24.039909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382272 2024-11-21T17:48:24.039943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72075186233409547 2024-11-21T17:48:24.043545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:48:24.043579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:48:24.043616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:48:24.043633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:24.043651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:48:24.043672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:48:24.043687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2024-11-21T17:48:24.043702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:48:24.043721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:48:24.043735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:48:24.043751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:48:24.043767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:48:24.045049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:48:24.045099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:48:24.045111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:48:24.045117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:48:24.045130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:48:24.045137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:48:24.045141Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:48:24.045159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:48:24.045166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:48:24.045170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:48:24.045182Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:48:24.045189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:48:24.045193Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:48:24.045204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:48:24.045213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:48:24.045218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:48:24.045226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:48:24.045233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:48:24.045237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:48:24.045287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:48:24.045295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:48:24.045299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:48:24.045318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:48:24.045325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:48:24.045329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:48:24.045353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:48:24.045360Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:48:24.045364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:48:24.045383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:48:24.045390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:48:24.045394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:48:24.045407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:48:24.045414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; TestModificationResult got TxId: 102, wait until txId: 102 |78.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |78.5%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::RebootSchemeShard >> TOlap::AlterTtl [GOOD] |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> KqpQueryService::Tcl [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::CTASWithoutPerStatement |78.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:23.846841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:23.846871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:23.846877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:23.846882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:23.846889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:23.846893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:23.846902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:23.846987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:23.858559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:23.858583Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:23.861944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:23.862852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:23.862893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:23.864417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:23.864652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:23.864755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:23.864829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:23.865807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.866110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:23.866122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.866186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:23.866194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:23.866201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2024-11-21T17:48:23.866217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.867513Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:23.882101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:23.882214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.882268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:23.882307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:23.882313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.883252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:23.883284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:23.883356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.883367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:23.883371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:23.883377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:23.883889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.883901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:23.883905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:23.884303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.884314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.884321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:23.884328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:23.884909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:23.885260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:23.885302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:23.885457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:23.885483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:23.885490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:23.885543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:23.885549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:23.885577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:23.885587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:23.886136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:23.886147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:23.886191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:23.886198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:23.886273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:23.886280Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:23.886292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:23.886296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:23.886302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:23.886307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:23.886313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:23.886317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T17:48:23.886329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:23.886334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:23.886338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:23.886666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:23.886681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:23.886685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:23.886691Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:23.886695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:23.886708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... rd: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2024-11-21T17:48:24.545398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:24.545429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2024-11-21T17:48:24.545437Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:48:24.545449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.545454Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2024-11-21T17:48:24.546082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.546149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.546157Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply ProgressState at tablet: 72057594046678944 2024-11-21T17:48:24.546172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T17:48:24.546210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:24.546725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2024-11-21T17:48:24.546762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 105 at step: 5000006 2024-11-21T17:48:24.546912Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:24.546934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:24.546943Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000006 2024-11-21T17:48:24.547084Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2024-11-21T17:48:24.547117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:48:24.547130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:24.547298Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186233409546;tx_state=execute;fline=manager.cpp:215;path_id=3;tiering_name=Tiering1;event=not_found; 2024-11-21T17:48:24.547315Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186233409546;tx_state=execute;fline=manager.cpp:215;path_id=3;tiering_name=Tiering1;event=not_found; FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2024-11-21T17:48:24.548203Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:24.548219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:48:24.548297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:24.548337Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:24.548344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 2 2024-11-21T17:48:24.548354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T17:48:24.548481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.548488Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState at tablet: 
72057594046678944 2024-11-21T17:48:24.548499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:48:24.548672Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:48:24.548690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:48:24.548698Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:48:24.548706Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T17:48:24.548715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:48:24.548799Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:48:24.548807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:48:24.548815Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:48:24.548819Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2024-11-21T17:48:24.548822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:48:24.548831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T17:48:24.549301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:48:24.549706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:48:24.549733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:48:24.571337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T17:48:24.571363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:24.571387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T17:48:24.571395Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, 
ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T17:48:24.571516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 2024-11-21T17:48:24.571520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:24.571528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 FAKE_COORDINATOR: Erasing txId 105 2024-11-21T17:48:24.572377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.572527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.572560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:48:24.572570Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T17:48:24.572595Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T17:48:24.572600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:48:24.572607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2024-11-21T17:48:24.572631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:334:2314] message: TxId: 105 2024-11-21T17:48:24.572640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:48:24.572646Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:48:24.572651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:48:24.572691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:24.573335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:48:24.573351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:512:2490] TestWaitNotification: OK eventTxId 105 >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 
72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:47.481790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:47.481824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.481865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:47.481875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:47.481898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:47.481904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:47.481921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.482042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:47.490782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:47.490803Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.493279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:47.493388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:47.493433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:47.497858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:47.497950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:47.498057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.498302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.498936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.499223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.499231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.499241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:47.499247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.499251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2024-11-21T17:47:47.499290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:47.500476Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.513293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:47.513410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.513489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:47.513533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:47.513543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.514361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.514388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:47.514439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.514448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:47.514451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:47.514456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:47.514828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.514840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:47.514844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:47.515272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.515290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.515297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.515306Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.515995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:47.516782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:47.516853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:47.517089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.517122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:47.517130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.517197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:47.517206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.517243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:47.517258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.517804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.517818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.517883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.517890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:47.517987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.517997Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:47.518011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:47.518016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.518022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:47.518027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.518032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:47.518037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:47.518051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:47.518057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:47.518062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 1T17:48:24.326766Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:1, at tablet 72057594046678944 2024-11-21T17:48:24.326789Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2024-11-21T17:48:24.326813Z node 146 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[146:445:2404], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:48:24.327166Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:24.327174Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:24.327221Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:24.327226Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [146:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:48:24.327303Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:48:24.327311Z node 146 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:1, ProgressState, NeedSyncHive: 0 2024-11-21T17:48:24.327315Z node 146 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 240 -> 240 2024-11-21T17:48:24.327410Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:24.327421Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:24.327425Z node 146 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 
2024-11-21T17:48:24.327429Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:48:24.327435Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 11 2024-11-21T17:48:24.327448Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/2, is published: true 2024-11-21T17:48:24.327899Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:48:24.327910Z node 146 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T17:48:24.327923Z node 146 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 2/2 2024-11-21T17:48:24.327927Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/2 2024-11-21T17:48:24.327933Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/2, is published: true 2024-11-21T17:48:24.327938Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/2 2024-11-21T17:48:24.327946Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:24.327951Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:24.327972Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2024-11-21T17:48:24.327976Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:48:24.327978Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:48:24.327988Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T17:48:24.328088Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:48:24.328358Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:24.328366Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:48:24.328421Z node 146 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:24.328435Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:24.328438Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [146:690:2592] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:48:24.328501Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:24.328538Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 47us result status StatusSuccess 2024-11-21T17:48:24.328599Z node 
146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 Coordinators: 72075186234409548 Coordinators: 72075186234409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409550 Mediators: 72075186234409551 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:24.328652Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2024-11-21T17:48:24.328664Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 11us result status StatusSuccess 2024-11-21T17:48:24.328686Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 Coordinators: 72075186234409548 Coordinators: 72075186234409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409550 Mediators: 72075186234409551 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 2024-11-21T17:48:24.328714Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:24.328726Z node 146 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2024-11-21T17:48:24.328759Z node 146 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_41 [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> KqpQueryService::FlowControllOnHugeRealTable-LongRow >> KqpService::SessionBusy >> TxUsage::WriteToTopic_Demo_20_RestartNo [GOOD] >> LocalPartition::DescribeHang [GOOD] >> LocalPartition::DiscoveryHang >> KqpQueryService::TableSink_OlapInsert >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> KqpQueryService::QueryOnClosedSession [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_41 [GOOD] Test command err: 2024-11-21T17:47:18.239423Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790619365541212:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.239502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013ef/r3tmp/tmpWJmDEq/pdisk_1.dat 2024-11-21T17:47:18.273330Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:18.303415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18752, node 1 2024-11-21T17:47:18.316868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/0013ef/r3tmp/yandexCS4dpN.tmp 2024-11-21T17:47:18.316883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/0013ef/r3tmp/yandexCS4dpN.tmp 2024-11-21T17:47:18.316942Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/0013ef/r3tmp/yandexCS4dpN.tmp 2024-11-21T17:47:18.316977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:18.320990Z INFO: TTestServer started on Port 3127 GrpcPort 18752 TClient is connected to server localhost:3127 2024-11-21T17:47:18.345023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.345059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.346111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:18752 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.390876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.393940Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.404442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:47:18.471659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.569096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619365541822:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.569115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619365541830:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.569121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.569909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.570037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619365541864:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.570055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.571647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790619365541836:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:18.599052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.605557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.616798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.641062Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790619365542109:2341], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.641189Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmFlM2VhOTktNmZlZGZhYzctNjc0N2E4YmUtZTBkNzc5YjY=, ActorId: [1:7439790619365541819:2304], ActorState: ExecuteState, TraceId: 01jd7xbsn8f410e9vfp1xnp9pc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.641641Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790619365542179:2596] 2024-11-21T17:47:23.238508Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790619365541212:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.238539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.783752Z :WriteToTopic_Demo_4 INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.787525Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.795863Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790640840378977:2773] connected; active server actors: 1 2024-11-21T17:47:23.795904Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.796048Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.796116Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.796259Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.796291Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.796369Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.796406Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.796409Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.796411Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.796414Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.796419Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.796424Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.796425Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.796450Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.796528Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.796533Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.796537Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790640840378976:2772], now have 1 active actors on pipe 2024-11-21T17:47:23.796539Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.796543Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790640840379012:2428], now have 1 active actors on pipe 2024-11-21T17:47:23.798929Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439790619365541487 RawX2: 4294969494 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: ... , 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 82, partNo: 0, Offset: 81 is stored on disk 2024-11-21T17:48:25.222252Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222258Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 82, partNo: 1, Offset: 81 is stored on disk 2024-11-21T17:48:25.222261Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222265Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 83, partNo: 0, Offset: 82 is stored on disk 2024-11-21T17:48:25.222268Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222273Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 83, partNo: 1, Offset: 82 is stored on disk 2024-11-21T17:48:25.222275Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222279Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 84, partNo: 0, Offset: 83 is stored on disk 2024-11-21T17:48:25.222282Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222286Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 84, partNo: 1, Offset: 83 is stored on disk 2024-11-21T17:48:25.222289Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222294Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 85, partNo: 0, Offset: 84 is stored on disk 2024-11-21T17:48:25.222296Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222304Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 85, partNo: 1, Offset: 84 is stored on disk 2024-11-21T17:48:25.222306Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222311Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 86, partNo: 0, Offset: 85 is stored on disk 2024-11-21T17:48:25.222314Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.222319Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 86, partNo: 1, Offset: 85 is stored on disk 2024-11-21T17:48:25.223150Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 87 partNo 0 2024-11-21T17:48:25.223159Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 87 partNo 1 2024-11-21T17:48:25.223513Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715675}, 100000} part blob complete sourceId '\0test-message_group_id' seqNo 87 partNo 1 FormedBlobsCount 0 NewHead: Offset 86 PartNo 0 PackedSize 1000253 count 1 nextOffset 87 batches 2 2024-11-21T17:48:25.223654Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {9, 281474976715675}, 100000} compactOffset 86,1 HeadOffset 81 endOffset 86 curOffset 87 D0000100000_00000000000000000086_00000_0000000001_00001| size 1000243 WTime 1732211305223 2024-11-21T17:48:25.223824Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 163 requestId: cookie: 82 2024-11-21T17:48:25.223835Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 166 requestId: cookie: 83 2024-11-21T17:48:25.223841Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 167 requestId: cookie: 84 2024-11-21T17:48:25.223848Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 169 requestId: cookie: 85 2024-11-21T17:48:25.223854Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 171 requestId: cookie: 86 2024-11-21T17:48:25.223862Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|6106df36-2517286e-5a1cde82-4958586a_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:48:25.223870Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|6106df36-2517286e-5a1cde82-4958586a_0 grpc closed 2024-11-21T17:48:25.223873Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|6106df36-2517286e-5a1cde82-4958586a_0 is DEAD 2024-11-21T17:48:25.224439Z node 9 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:48:25.224764Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:25.224773Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:25.224777Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:25.224781Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:25.224785Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:25.224816Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:25.224823Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:25.225092Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:25.225103Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790906444936519:2475] destroyed 2024-11-21T17:48:25.225106Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:25.225108Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790906444936522:2475] destroyed 2024-11-21T17:48:25.225237Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:48:25.225249Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:48:25.225692Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976715675 LockNode: 9 Status: STATUS_NOT_FOUND 2024-11-21T17:48:25.225698Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for WriteId {9, 281474976715675} 2024-11-21T17:48:25.225704Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.225716Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1000117 2024-11-21T17:48:25.225725Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.225734Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 87, partNo: 0, Offset: 86 is stored on disk 2024-11-21T17:48:25.225739Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.225743Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715675}, 100000}, SeqNo: 87, partNo: 1, Offset: 86 is stored on disk 2024-11-21T17:48:25.225908Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715675}, 100000}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2024-11-21T17:48:25.225927Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 173 requestId: cookie: 87 2024-11-21T17:48:25.226760Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {9, 281474976715675}, 100000} 2024-11-21T17:48:25.226778Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {9, 281474976715675} 2024-11-21T17:48:25.226795Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T17:48:25.227160Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) >> KqpQueryServiceScripts::ParseScript >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |78.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> KqpQueryService::DdlColumnTable |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> KqpService::SessionBusy [GOOD] >> KqpService::SessionBusyRetryOperation >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] Test command err: Trying to start YDB, gRPC: 24230, MsgBus: 26805 2024-11-21T17:48:24.045057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790904611356039:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:24.045083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012cb/r3tmp/tmpPfDl34/pdisk_1.dat 2024-11-21T17:48:24.109452Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24230, node 1 2024-11-21T17:48:24.124604Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:24.124618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:24.124620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:24.124665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26805 2024-11-21T17:48:24.145226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:24.145263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:24.146768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:24.190369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.193102Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:24.383661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790904611356637:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.383718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.383958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790904611356649:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.384862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:48:24.387012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790904611356651:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:48:24.497973Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T17:48:24.498730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 2024-11-21T17:48:24.564249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.584481Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T17:48:24.588451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.627902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.648705Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T17:48:24.649172Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790904611357204:2374], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:48:24.649232Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWQwOGIwOWEtNzM3ZjNkMWEtOWE0YzA5MGYtZTU4NzdlZGI=, ActorId: [1:7439790904611357200:2373], ActorState: ExecuteState, TraceId: 01jd7xdt67a4k9b9de2weg209d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:48:24.654620Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790904611357222:2379], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:48:24.654682Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTlkMDMzOGQtMzFjZmU3M2EtNGIxOGM5Yy04ZDc0NzdmNw==, ActorId: [1:7439790904611357220:2378], ActorState: ExecuteState, TraceId: 01jd7xdt6cfv084ymh96zzyd4n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 23357, MsgBus: 15715 2024-11-21T17:48:24.939421Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790903029968955:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:24.944282Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012cb/r3tmp/tmpuqs3MA/pdisk_1.dat 2024-11-21T17:48:24.953608Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23357, node 2 2024-11-21T17:48:24.970125Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:24.970138Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:24.970140Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:24.970173Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15715 TClient is connected to server localhost:15715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:25.036936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:25.036966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:25.037440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.037928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:48:25.044794Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:25.216271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790907324936682:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.216302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.216511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790907324936709:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.217205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:48:25.219119Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:48:25.219216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790907324936711:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:48:25.310777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12454, MsgBus: 12499 2024-11-21T17:48:25.645912Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790905548608969:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:25.646104Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012cb/r3tmp/tmpLklx2X/pdisk_1.dat 2024-11-21T17:48:25.653661Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12454, node 3 2024-11-21T17:48:25.668738Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:25.668753Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:25.668754Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:25.668791Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12499 TClient is connected to server localhost:12499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:25.746369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:25.746402Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:25.747506Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:25.748274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:25.757702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:25.766794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:25.785169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:25.795377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:25.955061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790905548610504:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.955082Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.958305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.965614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.976358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.988321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.007405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.017339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.033196Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790909843578313:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.033219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.033256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790909843578318:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.033876Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:26.043458Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790909843578320:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:26.248382Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.267976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.278267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.322866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.356552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.373451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.392836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.422864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.441154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions |78.5%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> KqpQueryServiceScripts::ValidateScript >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |78.5%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> KqpService::SessionBusyRetryOperation [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> KqpQueryService::ExecuteRetryQuery >> TxUsage::WriteToTopic_Demo_13 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> 
KqpService::SessionBusyRetryOperation [GOOD] Test command err: Trying to start YDB, gRPC: 27946, MsgBus: 12108 2024-11-21T17:48:25.891937Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790908842502843:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:25.891990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001299/r3tmp/tmpPDnVJ7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27946, node 1 2024-11-21T17:48:25.956071Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:25.964259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:25.964272Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:25.964273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:25.964305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12108 2024-11-21T17:48:25.991887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:25.991910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:25.992379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:26.024718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.028646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:26.048922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.073077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:26.138078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.160248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.221500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790913137471535:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.221543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.251776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.257886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.313045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.323095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.331872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.345518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.368960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790913137472050:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.368985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.369024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790913137472055:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.369653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:26.371710Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:48:26.371765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790913137472057:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:26.564779Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2024-11-21T17:48:26.564822Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2024-11-21T17:48:26.564832Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2024-11-21T17:48:26.564839Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2024-11-21T17:48:26.564846Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2024-11-21T17:48:26.564863Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2024-11-21T17:48:26.565792Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2024-11-21T17:48:26.565836Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2024-11-21T17:48:26.565851Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYwMDU5MzUtZDY0OWE1NmYtOTUwZGM3MTMtYWFkMTg3OWQ=, ActorId: [1:7439790913137472343:2454], ActorState: ExecuteState, TraceId: 01jd7xdw24d91cg1hhpd23gz0j, Reply query error, msg: Pending previous query completion proxyRequestId: 15 Trying to start YDB, gRPC: 2861, MsgBus: 6900 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001299/r3tmp/tmpZtKYQ2/pdisk_1.dat 2024-11-21T17:48:26.796187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:26.803401Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2861, node 2 2024-11-21T17:48:26.814590Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:26.814604Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:26.814605Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:26.814644Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6900 TClient is connected to server localhost:6900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVers ... y query error, msg: Pending previous query completion proxyRequestId: 14 2024-11-21T17:48:27.403349Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODZkNjFkZGUtMmVjNDMzNDQtMWNkM2NmZGMtYmQwN2ZhMzg=, ActorId: [2:7439790917087940351:2454], ActorState: ExecuteState, TraceId: 01jd7xdww06jtpebhz9m0zph87, Reply query error, msg: Pending previous query completion proxyRequestId: 15 2024-11-21T17:48:27.425045Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 18 2024-11-21T17:48:27.425088Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 19 2024-11-21T17:48:27.425410Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 20 2024-11-21T17:48:27.425419Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 21 2024-11-21T17:48:27.425424Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 22 2024-11-21T17:48:27.425432Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 23 2024-11-21T17:48:27.425439Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 24 
2024-11-21T17:48:27.425445Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNmM5ZGMtY2IzZjBhMGUtZTNkNTQzZjAtN2I0NDU1Zjc=, ActorId: [2:7439790917087940397:2473], ActorState: ExecuteState, TraceId: 01jd7xdwx04f9ab3p3s5b27vnn, Reply query error, msg: Pending previous query completion proxyRequestId: 25 2024-11-21T17:48:27.443334Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3NDRhYzEtZDZkZGY2YzYtNDg3NzYwZWUtMTE2OTBhNzc=, ActorId: [2:7439790917087940445:2488], ActorState: ExecuteState, TraceId: 01jd7xdwxkdmfysgjtj7mv1zpz, Reply query error, msg: Pending previous query completion proxyRequestId: 28 2024-11-21T17:48:27.443387Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3NDRhYzEtZDZkZGY2YzYtNDg3NzYwZWUtMTE2OTBhNzc=, ActorId: [2:7439790917087940445:2488], ActorState: ExecuteState, TraceId: 01jd7xdwxkdmfysgjtj7mv1zpz, Reply query error, msg: Pending previous query completion proxyRequestId: 29 2024-11-21T17:48:27.443396Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3NDRhYzEtZDZkZGY2YzYtNDg3NzYwZWUtMTE2OTBhNzc=, ActorId: [2:7439790917087940445:2488], ActorState: ExecuteState, TraceId: 01jd7xdwxkdmfysgjtj7mv1zpz, Reply query error, msg: Pending previous query completion proxyRequestId: 30 2024-11-21T17:48:27.443404Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3NDRhYzEtZDZkZGY2YzYtNDg3NzYwZWUtMTE2OTBhNzc=, ActorId: [2:7439790917087940445:2488], ActorState: ExecuteState, TraceId: 01jd7xdwxkdmfysgjtj7mv1zpz, Reply query error, msg: Pending previous query completion proxyRequestId: 31 2024-11-21T17:48:27.443412Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3NDRhYzEtZDZkZGY2YzYtNDg3NzYwZWUtMTE2OTBhNzc=, ActorId: [2:7439790917087940445:2488], ActorState: ExecuteState, TraceId: 01jd7xdwxkdmfysgjtj7mv1zpz, Reply query error, msg: Pending previous query completion proxyRequestId: 32 2024-11-21T17:48:27.443419Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3NDRhYzEtZDZkZGY2YzYtNDg3NzYwZWUtMTE2OTBhNzc=, ActorId: [2:7439790917087940445:2488], ActorState: ExecuteState, TraceId: 01jd7xdwxkdmfysgjtj7mv1zpz, Reply query error, msg: Pending previous query completion proxyRequestId: 33 2024-11-21T17:48:27.443425Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3NDRhYzEtZDZkZGY2YzYtNDg3NzYwZWUtMTE2OTBhNzc=, ActorId: [2:7439790917087940445:2488], ActorState: ExecuteState, TraceId: 01jd7xdwxkdmfysgjtj7mv1zpz, Reply query error, msg: Pending previous query completion proxyRequestId: 34 2024-11-21T17:48:27.463511Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4ODVlY2YtM2MzNGFkZmUtNGRiZjg5MTctMzdmMTZm, ActorId: [2:7439790917087940487:2502], ActorState: ExecuteState, TraceId: 01jd7xdwy71wbgt0wd0xjwe34p, Reply query error, msg: Pending previous query completion proxyRequestId: 37 2024-11-21T17:48:27.463545Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4ODVlY2YtM2MzNGFkZmUtNGRiZjg5MTctMzdmMTZm, ActorId: [2:7439790917087940487:2502], ActorState: ExecuteState, TraceId: 01jd7xdwy71wbgt0wd0xjwe34p, Reply query error, msg: Pending previous query completion proxyRequestId: 38 2024-11-21T17:48:27.463551Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4ODVlY2YtM2MzNGFkZmUtNGRiZjg5MTctMzdmMTZm, ActorId: [2:7439790917087940487:2502], ActorState: ExecuteState, TraceId: 01jd7xdwy71wbgt0wd0xjwe34p, Reply query error, msg: Pending previous query completion proxyRequestId: 39 
2024-11-21T17:48:27.463559Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4ODVlY2YtM2MzNGFkZmUtNGRiZjg5MTctMzdmMTZm, ActorId: [2:7439790917087940487:2502], ActorState: ExecuteState, TraceId: 01jd7xdwy71wbgt0wd0xjwe34p, Reply query error, msg: Pending previous query completion proxyRequestId: 40 2024-11-21T17:48:27.463760Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4ODVlY2YtM2MzNGFkZmUtNGRiZjg5MTctMzdmMTZm, ActorId: [2:7439790917087940487:2502], ActorState: ExecuteState, TraceId: 01jd7xdwy71wbgt0wd0xjwe34p, Reply query error, msg: Pending previous query completion proxyRequestId: 41 2024-11-21T17:48:27.463885Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4ODVlY2YtM2MzNGFkZmUtNGRiZjg5MTctMzdmMTZm, ActorId: [2:7439790917087940487:2502], ActorState: ExecuteState, TraceId: 01jd7xdwy71wbgt0wd0xjwe34p, Reply query error, msg: Pending previous query completion proxyRequestId: 42 2024-11-21T17:48:27.489514Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmUzZjI1NTYtZmEzN2VhYTItMzJjMzE2OTktYzBmOTMzNjc=, ActorId: [2:7439790917087940524:2515], ActorState: ExecuteState, TraceId: 01jd7xdwyz2d6mt3rqqhz0t2dm, Reply query error, msg: Pending previous query completion proxyRequestId: 45 2024-11-21T17:48:27.492962Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmUzZjI1NTYtZmEzN2VhYTItMzJjMzE2OTktYzBmOTMzNjc=, ActorId: [2:7439790917087940524:2515], ActorState: ExecuteState, TraceId: 01jd7xdwyz2d6mt3rqqhz0t2dm, Reply query error, msg: Pending previous query completion proxyRequestId: 46 2024-11-21T17:48:27.492999Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmUzZjI1NTYtZmEzN2VhYTItMzJjMzE2OTktYzBmOTMzNjc=, ActorId: [2:7439790917087940524:2515], ActorState: ExecuteState, TraceId: 01jd7xdwyz2d6mt3rqqhz0t2dm, Reply query error, msg: Pending previous query completion proxyRequestId: 47 2024-11-21T17:48:27.493004Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmUzZjI1NTYtZmEzN2VhYTItMzJjMzE2OTktYzBmOTMzNjc=, ActorId: [2:7439790917087940524:2515], ActorState: ExecuteState, TraceId: 01jd7xdwyz2d6mt3rqqhz0t2dm, Reply query error, msg: Pending previous query completion proxyRequestId: 48 2024-11-21T17:48:27.493009Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmUzZjI1NTYtZmEzN2VhYTItMzJjMzE2OTktYzBmOTMzNjc=, ActorId: [2:7439790917087940524:2515], ActorState: ExecuteState, TraceId: 01jd7xdwyz2d6mt3rqqhz0t2dm, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2024-11-21T17:48:27.513291Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmZhY2MwYmQtNWNkMWRjMjktZTRiOTVhMzMtZjAwNWZiYzM=, ActorId: [2:7439790917087940557:2527], ActorState: ExecuteState, TraceId: 01jd7xdwzsed5gvngqvjyh07xh, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2024-11-21T17:48:27.513317Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmZhY2MwYmQtNWNkMWRjMjktZTRiOTVhMzMtZjAwNWZiYzM=, ActorId: [2:7439790917087940557:2527], ActorState: ExecuteState, TraceId: 01jd7xdwzsed5gvngqvjyh07xh, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2024-11-21T17:48:27.513322Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmZhY2MwYmQtNWNkMWRjMjktZTRiOTVhMzMtZjAwNWZiYzM=, ActorId: [2:7439790917087940557:2527], ActorState: ExecuteState, TraceId: 01jd7xdwzsed5gvngqvjyh07xh, Reply query error, msg: Pending previous query completion proxyRequestId: 54 
2024-11-21T17:48:27.513332Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmZhY2MwYmQtNWNkMWRjMjktZTRiOTVhMzMtZjAwNWZiYzM=, ActorId: [2:7439790917087940557:2527], ActorState: ExecuteState, TraceId: 01jd7xdwzsed5gvngqvjyh07xh, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2024-11-21T17:48:27.531164Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmJmYjExOTUtYTI4MjAwODQtN2E0N2IxYmYtYmU1MWMwOQ==, ActorId: [2:7439790917087940586:2538], ActorState: ExecuteState, TraceId: 01jd7xdx0a73n90k6nkkngkm5f, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2024-11-21T17:48:27.532517Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmJmYjExOTUtYTI4MjAwODQtN2E0N2IxYmYtYmU1MWMwOQ==, ActorId: [2:7439790917087940586:2538], ActorState: ExecuteState, TraceId: 01jd7xdx0a73n90k6nkkngkm5f, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2024-11-21T17:48:27.532687Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmJmYjExOTUtYTI4MjAwODQtN2E0N2IxYmYtYmU1MWMwOQ==, ActorId: [2:7439790917087940586:2538], ActorState: ExecuteState, TraceId: 01jd7xdx0a73n90k6nkkngkm5f, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2024-11-21T17:48:27.550300Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjVhZjcxNjEtYWMwYWVhNmYtMzgzYjNiMWYtNDg1NTZiY2I=, ActorId: [2:7439790917087940612:2547], ActorState: ExecuteState, TraceId: 01jd7xdx0x3y5b0gngqsvra1v3, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2024-11-21T17:48:27.550338Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjVhZjcxNjEtYWMwYWVhNmYtMzgzYjNiMWYtNDg1NTZiY2I=, ActorId: [2:7439790917087940612:2547], ActorState: ExecuteState, TraceId: 01jd7xdx0x3y5b0gngqsvra1v3, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2024-11-21T17:48:27.592399Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmEyYzAxYjMtZmYwMjVmZjYtMjVkZGUzMjQtODE4NTZiYWY=, ActorId: [2:7439790917087940639:2557], ActorState: ExecuteState, TraceId: 01jd7xdx287xg0hpc87wtsjvk2, Reply query error, msg: Pending previous query completion proxyRequestId: 67 >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize >> KqpQueryServiceScripts::ExecuteScriptPg >> TxUsage::WriteToTopic_Demo_14 >> BasicUsage::ReadSessionCorrectClose [GOOD] >> BasicUsage::ConflictingWrites |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart |78.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> KqpQueryService::ExecuteRetryQuery [GOOD] >> TxUsage::WriteToTopic_Demo_32 [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions 
[GOOD] >> KqpQueryServiceScripts::InvalidFetchToken >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> TxUsage::WriteToTopic_Demo_33 >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] >> KqpQueryService::IssuesInCaseOfSuccess >> KqpService::ToDictCache+UseCache |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |78.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> TxUsage::WriteToTopic_Demo_43 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteRetryQuery [GOOD] Test command err: Trying to start YDB, gRPC: 25711, MsgBus: 25187 2024-11-21T17:48:24.554688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790903961974848:2061];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:24.554849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012ba/r3tmp/tmpL0CT6R/pdisk_1.dat 2024-11-21T17:48:24.605346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25711, node 1 2024-11-21T17:48:24.625597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:24.625609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:24.625612Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:24.625642Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25187 2024-11-21T17:48:24.654652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:24.654680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:24.655741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25187 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:24.674891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.683057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.746833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.763538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.777330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.841050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790903961976368:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.841112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.871740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.878981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.889068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.902928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.909189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.916640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.936346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790903961976868:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.936387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.936486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790903961976873:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.937397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:24.943827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790903961976875:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:48:25.180585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790908256944502:2469], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T17:48:25.180606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790908256944500:2467], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T17:48:25.180611Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439790908256944501:2468], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=NDk3NDg5ZmYtNDc2MTA2NGMtMjUxMzc0ZWQtOWEyOGZiYjY=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T17:48:25.180618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2024-11-21T17:48:25.180619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439790908256944501:2468], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=NDk3NDg5ZmYtNDc2MTA2NGMtMjUxMzc0ZWQtOWEyOGZiYjY=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2024-11-21T17:48:25.180632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7439790908256944498:2466]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2024-11-21T17:48:25.180643Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk3NDg5ZmYtNDc2MTA2NGMtMjUxMzc0ZWQtOWEyOGZiYjY=, ActorId: [1:7439790908256944498:2466], ActorState: ExecuteState, TraceId: 01jd7xdtpv2qcqr8t1xcwnd65s, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2024-11-21T17:48:25.180683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7439790908256944498:2466]: Pool another_pool_id not found Trying to start YDB, gRPC: 63105, MsgBus: 20595 2024-11-21T17:48:25.555972Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790908028155700:2195];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:25.558414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012ba/r3tmp/tmpP1KpLD/pdisk_1.dat 2024-11-21T17:48:25.576421Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63105, node 2 2024-11-21T17:48:25.585407Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:25.585424Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:25.585426Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:25.585479Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20595 TClient is connected to server localhost:20595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateSte ... 
ion type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.903615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.911205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.925412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.940280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:25.956679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790908028157595:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.956708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.956742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790908028157600:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:25.957436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:25.966442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790908028157602:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:26.201187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.562266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2024-11-21T17:48:26.627219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.688062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:1, at schemeshard: 72057594046644480 2024-11-21T17:48:26.755910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.826939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.886548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.142472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715699:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.014381s: status = SUCCESS, issues = 2024-11-21T17:48:28.166452Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2NjMWY0ZTAtMTBhNTE4YTYtZGJmYjA4MjQtNDlmNWRiM2U=, ActorId: [2:7439790920913060800:2691], ActorState: ExecuteState, TraceId: 01jd7xdxm58h3vvmwdsh3vyha5, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool Trying to start YDB, gRPC: 19244, MsgBus: 16202 2024-11-21T17:48:28.534611Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790920308369576:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:28.541563Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012ba/r3tmp/tmpSLLCpH/pdisk_1.dat 2024-11-21T17:48:28.564497Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19244, node 3 2024-11-21T17:48:28.575660Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:28.575676Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:28.575679Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:28.575718Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16202 TClient is connected to server localhost:16202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:48:28.640607Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:28.640638Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:28.641005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.641453Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:28.644363Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:28.655020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.670892Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.695027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.708398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.880540Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790920308370974:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:28.880567Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:28.883565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.941763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.952476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.963145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.978172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.036745Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.052665Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790924603338789:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.052700Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.052800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790924603338794:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.053731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:29.059873Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790924603338796:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> TxUsage::WriteToTopic_Demo_44 >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] Test command err: Trying to start YDB, gRPC: 7113, MsgBus: 24575 2024-11-21T17:48:27.500308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790914004769172:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:27.500392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001282/r3tmp/tmpHPYKZB/pdisk_1.dat 2024-11-21T17:48:27.573560Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7113, node 1 2024-11-21T17:48:27.596459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:27.596474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:27.596476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:27.596516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:27.600625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:27.600656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:27.604679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24575 TClient is connected to server localhost:24575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:48:27.668781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:27.671295Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:27.681513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:48:27.706870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.727932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:27.740245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:27.905252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790914004770583:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:27.905287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:27.953642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.968947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.984726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.998962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.020893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.041981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.076955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790918299738394:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:28.076999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:28.077097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790918299738402:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:28.078053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:28.081572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:48:28.083237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790918299738404:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 63604, MsgBus: 26229 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001282/r3tmp/tmp16kg6Y/pdisk_1.dat 2024-11-21T17:48:28.716777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:28.718576Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63604, node 2 2024-11-21T17:48:28.726639Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:28.726651Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:28.726652Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:28.726691Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26229 TClient is connected to server localhost:26229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:28.807481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:28.807509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:28.808546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:48:28.810344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.813932Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:28.821205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.839880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.876901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:28.891039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:29.062060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790922355484368:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.062080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.068437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.075762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.088245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.095197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.102567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.159651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.179848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790922355484875:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.179876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.179982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790922355484880:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.180798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:29.186116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790922355484882:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:29.372323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.372685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.372895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.881490Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211309920, txId: 281474976715697] shutting down >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpDocumentApi::AllowRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 29207, MsgBus: 27245 2024-11-21T17:48:26.293587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790909142924348:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:26.293603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00128e/r3tmp/tmp6QDHAC/pdisk_1.dat 2024-11-21T17:48:26.345561Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29207, node 1 2024-11-21T17:48:26.366962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:26.366973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:26.366975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:26.367018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27245 2024-11-21T17:48:26.393860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:26.393887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:26.395015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27245 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:26.427415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.430026Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:48:26.435162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.453869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.473607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.485058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.617556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790909142925887:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.617587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.668311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.679505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.687657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.743635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.757634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.777661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.794331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790909142926404:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.794357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.794498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790909142926409:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.795302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:26.799572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T17:48:26.799672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790909142926411:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 19434, MsgBus: 8167 2024-11-21T17:48:27.413150Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790917551280860:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:27.413436Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00128e/r3tmp/tmpi7jYKu/pdisk_1.dat 2024-11-21T17:48:27.427222Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19434, node 2 2024-11-21T17:48:27.437798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:27.437812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:27.437814Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:27.437861Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8167 TClient is connected to server localhost:8167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:48:27.514694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:27.514727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:27.515783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:27.516083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.517129Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:27.521028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:27.584074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:27.608844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:27.621268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.734975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790917551282393:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:27.735007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:27.740869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.748910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.758621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.765322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.779994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.801443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.817301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790917551282906:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:27.817348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:27.817481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790917551282911:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:27.818346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:27.821152Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:48:27.821258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790917551282913:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:28.091332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.091652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:48:28.091831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29975, MsgBus: 26901 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00128e/r3tmp/tmpQ9B6Na/pdisk_1.dat 2024-11-21T17:48:29.847535Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:29.848340Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29975, node 3 2024-11-21T17:48:29.859377Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:29.859388Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:29.859390Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:29.859431Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26901 TClient is connected to server localhost:26901 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:29.938001Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:29.938033Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:29.939763Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:29.941053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:29.953257Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:29.962972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:29.980978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:29.994308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:30.147547Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790928548771307:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.147569Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.154076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.166559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.175765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.232200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.247661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.261271Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.276795Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790928548771812:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.276832Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.276840Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790928548771817:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.277744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:30.286348Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790928548771819:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:30.550279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.550688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.550905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit [GOOD] >> LocalPartition::DiscoveryHang [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapUpdate >> TxUsage::WriteToTopic_Demo_24 >> KqpService::RangeCache+UseCache |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] Test command err: Trying to start YDB, gRPC: 20177, MsgBus: 62496 2024-11-21T17:48:25.885223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790908198478956:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:25.885296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012b1/r3tmp/tmpignux2/pdisk_1.dat 2024-11-21T17:48:25.934349Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20177, node 1 2024-11-21T17:48:25.961263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:25.961272Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:25.961273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:25.961299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62496 2024-11-21T17:48:25.990082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:25.990113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:25.992642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62496 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:26.012726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.016756Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:26.022106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.090149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.115663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.126556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.260472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790912493447641:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.260516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.298422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.304399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.316769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.377081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.386971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.401519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.417110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790912493448155:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.417136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.417262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790912493448160:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.417969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:26.421815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:48:26.421924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790912493448163:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:26.628963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 121 Trying to start YDB, gRPC: 27548, MsgBus: 16933 2024-11-21T17:48:29.850017Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790922025673969:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:29.850180Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012b1/r3tmp/tmpb2mcpF/pdisk_1.dat 2024-11-21T17:48:29.862358Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27548, node 2 2024-11-21T17:48:29.876504Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:29.876523Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:29.876525Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:29.876563Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16933 TClient is connected to server localhost:16933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:29.953307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:29.953342Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:29.953620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.954341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:48:29.955373Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:29.962604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:29.973299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:29.998431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:30.011503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:30.191390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790926320642815:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.191439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.194169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.203829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.216813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.233619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.244446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.258140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.278006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790926320643310:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.278033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.278119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790926320643315:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.278849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:30.284822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790926320643317:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:30.475534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.484020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.496189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3529, MsgBus: 1534 2024-11-21T17:48:31.049617Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790931291093057:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012b1/r3tmp/tmps0H1Kv/pdisk_1.dat 2024-11-21T17:48:31.053118Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:48:31.062352Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3529, node 3 2024-11-21T17:48:31.072402Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:31.072424Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:31.072426Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:31.072492Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1534 TClient is connected to server localhost:1534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:31.149228Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:31.149274Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:31.150376Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:31.150991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:31.162151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.171140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.186768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.195632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.399047Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790931291094468:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.399069Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.403362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.410199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.421529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.433824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.448029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.461694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.477938Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790931291094959:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.477982Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.478697Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790931291094964:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.479558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:31.489373Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790931291094966:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpDocumentApi::AllowRead [GOOD] >> KqpDocumentApi::RestrictAlter >> TTablesWithReboots::CreateWithRebootsAtCommit [GOOD] >> KqpService::ToDictCache+UseCache [GOOD] >> KqpService::ToDictCache-UseCache >> LocalPartition::WithoutPartitionDeadNode [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> KqpService::SwitchCache+UseCache ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] Test command err: Trying to start YDB, gRPC: 12194, MsgBus: 12975 2024-11-21T17:48:28.782240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790920851116515:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:28.782410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00127c/r3tmp/tmpgPbRFN/pdisk_1.dat 2024-11-21T17:48:28.850679Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12194, node 1 2024-11-21T17:48:28.868436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:28.868449Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:28.868450Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:28.868482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:28.880554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:28.880577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:28.881704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12975 TClient is connected to server localhost:12975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:28.930267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:28.932395Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:28.946957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.962945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:28.989340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:29.001574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:29.086062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790925146085359:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.086107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.120885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.140587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.153219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.167147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.179624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.193810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.218244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790925146085863:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.218270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.218312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790925146085868:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:29.219063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:29.220775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790925146085870:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:29.419153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.419497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.419695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:29.780482Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211309822, txId: 281474976715690] shutting down Trying to start YDB, gRPC: 7983, MsgBus: 10099 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00127c/r3tmp/tmpvC7l2r/pdisk_1.dat 2024-11-21T17:48:30.133265Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:30.146773Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7983, node 2 2024-11-21T17:48:30.162480Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:30.162492Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:30.162493Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:30.162537Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10099 TClient is connected to server localhost:10099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:30.234556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:30.234580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:48:30.234866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.236577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:48:30.244884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.264649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:30.297993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:30.323858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:30.517362Z node 2 ... VICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790928992489756:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.517400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.520733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.533688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.566717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.587852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.600951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.622538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.644664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790928992490267:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.644753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.644956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790928992490274:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.645853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:30.648213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790928992490276:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:30.870250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.870549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:48:30.870731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15308, MsgBus: 28184 2024-11-21T17:48:31.515838Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790932738739284:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:31.516020Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00127c/r3tmp/tmpKSpjBm/pdisk_1.dat 2024-11-21T17:48:31.530998Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15308, node 3 2024-11-21T17:48:31.542550Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:31.542564Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:31.542565Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:31.542602Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28184 TClient is connected to server localhost:28184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:31.620349Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:31.620388Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:31.620717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.621389Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:48:31.624660Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.640473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.656084Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.674983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.687227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.853090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790932738740821:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.853233Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.856138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.872435Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.884529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.901239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.911569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.925120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.945250Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790932738741325:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.945280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.945312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790932738741330:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.946191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:31.951408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790932738741332:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:32.182032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.182369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.182686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |78.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:56.698424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:56.698446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:56.698451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:56.698456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:56.698467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:56.698471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:56.698480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:56.698555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:56.709155Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:56.709193Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:56.711581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:56.711680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:56.711711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:56.714441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:56.714500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:56.714584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.714748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:56.715478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.715733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:56.715740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.715749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:56.715753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:56.715758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:56.715795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:56.717238Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:56.730504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:56.730565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.730630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:56.730672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:56.730680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.731272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.731296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:56.731339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.731348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:56.731352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:56.731356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:56.731698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.731706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:56.731709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:56.731914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.731919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.731923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.731928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.732318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:56.732604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:56.732651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:56.732788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.732803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:56.732808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet 72057594046678944 2024-11-21T17:47:56.732843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:56.732847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.732870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:56.732880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:56.733132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:56.733137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:56.733168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.733172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:56.733240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.733246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:56.733256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:56.733260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.733264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:56.733267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.733270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:56.733272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:56.733279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:56.733283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:56.733286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
age: Source { RawX1: 340 RawX2: 605590391061 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:32.589719Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:32.589738Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 605590391061 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:32.589745Z node 141 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:32.589752Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 340 RawX2: 605590391061 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:32.589764Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:32.589767Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T17:48:32.589834Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 605590391062 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:32.589840Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409547, partId: 0 2024-11-21T17:48:32.589849Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 605590391062 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:32.589853Z node 141 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:32.589859Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 342 RawX2: 605590391062 } Origin: 72075186233409547 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:32.589865Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:32.589869Z node 141 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:32.589878Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:32.589882Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:32.589887Z node 141 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2024-11-21T17:48:32.590167Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:48:32.590188Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:48:32.590714Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:32.590746Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:32.590759Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:32.590776Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:32.590874Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:32.590882Z node 141 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:48:32.590897Z node 141 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:48:32.590902Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:48:32.590908Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T17:48:32.590914Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:48:32.590921Z node 141 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:48:32.590925Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:48:32.590955Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1002 2024-11-21T17:48:32.591570Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:48:32.591578Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T17:48:32.591638Z node 141 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:48:32.591655Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:48:32.591660Z node 141 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [141:451:2416] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:48:32.591718Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:32.591783Z node 141 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 76us result status StatusSuccess 2024-11-21T17:48:32.592029Z node 141 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 
3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\003\000\004\000\000\000\377\377\377\177\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> KqpDocumentApi::RestrictAlter [GOOD]
>> KqpDocumentApi::RestrictDrop
>> KqpQueryService::SessionFromPoolError
>> TTablesWithReboots::CreateDroppedTableWithReboots [GOOD]
>> TTablesWithReboots::SimpleDropTableWithReboots2 [GOOD]
>> TxUsage::WriteToTopic_Demo_14 [GOOD]
>> KqpService::RangeCache+UseCache [GOOD]
>> KqpService::RangeCache-UseCache
>> KqpDocumentApi::RestrictDrop [GOOD]
>> KqpQueryService::SessionFromPoolError [GOOD]
>> KqpQueryService::SessionFromPoolSuccess
>> TxUsage::WriteToTopic_Demo_15
>> TSchemeShardMoveTest::Reject
>> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateDroppedTableWithReboots [GOOD]
Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:55.527028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:55.527056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.527061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:55.527066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing
config: using default configuration 2024-11-21T17:47:55.527078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:55.527082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:55.527091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.527180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:55.538234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:55.538256Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:55.540723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:55.540825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:55.540860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:55.543608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:55.543697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:55.543792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.544041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.544893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.545173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.545183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.545195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:55.545202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.545209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:55.545263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:55.546933Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:55.561191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:55.561256Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.561322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:55.561364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:55.561370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.561952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.561972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:55.561998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.562006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:55.562010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:55.562014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:55.562426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.562438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:55.562443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:55.562776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.562784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.562788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.562794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.563220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:55.563566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:55.563608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2024-11-21T17:47:55.563757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.563777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:55.563783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.563817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:55.563821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.563841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:55.563849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.564298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.564309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.564346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.564349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:55.564418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.564425Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:55.564436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:55.564440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.564446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:55.564452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.564456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:55.564460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:55.564471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:55.564477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:55.564481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:33.819028Z node 140 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:33.819031Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T17:48:33.819034Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:48:33.819036Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:48:33.819038Z node 140 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T17:48:33.819167Z node 140 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:33.819178Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:33.819185Z node 140 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:48:33.819190Z node 140 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:48:33.819194Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:33.819261Z node 140 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:33.819267Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:33.819269Z node 140 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:48:33.819272Z node 140 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:48:33.819274Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T17:48:33.819279Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:48:33.819904Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:33.819918Z node 140 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:33.820001Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 
2024-11-21T17:48:33.820030Z node 140 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:48:33.820034Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:48:33.820039Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T17:48:33.820044Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:48:33.820049Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:48:33.820053Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:48:33.820079Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:33.822760Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:48:33.822805Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:48:33.827542Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 601295423987 } TabletId: 72075186233409548 State: 4 2024-11-21T17:48:33.827574Z node 140 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:33.827652Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 597 RawX2: 601295423989 } TabletId: 72075186233409549 State: 4 2024-11-21T17:48:33.827662Z node 140 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:33.827699Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 599 RawX2: 601295423990 } TabletId: 72075186233409550 State: 4 2024-11-21T17:48:33.827704Z node 140 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:33.828588Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:33.829225Z node 140 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:48:33.829299Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:48:33.829391Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 Forgetting tablet 72075186233409548 2024-11-21T17:48:33.830183Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:33.830223Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 
2024-11-21T17:48:33.830241Z node 140 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2024-11-21T17:48:33.830294Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:48:33.830345Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:48:33.830417Z node 140 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T17:48:33.830921Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:48:33.830960Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:48:33.831549Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:33.831560Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:48:33.831575Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:33.832585Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:48:33.832598Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:48:33.833295Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:48:33.833305Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:48:33.833323Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:48:33.833328Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:48:33.833617Z node 140 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T17:48:33.833681Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:48:33.833688Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T17:48:33.833756Z node 140 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:48:33.833774Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:48:33.833779Z node 140 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [140:840:2771] TestWaitNotification: OK 
eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted 2024-11-21T17:48:33.833843Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:33.833856Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:48:33.833862Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:48:33.833871Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:48:33.833878Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T17:48:33.833886Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T17:48:33.833895Z node 140 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimpleDropTableWithReboots2 [GOOD]
Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:04.299968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:04.299988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.299993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:04.299997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:04.300007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:04.300011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:04.300019Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.300081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:04.310936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:04.310953Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.313138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:04.313235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:04.313261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:04.315898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:04.315961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:04.316038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.316174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.316760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.317016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.317024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.317034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:04.317040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.317046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:04.317077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:04.318421Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.334843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:04.334903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.334946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:04.334986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:04.334993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.335597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.335622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:04.335656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.335665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:04.335670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:04.335674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:04.336122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.336132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:04.336138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:04.336508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.336518Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.336524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.336531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.337384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:04.337830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:04.337876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:04.338063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.338090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 
4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:04.338096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.338144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:04.338152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.338173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:04.338183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.338544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.338553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.338585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.338590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:04.338661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.338667Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:04.338676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:04.338680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.338685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:04.338690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.338694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:04.338697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:04.338707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:04.338712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:04.338715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
8944 2024-11-21T17:48:34.298349Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:34.298353Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:34.298379Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:34.298394Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:34.298397Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [117:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:48:34.298399Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [117:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:48:34.298462Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:34.298470Z node 117 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:34.298487Z node 117 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:34.298491Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:34.298495Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:34.298498Z node 117 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:48:34.298634Z node 117 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:34.298646Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:34.298650Z node 117 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:34.298654Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:48:34.298659Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:34.298812Z node 117 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:34.298823Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:34.298830Z node 117 :FLAT_TX_SCHEMESHARD INFO: 
Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:34.298834Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:48:34.298838Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:48:34.298848Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:48:34.299160Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:34.299168Z node 117 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:34.299229Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:48:34.299249Z node 117 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:34.299252Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:34.299255Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:34.299258Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:34.299260Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:34.299263Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:34.299281Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:48:34.299652Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:34.299878Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:34.301168Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 341 RawX2: 502511175956 } TabletId: 72075186233409546 State: 4 2024-11-21T17:48:34.301183Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:34.301225Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 502511175957 } TabletId: 72075186233409547 State: 4 2024-11-21T17:48:34.301228Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:34.301613Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:34.301670Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:34.301714Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 
TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:48:34.301751Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:34.301792Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2024-11-21T17:48:34.302376Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T17:48:34.302458Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:48:34.302496Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:48:34.302992Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:34.303004Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:48:34.303018Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:34.304018Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:48:34.304029Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:48:34.304345Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:48:34.304356Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:48:34.304391Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:48:34.304432Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:34.304438Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:48:34.304501Z node 117 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:34.304515Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:34.304519Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [117:541:2504] TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:48:34.304564Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:34.304573Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 
72075186233409547 2024-11-21T17:48:34.304624Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:34.304652Z node 117 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 38us result status StatusPathDoesNotExist 2024-11-21T17:48:34.304687Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 23449, MsgBus: 22941 2024-11-21T17:48:31.681038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790930873058855:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:31.681135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001270/r3tmp/tmpIokMZP/pdisk_1.dat 2024-11-21T17:48:31.739670Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23449, node 1 2024-11-21T17:48:31.749763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:31.749774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:31.749775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:31.749805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22941 2024-11-21T17:48:31.781002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:31.781026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:31.782194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22941 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:31.804594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.809265Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:48:31.816208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.837977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:31.899771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:48:31.916463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T17:48:31.997152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790930873060211:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:31.997190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:32.033819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.041632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.050522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.064308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.079995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.092715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.109386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790935168028012:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:32.109416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:32.109447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790935168028017:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:32.110234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:32.119629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790935168028019:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:48:32.341272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21208, MsgBus: 7703 2024-11-21T17:48:32.680852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790935202687004:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001270/r3tmp/tmpXlg1sX/pdisk_1.dat 2024-11-21T17:48:32.686224Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:48:32.712682Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21208, node 2 2024-11-21T17:48:32.736410Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:32.736424Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:32.736425Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:32.736469Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:32.777028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:32.777064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:32.781749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7703 TClient is connected to server localhost:7703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:32.875900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:32.878787Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:32.917833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:32.943896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:32.993697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:33.018019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:33.100517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790939497655690:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.100573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.103223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.112034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.127038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.141359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.161726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.177145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.251539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790939497656207:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.251570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.251747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790939497656214:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.252510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:33.255416Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:48:33.255498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790939497656216:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:33.526102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2024-11-21T17:48:33.602031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9380, MsgBus: 19229 2024-11-21T17:48:33.819295Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790941206878036:2242];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001270/r3tmp/tmpFWw88z/pdisk_1.dat 2024-11-21T17:48:33.824430Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 9380, node 3 2024-11-21T17:48:33.841172Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:33.841185Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:33.841187Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:33.841233Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:33.841466Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19229 TClient is connected to server localhost:19229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:33.920432Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:33.920462Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:48:33.921809Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:33.922177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.924619Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:33.931405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:33.947532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:33.971332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:48:34.006717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.186567Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790945501846672:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.186610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.197246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.217116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.235155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.243004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.256991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.273625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.297648Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790945501847178:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.297675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.297829Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790945501847183:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.298734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:34.302113Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:48:34.302191Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790945501847185:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:34.479252Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |78.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> KqpService::ToDictCache-UseCache [GOOD] >> KqpOlapClickbench::ClickBenchSmoke [GOOD] >> TxUsage::WriteToTopic_Demo_33 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:21.758175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:21.758202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.758208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:21.758213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:21.758230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:21.758234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:21.758244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.758319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:21.769650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:21.769671Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:21.772775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:21.773533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:21.773566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:21.774899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:21.775150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:21.775253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.775343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.776460Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.776749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.776759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.776800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:21.776807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.776813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:21.776828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.778028Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.792904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:21.792983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.793049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:21.793123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:21.793132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.793922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.793955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:21.794000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.794009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:21.794013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:21.794017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:21.794381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.794390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:21.794394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:21.794632Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.794638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.794642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.794647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.795094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:21.795509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:21.795565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:21.795775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.795800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:21.795807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.795859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:21.795868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.795895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.795904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:21.796324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.796330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.796372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.796377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:21.796460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.796466Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:21.796474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:21.796478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.796482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:21.796485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.796489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:21.796491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:21.796500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:21.796504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:21.796506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:21.796758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:21.796772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:21.796776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:21.796781Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:21.796786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.796798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2024-11-21T17:48:30.777172Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2024-11-21T17:48:30.777175Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:48:30.777183Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2024-11-21T17:48:30.777189Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2024-11-21T17:48:30.777191Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2024-11-21T17:48:30.777260Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T17:48:30.777271Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T17:48:30.777274Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2024-11-21T17:48:30.777279Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2024-11-21T17:48:30.777283Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2024-11-21T17:48:30.777345Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T17:48:30.777351Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T17:48:30.777353Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2024-11-21T17:48:30.777356Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2024-11-21T17:48:30.777358Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2024-11-21T17:48:30.777362Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2024-11-21T17:48:30.778302Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2024-11-21T17:48:30.778318Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2024-11-21T17:48:30.789447Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 
281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 260 } } 2024-11-21T17:48:30.789468Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2024-11-21T17:48:30.789498Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 260 } } 2024-11-21T17:48:30.789513Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 260 } } 2024-11-21T17:48:30.789753Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 751 RawX2: 77309413971 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T17:48:30.789759Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2024-11-21T17:48:30.789768Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 751 RawX2: 77309413971 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T17:48:30.789773Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2024-11-21T17:48:30.789777Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 751 RawX2: 77309413971 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T17:48:30.789788Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2024-11-21T17:48:30.789790Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T17:48:30.789793Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2024-11-21T17:48:30.789800Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2024-11-21T17:48:30.790789Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T17:48:30.790896Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 
2024-11-21T17:48:30.790974Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T17:48:30.790983Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2024-11-21T17:48:30.790999Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2024-11-21T17:48:30.791003Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T17:48:30.791010Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2024-11-21T17:48:30.791014Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T17:48:30.791019Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:48:30.791023Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T17:48:30.791033Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2024-11-21T17:48:30.791040Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2024-11-21T17:48:30.791042Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2024-11-21T17:48:30.791053Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2024-11-21T17:48:30.791056Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2024-11-21T17:48:30.791058Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2024-11-21T17:48:30.791061Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T17:48:33.195151Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T17:48:33.195253Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 118us result status StatusNameConflict 2024-11-21T17:48:33.195298Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T17:48:35.302012Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at 
schemeshard: 72075186233409546 2024-11-21T17:48:35.302103Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 105us result status StatusNameConflict 2024-11-21T17:48:35.302147Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit [GOOD] >> TSchemeShardMoveTest::OneTable [GOOD] >> TTablesWithReboots::DropTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 2686, MsgBus: 18456 2024-11-21T17:48:29.870861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790922769842578:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:29.870993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001276/r3tmp/tmpfm2QQR/pdisk_1.dat 2024-11-21T17:48:29.934922Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2686, node 1 2024-11-21T17:48:29.954670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:29.954689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:29.954691Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:29.954743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18456 2024-11-21T17:48:29.972067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:29.972095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:29.976560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18456 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:30.012777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:30.016726Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:30.220848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790927064810490:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.220872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790927064810510:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.220880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790927064810511:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.220886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790927064810512:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.220895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.220899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790927064810515:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:30.221801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:48:30.223822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790927064810523:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:48:30.223839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790927064810520:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:48:30.223844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790927064810521:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:48:30.223849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790927064810522:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } took: 0.447469s took: 0.447727s took: 0.459992s took: 0.460365s took: 0.156908s took: 0.156383s took: 0.153893s took: 0.154154s took: 0.111550s took: 0.111615s took: 0.111855s took: 0.112465s took: 0.142974s took: 0.143126s took: 0.143364s took: 0.143423s took: 0.137189s took: 0.137181s took: 0.137191s took: 0.137174s took: 0.090946s took: 0.090903s took: 0.090939s took: 0.091118s took: 0.080808s took: 0.080858s took: 0.080873s took: 0.080991s took: 0.110361s took: 0.110549s took: 0.110365s took: 0.113136s took: 0.104321s took: 0.104376s took: 0.104371s took: 0.104650s took: 0.104396s took: 0.104526s took: 0.104780s took: 0.104824s took: 0.106723s took: 0.106731s took: 0.106595s took: 0.106734s took: 0.118244s took: 0.126353s took: 0.126919s took: 0.130403s took: 0.143900s took: 0.146057s took: 0.150759s took: 0.154983s took: 0.129522s took: 0.132595s took: 0.140694s took: 0.141075s took: 0.125732s took: 0.127103s took: 0.127627s took: 0.136108s took: 0.176130s took: 0.176416s took: 0.176509s took: 0.176684s Trying to start YDB, gRPC: 25545, MsgBus: 8144 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001276/r3tmp/tmpNLP9rl/pdisk_1.dat 2024-11-21T17:48:32.956414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:32.972648Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25545, node 2 2024-11-21T17:48:32.996539Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:32.996555Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:32.996557Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:32.996613Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8144 2024-11-21T17:48:33.044708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:33.044737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:33.048691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:48:33.080969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:33.088646Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:33.448450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790942113738301:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.448501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.448587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790942113738319:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.448602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790942113738320:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.448638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790942113738324:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.448698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.448728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790942113738330:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.448733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790942113738332:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.449385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:48:33.451713Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T17:48:33.451735Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T17:48:33.451740Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T17:48:33.451746Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T17:48:33.451758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790942113738341:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:48:33.451771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790942113738331:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:48:33.451776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790942113738329:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:48:33.451781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790942113738339:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } took: 0.541113s took: 0.540962s took: 0.557557s took: 0.582541s took: 0.107735s took: 0.107836s took: 0.108016s took: 0.108063s took: 0.120444s took: 0.120435s took: 0.120431s took: 0.121146s took: 0.105421s took: 0.105443s took: 0.105419s took: 0.105665s took: 0.154374s took: 0.154526s took: 0.154906s took: 0.155197s took: 0.171616s took: 0.171656s took: 0.171721s took: 0.171796s took: 0.128314s took: 0.129618s took: 0.129792s took: 0.132146s took: 0.113703s took: 0.113994s took: 0.114408s took: 0.114518s took: 0.109170s took: 0.109033s took: 0.109090s took: 0.109642s took: 0.093594s took: 0.093850s took: 0.103005s took: 0.103481s took: 0.141526s took: 0.141934s took: 0.142856s took: 0.143093s took: 0.120617s took: 0.121341s took: 0.125045s took: 0.125135s took: 0.128732s took: 0.128711s took: 0.129126s took: 0.129979s took: 0.109225s took: 0.109374s took: 0.111905s took: 0.112127s took: 0.163807s took: 0.163951s took: 0.164401s took: 0.164602s took: 0.151468s took: 0.162735s took: 0.163285s took: 0.163772s >> YdbYqlClient::CreateTableWithPartitionAtKeys >> YdbYqlClient::TestDecimal ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapClickbench::ClickBenchSmoke [GOOD] Test command err: Trying to start YDB, gRPC: 14640, MsgBus: 28310 2024-11-21T17:46:30.111984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790412100167155:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:30.112001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000db5/r3tmp/tmpqeFMFt/pdisk_1.dat 2024-11-21T17:46:30.155642Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14640, node 1 2024-11-21T17:46:30.170491Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:30.170502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:30.170503Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:30.170538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28310 TClient is connected to server localhost:28310 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:30.212213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:30.212259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:30.213329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:30.239738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:46:30.241727Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:30.247537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:30.254875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.254933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.254970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.254996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.255016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.255035Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.255056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.255078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:30.255099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:30.255119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.255142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:30.255163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439790412100167788:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:30.258424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.258453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.258483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.258502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.258519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.258542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.258557Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.258579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:30.258607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:30.258630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:30.258651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:30.258673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790412100167793:2291];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:30.260821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:30.260843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:30.260863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:30.260873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:30.260883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:30.260896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:30.260910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:30.260948Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439790412100167791:2290];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... rsing;size=1125808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T17:46:44.687310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=929860f0-a83011ef-81b37450-c9e05d51;fline=with_appended.cpp:80;portions=9,;task_id=929860f0-a83011ef-81b37450-c9e05d51; 2024-11-21T17:46:44.786305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=92a1e3dc-a83011ef-b8e3e3ef-31300654;fline=with_appended.cpp:80;portions=9,;task_id=92a1e3dc-a83011ef-b8e3e3ef-31300654; 2024-11-21T17:46:44.877151Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=92b353b0-a83011ef-9e8b6f74-a36a535a;fline=with_appended.cpp:80;portions=9,;task_id=92b353b0-a83011ef-9e8b6f74-a36a535a; 2024-11-21T17:46:44.943350Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=92bf569c-a83011ef-ba374e29-372875ab;fline=with_appended.cpp:80;portions=9,;task_id=92bf569c-a83011ef-ba374e29-372875ab; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1125808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T17:46:45.105163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=92d192b2-a83011ef-baaa765d-c58342a6;fline=with_appended.cpp:80;portions=11,;task_id=92d192b2-a83011ef-baaa765d-c58342a6; 2024-11-21T17:46:45.170222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=92e0acde-a83011ef-83ed3098-77b366eb;fline=with_appended.cpp:80;portions=11,;task_id=92e0acde-a83011ef-83ed3098-77b366eb; 2024-11-21T17:46:45.243141Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=92ee844e-a83011ef-9fa69270-51e8ef85;fline=with_appended.cpp:80;portions=11,;task_id=92ee844e-a83011ef-9fa69270-51e8ef85; 2024-11-21T17:46:45.321053Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=92f89f38-a83011ef-b150b3db-cf1d5c70;fline=with_appended.cpp:80;portions=11,;task_id=92f89f38-a83011ef-b150b3db-cf1d5c70; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1125808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T17:46:45.468369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=931152a8-a83011ef-93542313-dd8c4afb;fline=with_appended.cpp:80;portions=13,;task_id=931152a8-a83011ef-93542313-dd8c4afb; 2024-11-21T17:46:45.568134Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=931b3c14-a83011ef-a5967b6f-7d6aacda;fline=with_appended.cpp:80;portions=13,;task_id=931b3c14-a83011ef-a5967b6f-7d6aacda; 2024-11-21T17:46:45.646626Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=93266d64-a83011ef-a353b4ad-9b8ff482;fline=with_appended.cpp:80;portions=13,;task_id=93266d64-a83011ef-a353b4ad-9b8ff482; 2024-11-21T17:46:45.721647Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=93324544-a83011ef-ab54f296-e93d2927;fline=with_appended.cpp:80;portions=13,;task_id=93324544-a83011ef-ab54f296-e93d2927; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1125808;columns=105; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1125808;columns=105; 2024-11-21T17:46:45.870228Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=9348c242-a83011ef-87c97b56-11c3bf7c;fline=with_appended.cpp:80;portions=15,;task_id=9348c242-a83011ef-87c97b56-11c3bf7c; 2024-11-21T17:46:45.962079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=9357fb18-a83011ef-83798be6-edc0c2e5;fline=with_appended.cpp:80;portions=15,;task_id=9357fb18-a83011ef-83798be6-edc0c2e5; 2024-11-21T17:46:46.059233Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=9363f12a-a83011ef-9b10dc61-7779ffd6;fline=with_appended.cpp:80;portions=15,;task_id=9363f12a-a83011ef-9b10dc61-7779ffd6; 2024-11-21T17:46:46.185172Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=936f6776-a83011ef-b6c32401-1f69c7bc;fline=with_appended.cpp:80;portions=15,;task_id=936f6776-a83011ef-b6c32401-1f69c7bc; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1181808;columns=105; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1181808;columns=105; 2024-11-21T17:46:46.389023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=93861214-a83011ef-9f956476-33e4c107;fline=with_appended.cpp:80;portions=17,;task_id=93861214-a83011ef-9f956476-33e4c107; 2024-11-21T17:46:46.497176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=939412a6-a83011ef-80ecbbff-6701a6d1;fline=with_appended.cpp:80;portions=17,;task_id=939412a6-a83011ef-80ecbbff-6701a6d1; 2024-11-21T17:46:46.601121Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=93a2eff6-a83011ef-960e79cd-376e0c66;fline=with_appended.cpp:80;portions=17,;task_id=93a2eff6-a83011ef-960e79cd-376e0c66; 2024-11-21T17:46:46.678119Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=93b62102-a83011ef-86c25efc-4351c889;fline=with_appended.cpp:80;portions=17,;task_id=93b62102-a83011ef-86c25efc-4351c889; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT AdvEngineID, COUNT(*) as c FROM `/Root/benchTable` WHERE AdvEngineID != 0 GROUP BY AdvEngineID ORDER BY c DESC 2024-11-21T17:46:46.743123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2215:3334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:46.743161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2229:3341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:46.743284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:46.754628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:46:46.815652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;task_id=93d53952-a83011ef-97d0aafb-2bbb9950;fline=with_appended.cpp:80;portions=19,;task_id=93d53952-a83011ef-97d0aafb-2bbb9950; 2024-11-21T17:46:46.843027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=93e5c18c-a83011ef-ab000bac-fb07c5f1;fline=with_appended.cpp:80;portions=19,;task_id=93e5c18c-a83011ef-ab000bac-fb07c5f1; 2024-11-21T17:46:46.866514Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;task_id=93f595ee-a83011ef-83f972f6-550f9bb;fline=with_appended.cpp:80;portions=19,;task_id=93f595ee-a83011ef-83f972f6-550f9bb; 2024-11-21T17:46:46.884720Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;task_id=940157bc-a83011ef-8af0a5fe-9348d089;fline=with_appended.cpp:80;portions=19,;task_id=940157bc-a83011ef-8af0a5fe-9348d089; 2024-11-21T17:46:47.146678Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2234:3344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:46:47.466518Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xatgra6d5ek7aq63jeync, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDQzYTEyZjEtNjgwYzI2ZWMtNTk1NzJmMWQtMWEzZTg1YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT RegionID, SUM(AdvEngineID), COUNT(*) AS c, avg(ResolutionWidth), COUNT(DISTINCT UserID) FROM `/Root/benchTable` GROUP BY RegionID ORDER BY c DESC LIMIT 10 2024-11-21T17:47:00.730675Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xb82f7v3hhxgbfsh2n9d2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTY2Y2RiNWQtMWZkOThhYzYtMzVjZTEyYjAtYjg2NzFlZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:47:10.700859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:47:10.700896Z node 2 :IMPORT WARN: Table profiles were not loaded REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchPhrase, count(*) AS c FROM `/Root/benchTable` WHERE SearchPhrase != '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10; 2024-11-21T17:47:17.043813Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xbr3x6s7b0f9h5r0q7d35, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWMwOGZiNS0yYTBmMDNiOS1lYWMzNWJlZC00ZTVkNmM3OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchEngineID, SearchPhrase, count(*) AS c FROM `/Root/benchTable` WHERE SearchPhrase != '' GROUP BY SearchEngineID, SearchPhrase ORDER BY c DESC LIMIT 10; 2024-11-21T17:47:31.045554Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xc5s1fzyex3d347zaj89y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODExMGJiNTgtYjA5NjYzMWMtOWEwNTZlNGEtNmNlOGJlOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT SearchPhrase, MIN(URL), MIN(Title), COUNT(*) AS c, COUNT(DISTINCT UserID) FROM `/Root/benchTable` WHERE Title LIKE '%Google%' AND URL NOT LIKE '%.google.%' AND SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10; 2024-11-21T17:47:47.182047Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xcnc6eb0330838vxrdrdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTM1ZTQ0NzYtMTU0MmFhNGMtNDM4ZDZkODUtYTg3ZjMxYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT TraficSourceID, SearchEngineID, AdvEngineID, Src, Dst, COUNT(*) AS PageViews FROM `/Root/benchTable` WHERE CounterID = 62 AND EventDate >= Date('2013-07-01') AND EventDate <= Date('2013-07-31') AND IsRefresh == 0 GROUP BY TraficSourceID, SearchEngineID, AdvEngineID, IF (SearchEngineID = 0 AND AdvEngineID = 0, Referer, '') AS Src, URL AS Dst ORDER BY PageViews DESC LIMIT 10; 2024-11-21T17:48:09.296627Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. 
Ctx: { TraceId: 01jd7xdb2y3tfms84xxnd3286a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWE3ODA4My0xYTcyNzgyMC01MjVjYzZhLWEzYzQ1ZTFk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::SeveralCTAS >> KqpService::SwitchCache+UseCache [GOOD] >> KqpService::SwitchCache-UseCache >> KqpService::RangeCache-UseCache [GOOD] >> YdbOlapStore::LogLast50 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:35.127398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:35.127423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:35.127428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:35.127433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:35.127450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:35.127453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:35.127462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:35.127555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:35.137823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:35.137844Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:35.140907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:35.141583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:35.141608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:35.142709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:35.142839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:35.142928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:35.142980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:35.143989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:35.144218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T17:48:35.144240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:35.144275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:35.144282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:35.144289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:35.144301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.145575Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:35.160091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:35.160176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.160255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:35.160296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:35.160304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.161130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:35.161153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:35.161194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.161204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:35.161208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:35.161213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:35.161620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.161631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:35.161635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:35.161941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.161949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.161953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:35.161960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:35.162521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:35.162934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:35.162985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:35.163150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:35.163173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:35.163182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:35.163229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:35.163236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:35.163264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:35.163275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:35.163686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:35.163693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:35.163732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:35.163738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:35.163803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:35.163809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:35.163820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:35.163824Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:35.163829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:35.163834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:35.163838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:35.163842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:35.163852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:35.163858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:35.163861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:35.164122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:35.164134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:35.164139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:35.164143Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:35.164147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:35.164162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
HARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:48:36.061242Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:48:36.061250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:48:36.061255Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2024-11-21T17:48:36.061266Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:48:36.061298Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2024-11-21T17:48:36.061320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:36.061334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:36.061737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:48:36.061984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:48:36.062033Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:36.062044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:36.062088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:48:36.062118Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:36.062123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 108, path id: 1 2024-11-21T17:48:36.062128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 108, path id: 4 2024-11-21T17:48:36.062250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:48:36.062258Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:36.062278Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:48:36.062282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:36.062288Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2024-11-21T17:48:36.062442Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, 
cookie: 108 2024-11-21T17:48:36.062455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:48:36.062460Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T17:48:36.062465Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2024-11-21T17:48:36.062470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:36.062621Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:48:36.062633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:48:36.062636Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T17:48:36.062640Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:48:36.062644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:36.062656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2024-11-21T17:48:36.063307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:48:36.063321Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:36.063407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:36.063434Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2024-11-21T17:48:36.063438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T17:48:36.063443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2024-11-21T17:48:36.063457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:339:2314] message: TxId: 108 2024-11-21T17:48:36.063463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T17:48:36.063468Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2024-11-21T17:48:36.063471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2024-11-21T17:48:36.063493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:48:36.063624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 108 2024-11-21T17:48:36.063995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T17:48:36.064314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T17:48:36.064328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:817:2778] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:48:36.064534Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:36.064549Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2024-11-21T17:48:36.086528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4 2024-11-21T17:48:36.086572Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:36.086883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:36.086960Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:48:36.087011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:36.087085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2024-11-21T17:48:36.087503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:36.087508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:48:36.087520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:36.088207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:48:36.088238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:48:36.088503Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2024-11-21T17:48:36.088692Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:36.088749Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 82us result status StatusSuccess 2024-11-21T17:48:36.088859Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_33 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013e8/r3tmp/tmpdb77Ro/pdisk_1.dat 2024-11-21T17:47:18.258635Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:18.269597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:47:18.272157Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30097, node 1 2024-11-21T17:47:18.302933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/0013e8/r3tmp/yandexh7QMNv.tmp 2024-11-21T17:47:18.302942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/0013e8/r3tmp/yandexh7QMNv.tmp 2024-11-21T17:47:18.302977Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/0013e8/r3tmp/yandexh7QMNv.tmp 2024-11-21T17:47:18.302986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:18.304987Z INFO: TTestServer started on Port 11988 GrpcPort 30097 TClient is connected to server localhost:11988 2024-11-21T17:47:18.332500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.332524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.333737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:30097 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.391023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.393099Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.402305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.467781Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.562807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790617033024825:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.562811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790617033024817:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.562824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.563206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790617033024856:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.563219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.563671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.565713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790617033024831:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:47:18.591824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.638536Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790617033024968:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.638630Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzk2Y2NmZWItYjE3NjI2YTctOTllYjAwYjgtNDRiMmQzNTQ=, ActorId: [1:7439790617033024814:2304], ActorState: ExecuteState, TraceId: 01jd7xbsn0fzbx82f4yeeywf40, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.639073Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:18.646966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.659254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790617033025171:2597] === CheckClustersList. Ok 2024-11-21T17:47:23.805240Z :WriteToTopic_Demo_3 INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.819458Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.823300Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790638507861977:2783] connected; active server actors: 1 2024-11-21T17:47:23.823404Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.823483Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.823519Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.823586Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.824140Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.824202Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.824349Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.824378Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.824385Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.824387Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.824389Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.824393Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.824405Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.824411Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.833761Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.833769Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.833776Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790638507861993:2427], now have 1 active actors on pipe 2024-11-21T17:47:23.872690Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.872712Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790638507861976:2782], now have 1 active actors on pipe 2024-11-21T17:47:23.875569Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439790617033024465 RawX2: 4294969488 } TxId: 281474976715673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: t ... 
partition: 0 SourceId: '\0test-message_group_id' SeqNo: 2 partNo : 0 messageNo: 1 size 16457 offset: -1 2024-11-21T17:48:35.797049Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} part blob processing sourceId '\0test-message_group_id' seqNo 2 partNo 0 2024-11-21T17:48:35.797092Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} part blob complete sourceId '\0test-message_group_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 16546 count 1 nextOffset 1 batches 1 2024-11-21T17:48:35.797155Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {9, 281474976715679}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000| size 16536 WTime 1732211315796 2024-11-21T17:48:35.797191Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:48:35.797664Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 16479 2024-11-21T17:48:35.797696Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715679}, 100001} 2024-11-21T17:48:35.797714Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715679}, 100001}, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:48:35.797718Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 1 requestId: cookie: 2 2024-11-21T17:48:35.797735Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:35.797748Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:48:35.797758Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:48:35.797791Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] read cookie 0 Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T17:48:35.797799Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:48:35.797810Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T17:48:35.797815Z node 9 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:48:35.797833Z node 9 :PERSQUEUE DEBUG: Topic 'topic_A' partition {0, {9, 281474976715679}, 100001} user test-consumer readTimeStamp done, result 1732211315796 queuesize 0 startOffset 0 2024-11-21T17:48:35.798092Z :DEBUG: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:48:35.798137Z :DEBUG: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 2 written_in_tx { } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T17:48:35.798146Z :DEBUG: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] OnAck: seqNo=2, txId=01jd7xe52eabyqv48xzrxjqb3q, WriteCount=1, AckCount=1 2024-11-21T17:48:35.798403Z :DEBUG: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session: acknoledged message 2 2024-11-21T17:48:35.799385Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439790950469457927 RawX2: 4503638282078656 } TxId: 281474976715680 Data { Operations { PartitionId: 0 Path: "/Root/topic_A" SupportivePartition: 100001 } Op: Commit SendingShards: 72075186224037894 ReceivingShards: 72075186224037894 Immediate: true WriteId { NodeId: 9 KeyId: 281474976715679 } } 2024-11-21T17:48:35.799398Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PartitionId {0, {9, 281474976715679}, 100001} for WriteId {9, 281474976715679} 2024-11-21T17:48:35.799404Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715680 has WriteId {9, 281474976715679} 2024-11-21T17:48:35.799407Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] immediate transaction 2024-11-21T17:48:35.799444Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2024-11-21T17:48:35.799461Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2024-11-21T17:48:35.799469Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1 2024-11-21T17:48:35.799481Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Head=Offset 0 PartNo 0 PackedSize 16536 count 1 nextOffset 1 batches 1, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2024-11-21T17:48:35.799507Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 2 partNo 0 2024-11-21T17:48:35.799541Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 16546 count 1 nextOffset 2 batches 1 2024-11-21T17:48:35.799557Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2024-11-21T17:48:35.799617Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, 
Partition: 0, State: StateIdle] Add new write blob: topic 'topic_A' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 16536 WTime 1732211315799 2024-11-21T17:48:35.799681Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:48:35.800040Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:48:35.800450Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvTransactionCompleted WriteId {9, 281474976715679} 2024-11-21T17:48:35.800456Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {9, 281474976715679}, 100001} 2024-11-21T17:48:35.800473Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715679}, 100001}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2024-11-21T17:48:35.800503Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976715679 LockNode: 9 Status: STATUS_NOT_FOUND 2024-11-21T17:48:35.800506Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] there is already a transaction TxId 281474976715680 for WriteId {9, 281474976715679} 2024-11-21T17:48:35.801111Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {9, 281474976715679}, 100001} 2024-11-21T17:48:35.801131Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {9, 281474976715679} 2024-11-21T17:48:35.801136Z node 9 :PERSQUEUE WARN: [PQ: 72075186224037894] Unknown transaction 281474976715680 2024-11-21T17:48:35.801152Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T17:48:35.801352Z :INFO: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T17:48:35.801358Z :INFO: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:48:35.801366Z :DEBUG: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:48:35.801476Z :INFO: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:48:35.801485Z :DEBUG: [/Root] SessionId [test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:48:35.801608Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:48:35.801800Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0 grpc read done: success: 0 data: 2024-11-21T17:48:35.801805Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0 grpc read failed 2024-11-21T17:48:35.801810Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0 grpc closed 2024-11-21T17:48:35.801813Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|8d9368e8-9463dc3c-194c4b2d-aafbc657_0 is DEAD 2024-11-21T17:48:35.802023Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:35.802039Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:35.802042Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:35.802283Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:35.802296Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790950469457905:2478] destroyed 2024-11-21T17:48:35.802300Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:35.802303Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790950469457833:2478] destroyed 2024-11-21T17:48:35.802305Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:35.802308Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790950469457836:2478] destroyed 2024-11-21T17:48:35.802435Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::DropTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:04.842153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:04.842173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.842178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:04.842182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:04.842191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:04.842195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:04.842203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.842270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:04.851155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:04.851170Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.853104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:04.853191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:04.853211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:04.855330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:04.855384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:04.855453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.855597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.856159Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.856407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.856415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.856424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:04.856429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.856434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:04.856466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:04.857940Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.870616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:04.870672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.870720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:04.870759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:04.870764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.871334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.871353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:04.871379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.871386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:04.871389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:04.871392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:04.871707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:48:04.871716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:04.871720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:04.871976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.871982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.871986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.871990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.872448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:04.872794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:04.872830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:04.872950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.872966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:04.872971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.873008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:04.873013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.873030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:04.873038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.873341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.873347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.873369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T17:48:04.873372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:04.873419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.873424Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:04.873431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:04.873433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.873436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:04.873440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.873442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:04.873445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:04.873452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:04.873456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:04.873459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... Id: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:36.148355Z node 117 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:48:36.148360Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:48:36.148365Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:36.148457Z node 117 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:36.148465Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:36.148468Z node 117 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:48:36.148470Z node 117 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:48:36.148474Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T17:48:36.148479Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T17:48:36.149073Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:36.149083Z node 117 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1004:0 ProgressState, 
at schemeshard: 72057594046678944 2024-11-21T17:48:36.149157Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2024-11-21T17:48:36.149188Z node 117 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:48:36.149191Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:36.149194Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T17:48:36.149197Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:36.149200Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:48:36.149202Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:48:36.149223Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:36.149398Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:36.149410Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:36.150993Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 596 RawX2: 502511176180 } TabletId: 72075186233409549 State: 4 2024-11-21T17:48:36.151007Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:36.151057Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 599 RawX2: 502511176181 } TabletId: 72075186233409550 State: 4 2024-11-21T17:48:36.151061Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409550, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:36.151085Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 592 RawX2: 502511176178 } TabletId: 72075186233409548 State: 4 2024-11-21T17:48:36.151090Z node 117 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:36.151406Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:36.151683Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:48:36.151732Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:48:36.151789Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 Forgetting tablet 72075186233409549 2024-11-21T17:48:36.152441Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 
hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:36.152470Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:36.152483Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2024-11-21T17:48:36.152516Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:48:36.152550Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409550 2024-11-21T17:48:36.152620Z node 117 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2024-11-21T17:48:36.153354Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:48:36.153395Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:48:36.153814Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:36.153829Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:48:36.153845Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:36.154077Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:48:36.154088Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:48:36.154382Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:48:36.154390Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:48:36.154428Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:48:36.154434Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:48:36.154650Z node 117 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:48:36.154708Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:36.154715Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:36.154771Z node 117 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:36.154786Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got 
EvNotifyTxCompletionResult 2024-11-21T17:48:36.154789Z node 117 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [117:842:2772] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:48:36.154855Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:36.154890Z node 117 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 45us result status StatusPathDoesNotExist 2024-11-21T17:48:36.154924Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted 2024-11-21T17:48:36.154989Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:36.155037Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:48:36.155045Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:48:36.155052Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:48:36.155059Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T17:48:36.155066Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T17:48:36.155073Z node 117 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 13123, MsgBus: 16819 2024-11-21T17:48:32.392511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790938195714945:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:32.392544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/00126b/r3tmp/tmpCeyHWc/pdisk_1.dat 2024-11-21T17:48:32.567518Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13123, node 1 2024-11-21T17:48:32.596443Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:32.596455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:32.596457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:32.596492Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16819 TClient is connected to server localhost:16819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:32.724393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.725763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:32.725779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:48:32.732560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:32.733170Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:32.742291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.808268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:48:32.829445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:32.845092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.908530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790938195716339:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:32.908564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:32.952610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.973567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:32.997626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.013350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.040856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.104189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.128965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790942490684150:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.128992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.129107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790942490684155:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.130003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:33.136540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790942490684157:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } took: 1.094104s took: 1.093120s took: 1.092092s took: 1.095460s took: 1.095376s took: 1.096244s took: 1.096452s took: 1.095283s took: 1.096598s took: 1.096702s Trying to start YDB, gRPC: 26666, MsgBus: 3028 2024-11-21T17:48:34.742923Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790945230533914:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:34.745303Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00126b/r3tmp/tmpQaK9V4/pdisk_1.dat 2024-11-21T17:48:34.757678Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26666, node 2 2024-11-21T17:48:34.767333Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:34.767346Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:34.767348Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:34.767390Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3028 TClient is connected to server localhost:3028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:34.842153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:34.842185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:34.843224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:34.844981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:34.848846Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:34.857902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.873577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:48:34.901504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.965591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:35.048827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790949525502609:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.048850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.053383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.059433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.073803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.089730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.104783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.120097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.130942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790949525503111:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.130964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.130973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790949525503116:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.131641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:35.135513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790949525503118:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } took: 1.072658s took: 1.073303s took: 1.079010s took: 1.079297s took: 1.079568s took: 1.079936s took: 1.080156s took: 1.080525s took: 1.081838s took: 1.081886s >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] Test command err: 2024-11-21T17:48:35.336054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:48:35.336566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:35.336591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001df9/r3tmp/tmp5eiGS8/pdisk_1.dat 2024-11-21T17:48:35.449264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.467662Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:35.515584Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:48:35.515952Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:48:35.516032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:35.516058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:35.528627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:35.642522Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:48:35.642556Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:48:35.642586Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:48:35.666648Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:48:35.666971Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:48:35.666990Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:48:35.667048Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:48:35.667116Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:48:35.667135Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:48:35.667224Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:48:35.667724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.668084Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:48:35.668102Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:48:35.685394Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:48:35.685659Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:48:35.685757Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:48:35.685813Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:35.697927Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:48:35.698212Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:35.698250Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:35.698440Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:48:35.698468Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:48:35.698475Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:48:35.698536Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:35.702541Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:35.702666Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:35.702706Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:48:35.702712Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:35.702718Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:48:35.702724Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:35.702935Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:35.702946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:35.703136Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:35.703164Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:48:35.703180Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:35.703186Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:35.703193Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:48:35.703202Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:35.703210Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:35.703218Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:48:35.703224Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:48:35.703227Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:48:35.703233Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:35.703238Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:35.703259Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:48:35.703264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:48:35.703292Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:35.703350Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:48:35.703362Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:35.703383Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:35.703392Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:48:35.703398Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:48:35.703404Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:48:35.703408Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:48:35.703460Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:48:35.703464Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:48:35.703470Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:48:35.703473Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:48:35.703487Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:48:35.703490Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:48:35.703494Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:48:35.703497Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:48:35.703502Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:48:35.703785Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:48:35.703793Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:35.714192Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:35.714229Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:48:35.714237Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:48:35.714251Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... ponse 2024-11-21T17:48:36.719641Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:984:2786], Recipient [1:984:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:36.719644Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:36.719651Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T17:48:36.719655Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:48:36.719660Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037892 for WaitForStreamClearance 2024-11-21T17:48:36.719663Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit WaitForStreamClearance 2024-11-21T17:48:36.719668Z node 1 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037892 2024-11-21T17:48:36.719671Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2024-11-21T17:48:36.719675Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit WaitForStreamClearance 2024-11-21T17:48:36.719678Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit ReadTableScan 2024-11-21T17:48:36.719682Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit ReadTableScan 2024-11-21T17:48:36.719712Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Continue 2024-11-21T17:48:36.719716Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:48:36.719719Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2024-11-21T17:48:36.719723Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2024-11-21T17:48:36.719726Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2024-11-21T17:48:36.719733Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T17:48:36.719824Z node 1 :TX_PROXY TRACE: 
StateReadTable, received event# 269287428, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715665 ShardId: 72075186224037892 2024-11-21T17:48:36.719831Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037892 2024-11-21T17:48:36.719838Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037892 2024-11-21T17:48:36.719854Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2024-11-21T17:48:36.719896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [1:1084:2865], Recipient [1:984:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T17:48:36.719901Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T17:48:36.719938Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037892, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:48:36.719958Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037892 Status: RESPONSE_DATA TxId: 281474976715665 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2024-11-21T17:48:36.719962Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received stream data from ShardId# 72075186224037892 2024-11-21T17:48:36.719966Z node 1 :TX_PROXY TRACE: [ReadTable [1:930:2740] TxId# 281474976715662] Sending TEvStreamDataAck to [1:1084:2865] ShardId# 72075186224037892 2024-11-21T17:48:36.719985Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037892, TxId: 281474976715665, PendingAcks: 0 2024-11-21T17:48:36.719992Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [1:1084:2865], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715665 ShardId: 72075186224037892 2024-11-21T17:48:36.719995Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037892 2024-11-21T17:48:36.720042Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [1:929:2740], Recipient [1:930:2740]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2024-11-21T17:48:36.720046Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2024-11-21T17:48:36.720049Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037892 2024-11-21T17:48:36.720055Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2024-11-21T17:48:36.720062Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715665, MessageQuota: 1 2024-11-21T17:48:36.720080Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [1:1084:2865], Recipient [1:930:2740]: 
NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715665 ShardId: 72075186224037892 2024-11-21T17:48:36.720083Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received TEvStreamQuotaRelease from ShardId# 72075186224037892 2024-11-21T17:48:36.720087Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Released quota 1 reserved messages from ShardId# 72075186224037892 2024-11-21T17:48:36.720093Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2024-11-21T17:48:36.720097Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037892 2024-11-21T17:48:36.720120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:984:2786], Recipient [1:984:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:36.720125Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:36.720132Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T17:48:36.720136Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:48:36.720141Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037892 for ReadTableScan 2024-11-21T17:48:36.720145Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit ReadTableScan 2024-11-21T17:48:36.720150Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037892 error: , IsFatalError: 0 2024-11-21T17:48:36.720155Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2024-11-21T17:48:36.720159Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit ReadTableScan 2024-11-21T17:48:36.720163Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit FinishPropose 2024-11-21T17:48:36.720167Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit FinishPropose 2024-11-21T17:48:36.720173Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is DelayComplete 2024-11-21T17:48:36.720176Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit FinishPropose 2024-11-21T17:48:36.720179Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037892 to execution unit CompletedOperations 2024-11-21T17:48:36.720182Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037892 on unit CompletedOperations 2024-11-21T17:48:36.720189Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037892 is Executed 2024-11-21T17:48:36.720192Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037892 executing on unit CompletedOperations 2024-11-21T17:48:36.720195Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037892 has finished 2024-11-21T17:48:36.720198Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:36.720202Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2024-11-21T17:48:36.720205Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached 
operations 2024-11-21T17:48:36.720208Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2024-11-21T17:48:36.720216Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T17:48:36.720220Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037892 on unit FinishPropose 2024-11-21T17:48:36.720225Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037892 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:48:36.720254Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T17:48:36.720298Z node 1 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [1:984:2786], Recipient [1:930:2740]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037892 Status: COMPLETE TxId: 281474976715665 Step: 0 OrderId: 281474976715665 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 49 } } 2024-11-21T17:48:36.720302Z node 1 :TX_PROXY DEBUG: [ReadTable [1:930:2740] TxId# 281474976715662] Received stream complete from ShardId# 72075186224037892 2024-11-21T17:48:36.720319Z node 1 :TX_PROXY INFO: [ReadTable [1:930:2740] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.011192s execute time: 0.430577s total time: 0.441769s 2024-11-21T17:48:36.720428Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:846:2676]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2024-11-21T17:48:36.720497Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:848:2678]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2024-11-21T17:48:36.720536Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:982:2784]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2024-11-21T17:48:36.720562Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [1:930:2740], Recipient [1:984:2786]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 >> YdbYqlClient::ConnectDbAclIsStrictlyChecked >> TTableProfileTests::OverwriteCompactionPolicy >> KqpQueryService::SeveralCTAS [GOOD] >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestDecimal1 |78.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS [GOOD] Test command err: Trying to start YDB, gRPC: 13744, MsgBus: 23199 2024-11-21T17:48:33.962968Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790939748456754:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:33.963087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00125f/r3tmp/tmpU8FwWR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13744, node 1 2024-11-21T17:48:34.018409Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:34.027441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:34.027455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:34.027457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:34.027493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23199 2024-11-21T17:48:34.063317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:34.063344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:34.064714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:34.089967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:34.092366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:34.100919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:34.133122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:48:34.173899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:34.201071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.312804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790944043425432:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.312834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.353743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.366309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.383906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.398846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.410820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.424132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:34.438531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790944043425943:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.438564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.438984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790944043425948:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:34.439728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:34.442473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790944043425950:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 9512, MsgBus: 62494 2024-11-21T17:48:34.854799Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790947353140897:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:34.856758Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00125f/r3tmp/tmp7PPOoo/pdisk_1.dat 2024-11-21T17:48:34.875902Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9512, node 2 2024-11-21T17:48:34.888820Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:34.888835Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:34.888837Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:34.888878Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62494 TClient is connected to server localhost:62494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:34.954189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:34.954217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:34.955352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:34.958136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:34.959527Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:34.960485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:34.970619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:34.987889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:34.997713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:35.168442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790951648109590:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.168470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.173923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.181621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.192805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.199006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.206393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.213509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:35.221322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790951648110092:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.221353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790951648110097:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.221357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:35.221977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:35.226445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790951648110099:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 17312, MsgBus: 28155 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00125f/r3tmp/tmpqfFrEF/pdisk_1.dat 2024-11-21T17:48:36.668760Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:36.668951Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17312, node 3 2024-11-21T17:48:36.687482Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:36.687500Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:36.687502Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:36.687545Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28155 2024-11-21T17:48:36.748445Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:36.748480Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:36.751366Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:36.771909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:36.773464Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:36.994190Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790954970494567:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:36.994249Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:36.994386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790954970494602:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:36.995112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:48:36.997680Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790954970494604:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:48:37.109997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 2024-11-21T17:48:37.177097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.246863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapRWQueries >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] Test command err: 2024-11-21T17:47:18.239278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790620821826093:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.239372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00137d/r3tmp/tmp6vXRvA/pdisk_1.dat 2024-11-21T17:47:18.308777Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:18.324777Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27961, node 1 2024-11-21T17:47:18.338196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.338221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.340963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00137d/r3tmp/yandexKnWmd7.tmp 2024-11-21T17:47:18.340993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00137d/r3tmp/yandexKnWmd7.tmp 2024-11-21T17:47:18.341023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:47:18.341054Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00137d/r3tmp/yandexKnWmd7.tmp 2024-11-21T17:47:18.341108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:18.344225Z INFO: TTestServer started on Port 24331 GrpcPort 27961 
TClient is connected to server localhost:24331 PQClient connected to localhost:27961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.391721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.394154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.402217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:47:18.588139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620821826690:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.588162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.588248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620821826702:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.588863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.590642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790620821826704:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:18.617790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.623751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.649809Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790620821826907:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.649906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTBiNDg5Y2ItZmU1MGY0MzQtMjEwZDYwM2YtMThmY2Y4M2M=, ActorId: [1:7439790620821826687:2304], ActorState: ExecuteState, TraceId: 01jd7xbsntdrr5kjm18bcxvwnv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.650356Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:18.682370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790620821827053:2597] 2024-11-21T17:47:23.238448Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790620821826093:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.238495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.798374Z :WriteRead INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.803131Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.806950Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790642296663850:2776] connected; active server actors: 1 2024-11-21T17:47:23.807041Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.807130Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.807172Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.807273Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.807828Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.807906Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.808022Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.808052Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.808058Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.808060Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.808062Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.808066Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.808073Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.808080Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.817261Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.817296Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.817310Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790642296663869:2427], now have 1 active actors on pipe 2024-11-21T17:47:23.856035Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.856059Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790642296663849:2775], now have 1 active actors on pipe 2024-11-21T17:47:23.856065Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:47:23.858231Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439790620821826320:2183] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: ... 
Z node 10 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:48:37.799618Z node 10 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:48:37.799625Z node 10 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [10:7439790959774668970:2443] (SourceId=test-message_group_id, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:48:37.799628Z node 10 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2024-11-21T17:48:37.799876Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 10, Generation: 1 2024-11-21T17:48:37.799884Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:37.799891Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [10:7439790959774668973:2443], now have 1 active actors on pipe 2024-11-21T17:48:37.799961Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T17:48:37.799970Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T17:48:37.800000Z node 10 :PERSQUEUE INFO: new Cookie test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 generated for partition 0 topic 'test-topic' owner test-message_group_id 2024-11-21T17:48:37.800029Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:48:37.800046Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:48:37.800136Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T17:48:37.800486Z :DEBUG: [/Root] SessionId [test-message_group_id|f7fac91d-d429ac34-850e6d2a-5beca30e_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:48:37.800141Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T17:48:37.800155Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:48:37.800173Z node 10 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 2024-11-21T17:48:37.800520Z :INFO: [/Root] SessionId [test-message_group_id|f7fac91d-d429ac34-850e6d2a-5beca30e_0] PartitionId [0] Generation [1] Write session established. 
Init response: session_id: "test-message_group_id|23886981-559aa442-14121b26-442dc69c_0" 2024-11-21T17:48:37.800526Z :TRACE: [/Root] TRACE_EVENT InitResponse partition_id=0 session_id=test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 2024-11-21T17:48:37.800535Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: set DirectWriteToPartitionId 0 2024-11-21T17:48:37.800665Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write 1 messages with Id from 1 to 1 2024-11-21T17:48:37.800694Z :INFO: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: close. Timeout 18446744073709.551615s 2024-11-21T17:48:37.800866Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:48:37.800880Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 1 2024-11-21T17:48:37.804519Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:48:37.804659Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:48:37.804805Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T17:48:37.804819Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T17:48:37.804856Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T17:48:37.804874Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:37.804973Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T17:48:37.804978Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T17:48:37.804998Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: test-topic partition: 0 SourceId: '\0test-message_group_id' SeqNo: 1 partNo : 0 messageNo: 1 size 98 offset: -1 2024-11-21T17:48:37.805062Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 1 partNo 0 2024-11-21T17:48:37.805114Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 187 count 1 nextOffset 1 batches 1 2024-11-21T17:48:37.805174Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 175 WTime 1732211317804 2024-11-21T17:48:37.805198Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:48:37.805770Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 120 2024-11-21T17:48:37.805783Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:48:37.805794Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:48:37.805819Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:48:37.805824Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:48:37.805840Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:48:37.805869Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:48:37.806077Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'test-topic' partition 0 user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T17:48:37.806085Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:48:37.806091Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T17:48:37.806096Z node 10 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:48:37.806113Z node 10 :PERSQUEUE DEBUG: Topic 'test-topic' partition 0 user test-consumer readTimeStamp done, result 1732211317804 queuesize 0 startOffset 0 2024-11-21T17:48:37.806217Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:48:37.806287Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T17:48:37.806294Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] OnAck: seqNo=1, txId=? 
2024-11-21T17:48:37.806298Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2024-11-21T17:48:37.901003Z :INFO: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:48:37.901027Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:48:37.901247Z :INFO: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:48:37.901304Z :DEBUG: [/Root] SessionId [test-message_group_id|23886981-559aa442-14121b26-442dc69c_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:48:37.903544Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 grpc read done: success: 0 data: 2024-11-21T17:48:37.903556Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 grpc read failed 2024-11-21T17:48:37.903563Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 grpc closed 2024-11-21T17:48:37.903567Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|23886981-559aa442-14121b26-442dc69c_0 is DEAD 2024-11-21T17:48:37.904615Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:37.905821Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:37.905843Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [10:7439790959774668973:2443] destroyed 2024-11-21T17:48:37.905862Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TKesusTest::TestQuoterResourceCreation >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD] >> KqpQueryService::ReadDatashardAndColumnshard >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterHDRRParametersValidation >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> BsControllerTest::TestLocalSelfHeal >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> KqpQueryService::TableSink_OlapRWQueries [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> TTablesWithReboots::AlterCopyWithReboots [GOOD] >> KqpService::SwitchCache-UseCache [GOOD] |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] Test command err: Trying to start YDB, gRPC: 3701, MsgBus: 5104 2024-11-21T17:48:26.057280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790910683680394:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:26.057344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001294/r3tmp/tmpAPAKIO/pdisk_1.dat 2024-11-21T17:48:26.119576Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3701, node 1 2024-11-21T17:48:26.151417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:26.151432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:26.151434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:26.151486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5104 TClient is connected to server localhost:5104 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:48:26.190738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:26.190768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:26.191942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:26.207114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.394873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790910683680853:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.394902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.424745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.436798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:48:26.436834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:48:26.436872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:48:26.436886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:26.436899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:48:26.436908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:48:26.436917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:48:26.436926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:48:26.436937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:48:26.436947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:48:26.436959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:48:26.436971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790910683681001:2303];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:48:26.437209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:48:26.437241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:48:26.437277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:48:26.437292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:26.437306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:48:26.437320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:48:26.437333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:48:26.437519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:48:26.437549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:48:26.437565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:48:26.437580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:48:26.437597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439790910683681061:2312];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:48:26.438033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:48:26.438040Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:48:26.438051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:48:26.438056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:48:26.438069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:48:26.438076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:48:26.438084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:48:26.438093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:48:26.438117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186 ... .858861Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:38.858885Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:48:38.858909Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:48:38.858930Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:48:38.858942Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:48:38.858956Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:48:38.858968Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:48:38.858984Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:48:38.859000Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7439790961830988710:2305];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:48:38.859384Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:48:38.859399Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:48:38.859408Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:48:38.859411Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:48:38.859421Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:48:38.859424Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:48:38.859430Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:48:38.859438Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:48:38.859444Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:48:38.859446Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:48:38.859455Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:48:38.859459Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:48:38.859494Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:48:38.859504Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:48:38.859514Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:48:38.859521Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:48:38.859529Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:48:38.859533Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:48:38.859544Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:48:38.859546Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:48:38.859554Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:48:38.859557Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:48:38.905660Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790961830988817:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:38.905675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790961830988822:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:38.905687Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:38.906325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:48:38.907740Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790961830988824:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:48:39.103571Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037890;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 103 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"103;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2024-11-21T17:48:39.103571Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 104 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"104;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2024-11-21T17:48:39.103665Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=5;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 102 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"101;"}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"102;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; 2024-11-21T17:48:39.103716Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;local_tx_no=6;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 101 ] ;info={"intervals":[{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_not_include":2},"id":281474976715661}],"finishes":[]},"p":{"include":-1}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"101;"}},{"i":{"txs":[{"inc":{"count":2},"id":281474976715661}],"starts":[{"inc":{"count_include":1},"id":281474976715661}],"finishes":[{"inc":{"count_include":1},"id":281474976715661}]},"p":{"include":0,"pk":"102;"}},{"i":{"txs":[],"starts":[],"finishes":[{"inc":{"count_not_include":2},"id":281474976715661}]},"p":{"include":2147483647}}]}; |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> 
YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings >> BsControllerTest::TestLocalSelfHeal [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> TTableProfileTests::OverwriteExecutionPolicy >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] >> ColumnBuildTest::AlreadyExists >> TxUsage::WriteToTopic_Demo_24 [GOOD] >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack >> TTablesWithReboots::SimultaneousDropForceDrop [GOOD] >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 9544, MsgBus: 22341 2024-11-21T17:48:33.296745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790939337691262:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:33.296817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001265/r3tmp/tmp4elZwk/pdisk_1.dat 2024-11-21T17:48:33.388272Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9544, node 1 2024-11-21T17:48:33.395422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:33.395451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:33.396569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:33.412154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:33.412164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:33.412165Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:33.412191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22341 TClient is connected to server localhost:22341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:48:33.505597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:33.508937Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:33.529031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:33.559276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:48:33.581573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.595880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:33.709285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790939337692657:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.709314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.749634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.757115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.765563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.822141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.836529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.849724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:33.917231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790939337693181:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.917273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.917377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790939337693192:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:33.918267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:33.921105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790939337693194:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:34.178612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 took: 2.047482s took: 2.048075s took: 2.049218s took: 2.048663s took: 2.050163s took: 2.050447s took: 2.049564s took: 2.051265s took: 2.050529s took: 2.051278s Trying to start YDB, gRPC: 18117, MsgBus: 12431 2024-11-21T17:48:36.695230Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790952644340383:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:36.700655Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001265/r3tmp/tmpjwr2SM/pdisk_1.dat 2024-11-21T17:48:36.722558Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18117, node 2 2024-11-21T17:48:36.739646Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:36.739660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:36.739662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:36.739712Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12431 TClient is connected to server localhost:12431 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:36.798804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:36.798840Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:36.799304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.799841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:48:36.800720Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:36.807912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:36.827321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:36.846601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:36.858781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:37.072147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790956939309097:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.072178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.079114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.088933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.099461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.122822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.130951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.143968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.158476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790956939309590:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.158550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.159626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790956939309595:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.164456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:37.169072Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:48:37.169195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439790956939309598:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:37.369312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 took: 2.245073s took: 2.246196s took: 2.246974s took: 2.247200s took: 2.247540s took: 2.248179s took: 2.248241s took: 2.248301s took: 2.248463s took: 2.250575s ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2024-11-21T17:48:39.482218Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T17:48:39.482240Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T17:48:39.482298Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T17:48:39.482304Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T17:48:39.482309Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T17:48:39.482312Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T17:48:39.482320Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T17:48:39.482324Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T17:48:39.482330Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T17:48:39.482333Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T17:48:39.482343Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T17:48:39.482349Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T17:48:39.482359Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T17:48:39.482367Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T17:48:39.482373Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T17:48:39.482377Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T17:48:39.482383Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T17:48:39.482387Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T17:48:39.482393Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T17:48:39.482397Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T17:48:39.482403Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T17:48:39.482407Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T17:48:39.482414Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T17:48:39.482417Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T17:48:39.482427Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T17:48:39.482431Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T17:48:39.482437Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T17:48:39.482441Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T17:48:39.482448Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T17:48:39.482452Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T17:48:39.482458Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T17:48:39.482462Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T17:48:39.482467Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T17:48:39.482471Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T17:48:39.482477Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T17:48:39.482481Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T17:48:39.482486Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T17:48:39.482490Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 
2024-11-21T17:48:39.482498Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T17:48:39.482502Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T17:48:39.482508Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T17:48:39.482512Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T17:48:39.482517Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T17:48:39.482521Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T17:48:39.482526Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T17:48:39.482529Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T17:48:39.482535Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T17:48:39.482539Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T17:48:39.482548Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T17:48:39.482552Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T17:48:39.482558Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T17:48:39.482562Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T17:48:39.482568Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T17:48:39.482573Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T17:48:39.482578Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T17:48:39.482582Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T17:48:39.482588Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T17:48:39.482592Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T17:48:39.482598Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T17:48:39.482602Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T17:48:39.482607Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T17:48:39.482611Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T17:48:39.482616Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T17:48:39.482619Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T17:48:39.482625Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T17:48:39.482629Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T17:48:39.482634Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T17:48:39.482638Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T17:48:39.482643Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T17:48:39.482647Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T17:48:39.482656Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T17:48:39.482659Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T17:48:39.485733Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T17:48:39.486002Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T17:48:39.486011Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T17:48:39.486017Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T17:48:39.486023Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# 
[5:2717:41] 2024-11-21T17:48:39.486029Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T17:48:39.486035Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T17:48:39.486041Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T17:48:39.486047Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T17:48:39.486053Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T17:48:39.486059Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T17:48:39.486065Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T17:48:39.486071Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T17:48:39.486076Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T17:48:39.486084Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T17:48:39.486090Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T17:48:39.486096Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T17:48:39.486102Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T17:48:39.486108Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T17:48:39.486114Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T17:48:39.486119Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T17:48:39.486125Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T17:48:39.486131Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T17:48:39.486136Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T17:48:39.486142Z 
25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T17:48:39.486148Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T17:48:39.486154Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T17:48:39.486159Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T17:48:39.486165Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T17:48:39.486171Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T17:48:39.486176Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T17:48:39.486182Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T17:48:39.486188Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T17:48:39.486211Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T17:48:39.486217Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
0.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:39.805081Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000003e:1:1:2:0] -> [8000003e:2:1:2:0] 2024-11-21T17:48:39.805089Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T17:48:39.805093Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000003e:1:2:0:0] -> [8000003e:2:2:0:0] 2024-11-21T17:48:39.805107Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T17:48:39.805135Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000002e:1:2:1:0] -> [8000002e:2:2:1:0] 2024-11-21T17:48:39.805144Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T17:48:39.805148Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000002e:1:0:2:0] -> [8000002e:2:0:2:0] 2024-11-21T17:48:39.805156Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:39.805161Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000002e:1:1:0:0] -> [8000002e:2:1:0:0] 2024-11-21T17:48:39.805169Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T17:48:39.805174Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000002e:1:1:1:0] -> [8000002e:2:1:1:0] 2024-11-21T17:48:39.805182Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T17:48:39.805186Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000002e:1:0:0:0] -> [8000002e:2:0:0:0] 2024-11-21T17:48:39.805195Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.805199Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000002e:2:2:2:0] PDiskId# 1001 VSlotId# 1009 created 2024-11-21T17:48:39.805207Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000002e:2:2:2:0] status changed to INIT_PENDING 2024-11-21T17:48:39.805216Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T17:48:39.805221Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000002e:1:0:1:0] -> [8000002e:2:0:1:0] 2024-11-21T17:48:39.805229Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:39.805233Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000002e:1:1:2:0] -> [8000002e:2:1:2:0] 2024-11-21T17:48:39.805245Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T17:48:39.805249Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000002e:1:2:0:0] -> [8000002e:2:2:0:0] 2024-11-21T17:48:39.805263Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T17:48:39.805267Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000001e:1:2:1:0] -> [8000001e:2:2:1:0] 2024-11-21T17:48:39.805274Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T17:48:39.805279Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000001e:1:0:2:0] -> [8000001e:2:0:2:0] 2024-11-21T17:48:39.805286Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:39.805290Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000001e:1:1:0:0] -> [8000001e:2:1:0:0] 2024-11-21T17:48:39.805298Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T17:48:39.805302Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000001e:1:1:1:0] -> [8000001e:2:1:1:0] 2024-11-21T17:48:39.805309Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T17:48:39.805314Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000001e:1:0:0:0] -> [8000001e:2:0:0:0] 2024-11-21T17:48:39.805322Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.805326Z 27 
00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000001e:2:2:2:0] PDiskId# 1002 VSlotId# 1009 created 2024-11-21T17:48:39.805338Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000001e:2:2:2:0] status changed to INIT_PENDING 2024-11-21T17:48:39.805347Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T17:48:39.805351Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000001e:1:0:1:0] -> [8000001e:2:0:1:0] 2024-11-21T17:48:39.805359Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:39.805366Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000001e:1:1:2:0] -> [8000001e:2:1:2:0] 2024-11-21T17:48:39.805374Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T17:48:39.805378Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000001e:1:2:0:0] -> [8000001e:2:2:0:0] 2024-11-21T17:48:39.805391Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T17:48:39.805396Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000000e:1:2:1:0] -> [8000000e:2:2:1:0] 2024-11-21T17:48:39.805404Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T17:48:39.805408Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000000e:1:0:2:0] -> [8000000e:2:0:2:0] 2024-11-21T17:48:39.805417Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:39.805422Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000000e:1:1:0:0] -> [8000000e:2:1:0:0] 2024-11-21T17:48:39.805429Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T17:48:39.805434Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000000e:1:1:1:0] -> [8000000e:2:1:1:0] 2024-11-21T17:48:39.805443Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T17:48:39.805447Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000000e:1:0:0:0] -> [8000000e:2:0:0:0] 2024-11-21T17:48:39.805456Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.805460Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000000e:2:2:2:0] PDiskId# 1000 VSlotId# 1010 created 2024-11-21T17:48:39.805467Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000000e:2:2:2:0] status changed to INIT_PENDING 2024-11-21T17:48:39.805477Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T17:48:39.805483Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000000e:1:0:1:0] -> [8000000e:2:0:1:0] 2024-11-21T17:48:39.805491Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:39.805496Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000000e:1:1:2:0] -> [8000000e:2:1:2:0] 2024-11-21T17:48:39.805505Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T17:48:39.805510Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000000e:1:2:0:0] -> [8000000e:2:2:0:0] 2024-11-21T17:48:39.805525Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T17:48:39.805530Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000007e:1:2:1:0] -> [8000007e:2:2:1:0] 2024-11-21T17:48:39.805539Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T17:48:39.805545Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000007e:1:0:2:0] -> [8000007e:2:0:2:0] 2024-11-21T17:48:39.805567Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:39.805572Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000007e:1:1:0:0] -> [8000007e:2:1:0:0] 2024-11-21T17:48:39.805585Z 24 00h05m00.102048s :BS_NODE 
DEBUG: [24] NodeServiceSetUpdate 2024-11-21T17:48:39.805590Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000007e:1:1:1:0] -> [8000007e:2:1:1:0] 2024-11-21T17:48:39.805601Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T17:48:39.805606Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000007e:1:0:0:0] -> [8000007e:2:0:0:0] 2024-11-21T17:48:39.805616Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.805621Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000007e:2:2:2:0] PDiskId# 1001 VSlotId# 1010 created 2024-11-21T17:48:39.805628Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000007e:2:2:2:0] status changed to INIT_PENDING 2024-11-21T17:48:39.805638Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T17:48:39.805643Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000007e:1:0:1:0] -> [8000007e:2:0:1:0] 2024-11-21T17:48:39.805653Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:39.805660Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000007e:1:1:2:0] -> [8000007e:2:1:2:0] 2024-11-21T17:48:39.805669Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T17:48:39.805675Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000007e:1:2:0:0] -> [8000007e:2:2:0:0] 2024-11-21T17:48:39.806508Z 27 00h05m01.757048s :BS_NODE DEBUG: [27] VDiskId# [8000000e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.806593Z 27 00h05m01.873048s :BS_NODE DEBUG: [27] VDiskId# [8000004e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.806683Z 27 00h05m02.668048s :BS_NODE DEBUG: [27] VDiskId# [8000003e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.806765Z 27 00h05m02.928536s :BS_NODE DEBUG: [27] VDiskId# [8000006e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.806875Z 27 00h05m03.262048s :BS_NODE DEBUG: [27] VDiskId# [8000005e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.806973Z 27 00h05m04.173048s :BS_NODE DEBUG: [27] VDiskId# [8000001e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.807079Z 27 00h05m04.608048s :BS_NODE DEBUG: [27] VDiskId# [8000007e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.807182Z 27 00h05m04.614048s :BS_NODE DEBUG: [27] VDiskId# [8000002e:2:2:2:0] status changed to REPLICATING 2024-11-21T17:48:39.807519Z 27 00h05m08.106048s :BS_NODE DEBUG: [27] VDiskId# [8000004e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.809415Z 27 00h05m08.106560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.809434Z 27 00h05m08.106560s :BS_NODE DEBUG: [27] VDiskId# [8000004e:1:2:2:0] destroyed 2024-11-21T17:48:39.809576Z 27 00h05m10.842048s :BS_NODE DEBUG: [27] VDiskId# [8000003e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.811402Z 27 00h05m10.842560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.811420Z 27 00h05m10.842560s :BS_NODE DEBUG: [27] VDiskId# [8000003e:1:2:2:0] destroyed 2024-11-21T17:48:39.811465Z 27 00h05m12.290048s :BS_NODE DEBUG: [27] VDiskId# [8000007e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.813240Z 27 00h05m12.290560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.813258Z 27 00h05m12.290560s :BS_NODE DEBUG: [27] VDiskId# [8000007e:1:2:2:0] destroyed 2024-11-21T17:48:39.813293Z 27 00h05m12.666536s :BS_NODE DEBUG: [27] VDiskId# [8000006e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.814938Z 27 00h05m12.667048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.814955Z 27 
00h05m12.667048s :BS_NODE DEBUG: [27] VDiskId# [8000006e:1:2:2:0] destroyed 2024-11-21T17:48:39.814993Z 27 00h05m13.870048s :BS_NODE DEBUG: [27] VDiskId# [8000001e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.816602Z 27 00h05m13.870560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.816617Z 27 00h05m13.870560s :BS_NODE DEBUG: [27] VDiskId# [8000001e:1:2:2:0] destroyed 2024-11-21T17:48:39.816855Z 27 00h05m22.349048s :BS_NODE DEBUG: [27] VDiskId# [8000000e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.818470Z 27 00h05m22.349560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.818484Z 27 00h05m22.349560s :BS_NODE DEBUG: [27] VDiskId# [8000000e:1:2:2:0] destroyed 2024-11-21T17:48:39.818567Z 27 00h05m24.044048s :BS_NODE DEBUG: [27] VDiskId# [8000002e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.820159Z 27 00h05m24.044560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.820172Z 27 00h05m24.044560s :BS_NODE DEBUG: [27] VDiskId# [8000002e:1:2:2:0] destroyed 2024-11-21T17:48:39.820528Z 27 00h05m31.634048s :BS_NODE DEBUG: [27] VDiskId# [8000005e:2:2:2:0] status changed to READY 2024-11-21T17:48:39.822135Z 27 00h05m31.634560s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:39.822149Z 27 00h05m31.634560s :BS_NODE DEBUG: [27] VDiskId# [8000005e:1:2:2:0] destroyed >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD] >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterCopyWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:04.334537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:04.334571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.334579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:04.334588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:04.334607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:04.334614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:04.334630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.334739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:04.344915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:04.344932Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.346527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:04.346592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:04.346614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:04.348798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:04.348868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:04.348952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.349117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.349715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.349983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.349993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.350006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:04.350012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.350019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:04.350055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:04.351234Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.368171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:04.368275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.368339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:04.368385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:04.368394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.368948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.368974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:04.369007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.369018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:04.369022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:04.369026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:04.369428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.369439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:04.369444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:04.369764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.369772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.369778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.369785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.370255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:04.370692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:04.370756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:04.370906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.370927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:04.370932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.370998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:04.371006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.371029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:04.371040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.371509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.371521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.371552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.371555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:04.371616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.371622Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:04.371634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:04.371638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.371644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:04.371649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.371652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:04.371654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:04.371664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:04.371668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:04.371671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1006 Step: 5000007 OrderId: 1006 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 166 } } 2024-11-21T17:48:39.509955Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 471 RawX2: 566935685515 } Origin: 72075186233409547 State: 5 TxId: 1006 Step: 0 Generation: 2 2024-11-21T17:48:39.509960Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1006, tablet: 72075186233409547, partId: 0 2024-11-21T17:48:39.509970Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1006:0, at schemeshard: 72057594046678944, message: Source { RawX1: 471 RawX2: 566935685515 } Origin: 72075186233409547 State: 5 TxId: 1006 Step: 0 Generation: 2 2024-11-21T17:48:39.509974Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:48:39.510562Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:48:39.510576Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:48:39.510582Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1006:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:48:39.510587Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1006, done: 0, blocked: 1 2024-11-21T17:48:39.510596Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1006:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1006 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:48:39.510623Z node 132 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 137 -> 129 2024-11-21T17:48:39.510640Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:39.510649Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:39.510717Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:48:39.510766Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:48:39.511023Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:39.511031Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:39.511055Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:48:39.511075Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:39.511080Z node 132 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [132:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2024-11-21T17:48:39.511083Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [132:203:2206], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T17:48:39.511175Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:48:39.511181Z node 132 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1006:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:39.511194Z node 132 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:48:39.511199Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1006:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:39.511203Z node 132 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 129 -> 240 2024-11-21T17:48:39.511303Z node 132 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:48:39.511311Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:48:39.511316Z node 132 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:48:39.511320Z node 132 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:48:39.511324Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:39.511495Z node 132 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:48:39.511505Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:48:39.511509Z node 132 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:48:39.511513Z node 132 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:48:39.511516Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:39.511525Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T17:48:39.511945Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:48:39.511955Z node 132 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1006:0 ProgressState, at schemeshard: 
72057594046678944 2024-11-21T17:48:39.512029Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:39.512054Z node 132 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T17:48:39.512058Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:48:39.512062Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: true 2024-11-21T17:48:39.512066Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:48:39.512070Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T17:48:39.512074Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T17:48:39.512093Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:48:39.512265Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:48:39.512500Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:48:39.513524Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 471 RawX2: 566935685515 } TabletId: 72075186233409547 State: 4 2024-11-21T17:48:39.513540Z node 132 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:39.513821Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:39.513900Z node 132 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:48:39.513967Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:48:39.514029Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:48:39.514596Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:39.514605Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:48:39.514618Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:39.515173Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:48:39.515187Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:48:39.515255Z node 132 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T17:48:39.515310Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:48:39.515319Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:48:39.515367Z node 132 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:48:39.515382Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:48:39.515386Z node 132 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [132:709:2673] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:48:39.515432Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:39.515443Z node 132 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> TxUsage::WriteToTopic_Demo_25 >> YdbOlapStore::LogLast50 [GOOD] >> YdbOlapStore::LogGrepNonExisting ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] Test command err: Trying to start YDB, gRPC: 8544, MsgBus: 9003 2024-11-21T17:48:24.147999Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790902028130207:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:24.148154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012c8/r3tmp/tmpkWSlJu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8544, node 1 2024-11-21T17:48:24.208112Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:24.215673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:24.215685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:24.215686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:24.215715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9003 2024-11-21T17:48:24.247946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:24.247968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:24.249051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9003 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:24.287464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.295591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.316223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.347200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.373057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.427567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790902028131732:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.427603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.468891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.475768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.530715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.538522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.547092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.561358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.576815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790902028132253:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.576839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.576958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790902028132258:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.577725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:24.589543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790902028132260:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 25572, MsgBus: 22018 2024-11-21T17:48:26.098060Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790911736539730:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:26.101431Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012c8/r3tmp/tmpL6OzBb/pdisk_1.dat 2024-11-21T17:48:26.118820Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25572, node 2 2024-11-21T17:48:26.126918Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:26.126933Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:26.126935Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:26.126984Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22018 TClient is connected to server localhost:22018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:26.198747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:26.198780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:26.199840Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:26.203105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.204887Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:26.212437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.229464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:26.258447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.271069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.414796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790911736541115:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.414818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.419750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe ... 0;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:39.637448Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:48:39.637461Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:48:39.637473Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:48:39.637486Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:48:39.637501Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:48:39.637515Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:48:39.637529Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:48:39.637542Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7439790967814109189:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:48:39.639265Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:48:39.639276Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:48:39.639284Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:48:39.639287Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:48:39.639297Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:48:39.639300Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:48:39.639305Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:48:39.639308Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:48:39.639315Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:48:39.639321Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:48:39.639326Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:48:39.639329Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:48:39.639356Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:48:39.639363Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:48:39.639374Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:48:39.639381Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:48:39.639387Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:48:39.639390Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:48:39.639400Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:48:39.639407Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 
2024-11-21T17:48:39.639414Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:48:39.639420Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:48:39.639750Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:48:39.639765Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:48:39.639775Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:48:39.639894Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:48:39.639958Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:48:39.639965Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:48:39.639985Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:48:39.639999Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:48:39.640017Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:48:39.640033Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:48:39.640056Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:48:39.640068Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:48:39.640130Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:48:39.640143Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:48:39.640164Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:48:39.640175Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:48:39.640188Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:48:39.640200Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:48:39.640251Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:48:39.640262Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:48:39.640275Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:48:39.640285Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> ColumnBuildTest::ValidDefaultValue >> YdbYqlClient::CreateTableWithMESettings [GOOD] >> ColumnBuildTest::CancelBuild ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::SimultaneousDropForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:03.992122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:03.992143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:03.992147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 
0.010000s 2024-11-21T17:48:03.992151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:03.992160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:03.992163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:03.992170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:03.992254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:04.002078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:04.002093Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.003893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:04.003978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:04.004002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:04.006271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:04.006332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:04.006408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.006598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.007214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.007418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.007427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.007437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:04.007444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.007449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:04.007478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:04.009107Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.024426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2024-11-21T17:48:04.024494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.024540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:04.024580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:04.024587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.025174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.025193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:04.025222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.025229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:04.025233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:04.025238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:04.025572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.025579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:04.025583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:04.025836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.025841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.025846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.025852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.026466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:04.026812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:04.026854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:04.027022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.027043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:04.027050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.027093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:04.027099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.027122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:04.027133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.027438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.027444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.027477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.027482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:04.027549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.027555Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:04.027564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:04.027569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.027575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:04.027580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.027584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:04.027587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:04.027596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:04.027601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:04.027605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
wnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:40.189839Z node 139 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:40.189842Z node 139 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:48:40.189846Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:48:40.189854Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:48:40.190165Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.190172Z node 139 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:40.190224Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:48:40.190239Z node 139 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:40.190241Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:40.190245Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:40.190247Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:40.190249Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:40.190251Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:40.190264Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:48:40.190623Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:40.190647Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:40.191378Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 597000456459 } TabletId: 72075186233409546 State: 4 2024-11-21T17:48:40.191391Z node 139 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:40.191648Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:40.191701Z node 139 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:48:40.191743Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.191775Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2024-11-21T17:48:40.191830Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:40.191835Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:48:40.191844Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 Forgetting tablet 72075186233409546 2024-11-21T17:48:40.192686Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:48:40.192699Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:48:40.192777Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:48:40.192824Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:40.192829Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T17:48:40.192840Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:40.192844Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:40.193290Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpForceDropUnsafe Drop { Id: 3 } } TxId: 1004 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:40.193301Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: TDropForceUnsafe Propose, path: /, pathId: 3, opId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193305Z node 139 :FLAT_TX_SCHEMESHARD WARN: UNSAFE DELETION IS CALLED. TDropForceUnsafe is UNSAFE operation. Usually it is called for deleting user's DB (tenant). But it could be triggered by administrator for special emergency cases. And there is that case. I hope you are aware of the problems with it. 1: Shared transactions among the tables could be broken if one of the tables is force dropped. Dependent transactions on other tables could be blocked forever. 2: Loans are going to be lost. Force dropped tablets are never return loans. Some tablets would be waiting for borrowed blocks forever. 
Details: path: /, pathId: 3, opId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193313Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1004:1, propose status:StatusNameConflict, reason: Check failed: path: '', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193408Z node 139 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193473Z node 139 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193689Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1004, response: Status: StatusNameConflict Reason: "Check failed: path: \'\', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175" TxId: 1004 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193700Z node 139 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1004, subject: , status: StatusNameConflict, reason: Check failed: path: '', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp:175, operation: DROP PATH UNSAFE, path: 2024-11-21T17:48:40.193719Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:40.193722Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [139:475:2450] 2024-11-21T17:48:40.193737Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:40.193739Z node 139 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [139:475:2450] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:48:40.193786Z node 139 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:40.193794Z node 139 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T17:48:40.193829Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193846Z node 139 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 19us result status StatusPathDoesNotExist 2024-11-21T17:48:40.193864Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193897Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:40.193912Z node 139 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2024-11-21T17:48:40.193959Z node 139 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> YdbYqlClient::TestDecimalFullStack [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2024-11-21T17:48:36.478073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790955445808007:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:36.478749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001202/r3tmp/tmpL8GSOi/pdisk_1.dat 2024-11-21T17:48:36.568028Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4139, node 1 2024-11-21T17:48:36.597960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:36.597990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:36.598007Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:36.598071Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17811 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:48:36.623204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:36.623240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:36.627238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:36.646142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.647204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:36.647215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.648207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:48:36.648303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:48:36.648308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:48:36.649179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:36.649189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:48:36.649575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:36.650748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211316696, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:36.650760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:48:36.650826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:48:36.651411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:36.651470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:36.651481Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:48:36.651494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:48:36.651504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:48:36.651518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T17:48:36.652151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:48:36.652161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:48:36.652166Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:48:36.652178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T17:48:36.653271Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:48:36.829450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.829613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:48:36.829840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:36.829848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.830798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:48:36.830884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:36.830952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:36.830972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:48:36.831433Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:48:36.831450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:48:36.831455Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:48:36.831507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:48:36.831510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:48:36.831511Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:48:36.832877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:48:36.834072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:36.834147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:36.834169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:36.834193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:48:36.836384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:48:36.849442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:48:36.849456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:48:36.849543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:48:36.849544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:48:36.849561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:48:36.849562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:48:36.849566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T17:48:36.850063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:48:36.850990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211316899, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:36.850999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 
HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211316899 2024-11-21T17:48:36.851028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:48:36.851433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:36.851530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:36.851542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:48:36.851972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 720575940466444 ... CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:40.040411Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:40.040438Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:40.043040Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:40.046302Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.046443Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:40.046452Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.048603Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:48:40.048685Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:48:40.048691Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:48:40.064920Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:40.064942Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:48:40.068076Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:40.074136Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.084939Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211320126, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:40.084964Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:48:40.085073Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:48:40.091480Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:40.091576Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:40.091595Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:48:40.091620Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:48:40.091634Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:48:40.091670Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:48:40.092171Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:48:40.092182Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:48:40.092187Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:48:40.092204Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:48:40.273614Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.273728Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:48:40.273867Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:40.273879Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.274597Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T17:48:40.274662Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:40.274727Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:40.274753Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:48:40.274835Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:48:40.274939Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:40.274957Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:40.274968Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:48:40.275008Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:40.275017Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:40.275018Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:48:40.277574Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:40.277601Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:48:40.277986Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:48:40.330763Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:48:40.330777Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:48:40.330808Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:48:40.331349Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:48:40.332319Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211320378, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:40.332340Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211320378 2024-11-21T17:48:40.332375Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:48:40.332798Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:40.332898Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T17:48:40.332915Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:48:40.333128Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:40.333147Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:40.333152Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:48:40.333192Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:40.333201Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:40.333204Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:48:40.333705Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211320378 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 434 } } 2024-11-21T17:48:40.333774Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:48:40.333787Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.333792Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:48:40.334471Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:48:40.334494Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:48:40.334510Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood >> TTablesWithReboots::CreateTableWithReboots [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDecimalFullStack [GOOD] Test command err: 2024-11-21T17:48:36.689571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790955450895737:2211];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:36.689625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00121b/r3tmp/tmpiIDvqs/pdisk_1.dat 2024-11-21T17:48:36.780136Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:36.790472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:36.790498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:36.797152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64567, node 1 2024-11-21T17:48:36.823459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:36.823472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:36.823474Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:36.823510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:48:36.851487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.852432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:36.852451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.853220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:48:36.853287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:48:36.853297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:48:36.854274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:36.854285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 2024-11-21T17:48:36.854681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:36.854929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:36.855510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211316899, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:36.855521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:48:36.855575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:48:36.855948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:36.855992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:36.856008Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:48:36.856019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:48:36.856027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:48:36.856040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:48:36.856541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:48:36.856577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:48:36.856586Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:48:36.856602Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:48:37.059569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790959745863791:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.059615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.059749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790959745863803:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.060550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.060631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:37.060637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T17:48:37.060650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:37.060653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:48:37.060661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:37.060673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:48:37.060724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T17:48:37.060823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:37.060840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:48:37.069619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:48:37.069725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:37.069894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:37.069928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T17:48:37.070005Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:37.070025Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:37.070036Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:37.070397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:37.070418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:37.070423Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:48:37.070479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:37.070482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:37.070484Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:48:37.070498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:37.070501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:37.070502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11- ... SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:48:40.713530Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:48:40.713539Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:48:40.713541Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T17:48:40.713556Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:48:40.713564Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:48:40.713566Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T17:48:40.713581Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:48:40.715124Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211320763, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:40.715145Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211320763, at schemeshard: 72057594046644480 2024-11-21T17:48:40.715186Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T17:48:40.715214Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211320763, at schemeshard: 72057594046644480 
2024-11-21T17:48:40.715236Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T17:48:40.715245Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211320763, at schemeshard: 72057594046644480 2024-11-21T17:48:40.715258Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T17:48:40.715272Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732211320763 2024-11-21T17:48:40.715283Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T17:48:40.716128Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:40.716285Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:40.716316Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T17:48:40.716331Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T17:48:40.716378Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T17:48:40.716385Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T17:48:40.716395Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:48:40.716404Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T17:48:40.716414Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T17:48:40.716420Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T17:48:40.716425Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:48:40.716437Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T17:48:40.716440Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T17:48:40.716442Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T17:48:40.716447Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T17:48:40.716849Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:48:40.716857Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:48:40.716860Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:48:40.716899Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 
2024-11-21T17:48:40.716902Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:48:40.716904Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:48:40.716915Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:48:40.716917Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:48:40.716918Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:48:40.716931Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:48:40.716933Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:48:40.716935Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:48:40.716947Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:48:40.716949Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:48:40.716951Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T17:48:40.716964Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T17:48:40.717715Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439790970087903417:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:48:40.794943Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:48:40.794988Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:48:40.795958Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:48:40.810408Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xe9w6adr2bh4whmm06s7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTk1NDk0NTctMzVjYjNiYjYtOGM0YmRmYjQtM2U5MDEyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:40.832570Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xe9zh157g7e6bj2rbb6xc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTk1NDk0NTctMzVjYjNiYjYtOGM0YmRmYjQtM2U5MDEyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:40.851394Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xea081cx1vmydww662w5x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTk1NDk0NTctMzVjYjNiYjYtOGM0YmRmYjQtM2U5MDEyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:40.878176Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xea13e9zev2wm2r7qbp66, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTk1NDk0NTctMzVjYjNiYjYtOGM0YmRmYjQtM2U5MDEyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:40.901703Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xea1n1n4zg30gckm9f1w9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTk1NDk0NTctMzVjYjNiYjYtOGM0YmRmYjQtM2U5MDEyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:41.005686Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xea2c4116k4z9qzn2exrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTk1NDk0NTctMzVjYjNiYjYtOGM0YmRmYjQtM2U5MDEyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:41.007550Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xea2c4116k4z9qzn2exrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTk1NDk0NTctMzVjYjNiYjYtOGM0YmRmYjQtM2U5MDEyZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |78.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> ColumnBuildTest::AlreadyExists [GOOD] >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CreateTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:21.531529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:21.531545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.531548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:21.531551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:21.531559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:21.531561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:21.531567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.531621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:21.540648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:21.540663Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.542573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2024-11-21T17:48:21.542664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:21.542688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:21.545146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:21.545204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:21.545280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.545432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.546132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.546354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.546362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.546372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:21.546378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.546384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:21.546419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:21.547503Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.563525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:21.563585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.563628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:21.563667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:21.563674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.564182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.564203Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:21.564254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.564263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:21.564267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:21.564272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:21.564674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.564686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:21.564690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:21.565026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.565035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.565041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.565046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.565597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:21.566012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:21.566056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:21.566221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.566243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:21.566249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.566295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:21.566301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.566321Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.566331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.566715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.566723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.566750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.566755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:21.566813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.566819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:21.566828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:21.566832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.566837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:21.566843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.566847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:21.566851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:21.566861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:21.566866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:21.566870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
EMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:48:41.450127Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:48:41.451129Z node 71 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:41.451136Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:48:41.451156Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:41.451181Z node 71 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:41.451185Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [71:205:2208], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T17:48:41.451189Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [71:205:2208], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T17:48:41.451265Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.451270Z node 71 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:48:41.451630Z node 71 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:48:41.451642Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:48:41.451646Z node 71 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:48:41.451651Z node 71 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:48:41.451656Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:48:41.451724Z node 71 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:48:41.451732Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:48:41.451740Z node 71 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:48:41.451743Z node 71 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:48:41.451747Z node 71 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:48:41.451754Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T17:48:41.451908Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 116 } } 2024-11-21T17:48:41.451918Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:41.451938Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 116 } } 2024-11-21T17:48:41.451945Z node 71 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 116 } } 2024-11-21T17:48:41.452006Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 337 RawX2: 304942680341 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:41.452009Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:41.452015Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 337 RawX2: 304942680341 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:41.452018Z node 71 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:41.452024Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 337 RawX2: 304942680341 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:48:41.452031Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:41.452034Z node 71 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.452038Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:41.452046Z node 71 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2024-11-21T17:48:41.452813Z node 71 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:48:41.452835Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:48:41.452917Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.452938Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.452987Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.452993Z node 71 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:48:41.453003Z node 71 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:48:41.453007Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:48:41.453011Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T17:48:41.453024Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [71:304:2296] message: TxId: 1002 2024-11-21T17:48:41.453028Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:48:41.453032Z node 71 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:48:41.453035Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:48:41.453051Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:48:41.453472Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:48:41.453479Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [71:305:2297] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:48:41.453546Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:41.453573Z node 71 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/Table1" took 31us result status StatusSuccess 2024-11-21T17:48:41.453634Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/Table1" PathDescription { Self { Name: "Table1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> LdapAuthProviderTest::LdapServerIsUnavailable >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:40.312383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:40.312405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:40.312409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:40.312413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:40.312419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:40.312421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:40.312428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:40.312496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:40.320508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:40.320528Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:40.323154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:40.323750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:40.323780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:40.326227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:40.326459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:40.326542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.326600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:40.327851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.328168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:40.328182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.328244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:40.328257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:40.328264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:40.328281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.329838Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:40.342931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:40.343017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.343069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:40.343104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:40.343110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.343852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.343875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:40.343910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.343927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:40.343930Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:40.343934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:40.344283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.344292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:40.344295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:40.344620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.344630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.344635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.344643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.345081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:40.345436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:40.345480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:40.345604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.345621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:40.345629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.345677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:40.345683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.345706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:40.345714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:40.346085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:40.346092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:40.346133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.346138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:40.346226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.346232Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:40.346241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:40.346245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.346251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:40.346255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.346260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:40.346263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:40.346275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:40.346281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:40.346285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:40.346525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:40.346535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:40.346538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:40.346541Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:40.346544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:40.346555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:793:2672] TestWaitNotification: OK eventTxId 105 2024-11-21T17:48:41.839167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2024-11-21T17:48:41.839244Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 80us result status StatusSuccess 2024-11-21T17:48:41.839346Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T17:48:41.839566Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } 2024-11-21T17:48:41.840174Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:41.840197Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3020], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:48:41.840224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2024-11-21T17:48:41.840255Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2024-11-21T17:48:41.840263Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3020], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:48:41.840717Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:41.840737Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3020], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:48:41.841359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" 
DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2024-11-21T17:48:41.841402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2024-11-21T17:48:41.841465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2024-11-21T17:48:41.841998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T17:48:41.842027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2024-11-21T17:48:41.842051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2024-11-21T17:48:41.842058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2024-11-21T17:48:41.842069Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2024-11-21T17:48:41.842084Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1145:3020], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2024-11-21T17:48:41.842286Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1145:3020] 2024-11-21T17:48:41.842353Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, 
reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] Test command err: 2024-11-21T17:48:37.600552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790959074628014:2058];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:37.600812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001231/r3tmp/tmpERohws/pdisk_1.dat 2024-11-21T17:48:37.706833Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:37.709534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:37.709562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:37.719963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23512, node 1 2024-11-21T17:48:37.787026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:37.787038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:37.787040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:37.787082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5148 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:37.829838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.830673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:37.830680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.832297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:48:37.832352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:48:37.832359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:48:37.832831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:37.832839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2024-11-21T17:48:37.833124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.834041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211317879, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:37.834051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:48:37.834108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:48:37.834527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:37.834567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:37.834575Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:48:37.834585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:48:37.834592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:48:37.834602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:48:37.835169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:48:37.835177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:48:37.835181Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:48:37.835198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T17:48:37.835899Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:37.852074Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:44520 Call 2024-11-21T17:48:37.854264Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:44520 2024-11-21T17:48:38.025941Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:44520 Call Call 2024-11-21T17:48:38.046628Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS INFO: User has no permission to perform query on this database, database: /Root, user: test_user@builtin, from ip: ipv6:[::1]:44520 2024-11-21T17:48:38.060311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:38.060422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, 
opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:38.060433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:38.060452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:48:38.060482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:48:38.060491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T17:48:38.061242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +(ConnDB):test_user@builtin:- 2024-11-21T17:48:38.061287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:38.061356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:38.061606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:38.061620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:38.061625Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:48:38.061648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 2024-11-21T17:48:38.737849Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790963748761182:2058];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:38.738064Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001231/r3tmp/tmp0yYhk9/pdisk_1.dat 2024-11-21T17:48:38.761704Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31931, node 4 2024-11-21T17:48:38.780486Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:38.780500Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:38.780505Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:38.780557Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13179 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:38.838316Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:38.838343Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected ... ile: (empty maybe) 2024-11-21T17:48:40.441019Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:48:40.518217Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:40.518249Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:40.519799Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:40.521583Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.521686Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:40.521696Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.522216Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:48:40.522281Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:48:40.522289Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:48:40.522757Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:40.522842Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:40.522851Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:48:40.523264Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.524210Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211320567, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:40.524221Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:48:40.524290Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:48:40.524732Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:40.524776Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:40.524791Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:48:40.524806Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:48:40.524829Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:48:40.524844Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:48:40.525012Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:48:40.525024Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:48:40.525028Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:48:40.525040Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:26777 2024-11-21T17:48:40.562036Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.562149Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:40.562160Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.562743Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2024-11-21T17:48:40.564662Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211320609, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:40.565089Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:48:40.565104Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:48:40.565309Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:48:40.566265Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.566401Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:40.566411Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:48:40.566807Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
2024-11-21T17:48:41.073772Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7439790976696542841:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:41.074878Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:41.074903Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:41.075888Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:48:41.076275Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2024-11-21T17:48:41.076573Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:41.100245Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:48:41.100655Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:41.100677Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:48:41.100740Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2024-11-21T17:48:41.100787Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:41.100800Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T17:48:41.100966Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:41.102716Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2024-11-21T17:48:41.486848Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211321533, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:41.487448Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:48:41.487491Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T17:48:41.487609Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T17:48:41.545016Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 1732211321590, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:41.550703Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T17:48:41.550742Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2024-11-21T17:48:41.550746Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2024-11-21T17:48:41.552219Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T17:48:41.552366Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapStats::AddRowsSomeTablesInTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 28474, MsgBus: 9284 2024-11-21T17:46:14.114634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790344162139449:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:14.114785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e4e/r3tmp/tmpw5Fk6P/pdisk_1.dat 2024-11-21T17:46:14.172187Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28474, node 1 2024-11-21T17:46:14.183543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:14.183558Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:14.183560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:14.183589Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9284 2024-11-21T17:46:14.214916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:14.214942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:14.216010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:14.253140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLESTORE `/Root/TableStoreTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:46:14.413030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790344162140045:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.413074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:14.415907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:46:14.422514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:14.422543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:14.422565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:14.422583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:14.422605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:14.422629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:14.422642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:14.422652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:14.422666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:14.422680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:14.422696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:14.422716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790344162140112:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:46:14.423028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:46:14.423037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:46:14.423046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:46:14.423049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:46:14.423058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:46:14.423064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:46:14.423070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:46:14.423073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:46:14.423078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:46:14.423081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:46:14.423084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:46:14.423087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:46:14.423137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:46:14.423146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:46:14.423160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:46:14.423166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:46:14.423173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:46:14.423190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:46:14.423207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:46:14.423215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:46:14.423229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:46:14.423237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; CREATE TABLE ` ... 2TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=55496;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=55496;columns=3; >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> 
LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> TxUsage::WriteToTopic_Demo_15 [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> ColumnBuildTest::CancelBuild [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> ColumnBuildTest::BaseCase >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::WithoutPartitionWithSplit >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> TxUsage::WriteToTopic_Demo_16 >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:41.116141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:41.116159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:41.116162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:41.116165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:41.116169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T17:48:41.116171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:41.116177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:41.116265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:41.125284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:41.125304Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:41.127262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:41.127751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:41.127783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:41.129126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:41.129318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:41.129405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:41.129466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:41.130368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:41.130608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:41.130615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:41.130643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:41.130647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:41.130652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:41.130666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.131741Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:41.144029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:41.144122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.144184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:41.144241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:41.144249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.144946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:41.144969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:41.145012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.145028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:41.145032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:41.145036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:41.145388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.145398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:41.145402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:41.145718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.145727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.145732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:41.145738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:41.146275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:41.146674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:41.146718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:41.146845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:41.146862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:41.146869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T17:48:41.146908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:41.146913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:41.146936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:41.146944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:41.147296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:41.147303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:41.147342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:41.147345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:41.147414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:41.147419Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:41.147427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:41.147430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:41.147434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:41.147439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:41.147443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:41.147447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:41.147458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:41.147463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:41.147467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:41.147772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:41.147786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:41.147790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:41.147795Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:41.147799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:41.147813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... usAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:48:42.859478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T17:48:42.859502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:48:42.859533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:48:42.859537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:48:42.859540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2024-11-21T17:48:42.859603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:42.859622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:42.859628Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2024-11-21T17:48:42.859633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2024-11-21T17:48:42.860028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T17:48:42.860038Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2024-11-21T17:48:42.860048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T17:48:42.860051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:48:42.860067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2024-11-21T17:48:42.860076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710761 2024-11-21T17:48:42.860080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:48:42.860083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976710761:0 2024-11-21T17:48:42.860086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T17:48:42.860098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T17:48:42.860474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T17:48:42.860490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T17:48:42.860499Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2024-11-21T17:48:42.860509Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1135:2999], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:48:42.860840Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:48:42.860855Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1135:2999], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:48:42.860862Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2024-11-21T17:48:42.861092Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:48:42.861100Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1135:2999], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:48:42.861106Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T17:48:42.861125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:48:42.861129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1159:3023] TestWaitNotification: OK eventTxId 102 2024-11-21T17:48:42.861436Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T17:48:42.861519Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } 2024-11-21T17:48:42.861648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:42.861688Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 44us result status StatusSuccess 2024-11-21T17:48:42.861782Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" 
TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ColumnBuildTest::ValidDefaultValue [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> BsControllerTest::SelfHealBlock4Plus2 >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> BsControllerTest::DecommitRejected |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteCachingPolicy >> BsControllerTest::DecommitRejected [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:40.875908Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:40.875932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:40.875936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:40.875940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:40.875945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:40.875947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:40.875954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:40.876022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:40.886130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:40.886156Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:40.890121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:40.891148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:40.891198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:40.893321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:40.893611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:40.893730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.893814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:40.895120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.895389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:40.895400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.895437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:40.895446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:40.895452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:40.895469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.896969Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:40.918751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:40.918862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.918932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:40.918982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:40.918991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.920036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.920067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:40.920119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.920140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:40.920145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:40.920151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:40.920695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.920709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:40.920714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:40.921125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.921136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.921142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.921149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.921751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:40.922152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:40.922216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: 
Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:40.922392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.922416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:40.922426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.922476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:40.922482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.922511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:40.922521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:40.922896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:40.922904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:40.922945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.922950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:40.923033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.923040Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:40.923051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:40.923055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.923061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:40.923066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.923070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:40.923075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:40.923087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:40.923093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:40.923097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:40.923370Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:40.923382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:40.923386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:40.923391Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:40.923396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:40.923410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2024-11-21T17:48:43.301673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T17:48:43.301678Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2024-11-21T17:48:43.301683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2024-11-21T17:48:43.301704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2024-11-21T17:48:43.302092Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:43.302106Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: 
StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:43.302127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2024-11-21T17:48:43.302146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2024-11-21T17:48:43.302175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T17:48:43.302179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2024-11-21T17:48:43.302183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T17:48:43.312860Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1814:3681], Recipient [1:753:2645]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:48:43.312878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:48:43.366426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2024-11-21T17:48:43.366488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 665 RawX2: 4294969873 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T17:48:43.366502Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2024-11-21T17:48:43.366509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2024-11-21T17:48:43.366993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T17:48:43.367004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2024-11-21T17:48:43.367017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2024-11-21T17:48:43.367021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T17:48:43.367026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2024-11-21T17:48:43.367038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:569:2509] message: TxId: 281474976725761 2024-11-21T17:48:43.367043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T17:48:43.367047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and 
all the parts is done, operation id: 281474976725761:0 2024-11-21T17:48:43.367053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2024-11-21T17:48:43.367065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T17:48:43.367560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2024-11-21T17:48:43.367576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2024-11-21T17:48:43.367589Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2024-11-21T17:48:43.367615Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:43.368050Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:43.368067Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:43.368075Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T17:48:43.368408Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:43.368421Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: 
[OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1143:3018], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:43.368428Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2024-11-21T17:48:43.368447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:48:43.368452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1161:3036] TestWaitNotification: OK eventTxId 106 2024-11-21T17:48:43.368749Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2024-11-21T17:48:43.368833Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:40.436967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:40.436994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:40.436997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:40.437001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2024-11-21T17:48:40.437005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:40.437008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:40.437016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:40.437122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:40.445142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:40.445165Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:40.453698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:40.454613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:40.454655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:40.456561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:40.456789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:40.456877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.456960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:40.457975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.458266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:40.458274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.458313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:40.458320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:40.458327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:40.458339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.459516Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:40.474230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:40.474336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.474410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:40.474458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:40.474467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.476520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.476554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:40.476609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.476630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:40.476635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:40.476641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:40.477283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.477300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:40.477306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:40.477718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.477727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.477734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.477742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.478386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:40.478837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:40.478899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:40.479107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:40.479132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:40.479140Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.479203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:40.479210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:40.479243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:40.479256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:40.479696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:40.479708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:40.479753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:40.479759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:40.479857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:40.479865Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:40.479877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:40.479882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.479888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:40.479893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:40.479898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:40.479902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:40.479915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:40.479921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:40.479925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:40.480224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:40.480254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:40.480259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:40.480273Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:40.480283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:40.480295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.254303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2047:3914], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.254861Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2048:3915], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.255397Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2049:3916], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.255855Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2050:3917], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.256255Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2051:3918], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.256747Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2052:3919], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.257214Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2053:3920], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.257591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2054:3921], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.258006Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 268830210, Sender [1:2055:3922], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.258543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2056:3923], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.258981Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2057:3924], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.259350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2058:3925], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.259724Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2059:3926], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.260068Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3927], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.260616Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3928], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.261223Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3929], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.261847Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3930], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.262460Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3931], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow 
\'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.263053Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3932], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.263492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3933], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.263882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3934], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.264258Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3935], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2024-11-21T17:48:43.264888Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3936], Recipient [1:748:2640]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] Test command err: Trying to start YDB, gRPC: 1793, MsgBus: 12588 2024-11-21T17:48:24.018811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790901710566895:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:24.019055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012d9/r3tmp/tmp9cDjJU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1793, node 1 2024-11-21T17:48:24.070982Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:24.079161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:24.079173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:24.079175Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:24.079211Z node 1 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:12588 TClient is connected to server localhost:12588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:48:24.118942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:24.118975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:24.120084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:24.131233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.137929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.161372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.190153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.221574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:24.286463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790901710568427:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.286491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.328815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.338336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.353360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.367171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.378447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.434435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:24.460591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790901710568945:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.460626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.460829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790901710568950:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:24.461709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:24.464395Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T17:48:24.464501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790901710568952:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:48:24.656650Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790901710569245:2458], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2024-11-21T17:48:24.656742Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2I5YzJlOC02ZjAxMmY3Ni04ZTY5Njc1ZC1jN2I5ZDVmZQ==, ActorId: [1:7439790901710569238:2454], ActorState: ExecuteState, TraceId: 01jd7xdt6c0s4zj1a81whe6jkz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:48:24.659697Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790901710569251:2461], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2024-11-21T17:48:24.659772Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA3OGYxODAtMWU2ZWZhNWEtNTU4ZWVjMmMtZTMzYjE2YjI=, ActorId: [1:7439790901710569249:2460], ActorState: ExecuteState, TraceId: 01jd7xdt6hfmagg4zf204m8n3x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 18148, MsgBus: 17949 2024-11-21T17:48:24.893385Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790901483143163:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:24.895830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012d9/r3tmp/tmpTY5ann/pdisk_1.dat 2024-11-21T17:48:24.907948Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18148, node 2 2024-11-21T17:48:24.919049Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:24.919058Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:24.919060Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:24.919107Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17949 TClient is connected to server localhost:17949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:24.993012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:24.993050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:24.994127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:24.995339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
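(Decoded for readability: each TEvLocalMKQL event in the BuildColumnDoesnotRestoreDeletedRows output above carries the same escaped Program Text, differing only in the Uint64 key literal. Unescaping the key-27 variant, with whitespace normalized, gives the MKQL program below; nothing is reconstructed beyond the unescaping.)

    (
      (let key '('('key (Uint64 '27))))
      (let select '('key))
      (return (AsList
        (SetResult 'Result (SelectRow '__user__Table key select))
      ))
    )
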
2024-11-21T17:48:24.996519Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:25.207538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439790905778111040:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, i ... iority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=120192;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=120192;columns=2; 2024-11-21T17:48:37.064838Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790956628800343:14374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.064873Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.064969Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439790956628800348:14377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:37.066112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:48:37.068917Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439790956628800350:14378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T17:48:41.080930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:48:41.080946Z node 3 :IMPORT WARN: Table profiles were not loaded |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2024-11-21T17:48:43.670282Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T17:48:43.670303Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T17:48:43.670342Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T17:48:43.670348Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T17:48:43.670355Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T17:48:43.670359Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T17:48:43.670366Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T17:48:43.670370Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T17:48:43.670376Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T17:48:43.670380Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T17:48:43.670386Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T17:48:43.670390Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T17:48:43.670396Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T17:48:43.670400Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T17:48:43.670406Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T17:48:43.670410Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T17:48:43.670416Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T17:48:43.670420Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T17:48:43.670426Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T17:48:43.670430Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T17:48:43.670436Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T17:48:43.670440Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T17:48:43.670446Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T17:48:43.670450Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T17:48:43.670469Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T17:48:43.670473Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T17:48:43.670481Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T17:48:43.670485Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T17:48:43.670491Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T17:48:43.670495Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T17:48:43.672818Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:508:32] Status# ERROR ClientId# [1:508:32] ServerId# [0:0:0] PipeClient# [1:508:32] 2024-11-21T17:48:43.672936Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:509:20] Status# ERROR ClientId# [2:509:20] ServerId# [0:0:0] PipeClient# [2:509:20] 2024-11-21T17:48:43.672944Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:510:20] Status# ERROR ClientId# [3:510:20] ServerId# [0:0:0] PipeClient# [3:510:20] 2024-11-21T17:48:43.672951Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:511:20] Status# ERROR ClientId# [4:511:20] ServerId# [0:0:0] PipeClient# [4:511:20] 2024-11-21T17:48:43.672957Z 5 
00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:512:20] Status# ERROR ClientId# [5:512:20] ServerId# [0:0:0] PipeClient# [5:512:20] 2024-11-21T17:48:43.672964Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:513:20] Status# ERROR ClientId# [6:513:20] ServerId# [0:0:0] PipeClient# [6:513:20] 2024-11-21T17:48:43.672971Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:514:20] Status# ERROR ClientId# [7:514:20] ServerId# [0:0:0] PipeClient# [7:514:20] 2024-11-21T17:48:43.672977Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:515:20] Status# ERROR ClientId# [8:515:20] ServerId# [0:0:0] PipeClient# [8:515:20] 2024-11-21T17:48:43.672984Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:516:20] Status# ERROR ClientId# [9:516:20] ServerId# [0:0:0] PipeClient# [9:516:20] 2024-11-21T17:48:43.672991Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:517:20] Status# ERROR ClientId# [10:517:20] ServerId# [0:0:0] PipeClient# [10:517:20] 2024-11-21T17:48:43.673000Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:518:20] Status# ERROR ClientId# [11:518:20] ServerId# [0:0:0] PipeClient# [11:518:20] 2024-11-21T17:48:43.673006Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:519:20] Status# ERROR ClientId# [12:519:20] ServerId# [0:0:0] PipeClient# [12:519:20] 2024-11-21T17:48:43.673012Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:520:20] Status# ERROR ClientId# [13:520:20] ServerId# [0:0:0] PipeClient# [13:520:20] 2024-11-21T17:48:43.673019Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:521:20] Status# ERROR ClientId# [14:521:20] ServerId# [0:0:0] PipeClient# [14:521:20] 2024-11-21T17:48:43.673025Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:522:20] Status# ERROR ClientId# [15:522:20] ServerId# [0:0:0] PipeClient# [15:522:20] 2024-11-21T17:48:43.679543Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2024-11-21T17:48:43.679564Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2024-11-21T17:48:43.679572Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2024-11-21T17:48:43.679579Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2024-11-21T17:48:43.679586Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2024-11-21T17:48:43.679592Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2024-11-21T17:48:43.679600Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2024-11-21T17:48:43.679607Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2024-11-21T17:48:43.679614Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2024-11-21T17:48:43.679620Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2024-11-21T17:48:43.679626Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2024-11-21T17:48:43.679633Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2024-11-21T17:48:43.679639Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2024-11-21T17:48:43.679645Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2024-11-21T17:48:43.679652Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2024-11-21T17:48:43.680003Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:574:58] Status# OK ClientId# [1:574:58] ServerId# [1:603:59] PipeClient# [1:574:58] 2024-11-21T17:48:43.680010Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2024-11-21T17:48:43.680462Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:575:21] Status# OK ClientId# [2:575:21] ServerId# [1:604:60] PipeClient# [2:575:21] 
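(The BS_NODE entries in this DecommitRejected log walk each VDisk through the status changes recorded further below: INIT_PENDING, then REPLICATING, then READY, with the self-heal reassignment in between. Because the harness concatenates many entries onto one physical line, the easiest way to pull those transitions out of a captured "Test command err" block is to scan the whole text at once; the sketch below is a throwaway Python helper, the file name is hypothetical, and the regex assumes only the "VDiskId# [...] status changed to X" shape visible in this log.)

    import re
    from collections import defaultdict

    # Hypothetical capture of one "Test command err" block from this run.
    log_text = open("decommit_rejected.err.txt").read()

    # Entries are concatenated on long physical lines, so scan the whole text rather than per line.
    status_re = re.compile(
        r"VDiskId# \[([0-9a-fA-F]+:\d+:\d+:\d+:\d+)\] status changed to ([A-Z_]+)")

    transitions = defaultdict(list)
    for vdisk, status in status_re.findall(log_text):
        transitions[vdisk].append(status)

    for vdisk, states in sorted(transitions.items()):
        # Typical sequence in the entries below: INIT_PENDING -> REPLICATING -> READY
        print(f"[{vdisk}] " + " -> ".join(states))
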
2024-11-21T17:48:43.680469Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2024-11-21T17:48:43.680475Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:576:21] Status# OK ClientId# [3:576:21] ServerId# [1:605:61] PipeClient# [3:576:21] 2024-11-21T17:48:43.680479Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2024-11-21T17:48:43.680484Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:577:21] Status# OK ClientId# [4:577:21] ServerId# [1:606:62] PipeClient# [4:577:21] 2024-11-21T17:48:43.680488Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2024-11-21T17:48:43.680493Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:578:21] Status# OK ClientId# [5:578:21] ServerId# [1:607:63] PipeClient# [5:578:21] 2024-11-21T17:48:43.680497Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2024-11-21T17:48:43.680502Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:579:21] Status# OK ClientId# [6:579:21] ServerId# [1:608:64] PipeClient# [6:579:21] 2024-11-21T17:48:43.680506Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2024-11-21T17:48:43.680511Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:580:21] Status# OK ClientId# [7:580:21] ServerId# [1:609:65] PipeClient# [7:580:21] 2024-11-21T17:48:43.680514Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2024-11-21T17:48:43.680522Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:581:21] Status# OK ClientId# [8:581:21] ServerId# [1:610:66] PipeClient# [8:581:21] 2024-11-21T17:48:43.680525Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2024-11-21T17:48:43.680531Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:582:21] Status# OK ClientId# [9:582:21] ServerId# [1:611:67] PipeClient# [9:582:21] 2024-11-21T17:48:43.680535Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2024-11-21T17:48:43.680541Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:583:21] Status# OK ClientId# [10:583:21] ServerId# [1:612:68] PipeClient# [10:583:21] 2024-11-21T17:48:43.680544Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2024-11-21T17:48:43.680549Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:584:21] Status# OK ClientId# [11:584:21] ServerId# [1:613:69] PipeClient# [11:584:21] 2024-11-21T17:48:43.680552Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2024-11-21T17:48:43.680557Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:585:21] Status# OK ClientId# [12:585:21] ServerId# [1:614:70] PipeClient# [12:585:21] 2024-11-21T17:48:43.680561Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2024-11-21T17:48:43.680566Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:586:21] Status# OK ClientId# [13:586:21] ServerId# [1:615:71] PipeClient# [13:586:21] 2024-11-21T17:48:43.680570Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2024-11-21T17:48:43.680579Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:587:21] Status# OK ClientId# [14:587:21] ServerId# [1:616:72] PipeClient# [14:587:21] 2024-11-21T17:48:43.680583Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2024-11-21T17:48:43.680588Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:588:21] Status# OK ClientId# [15:588:21] ServerId# [1:617:73] 
PipeClient# [15:588:21] 2024-11-21T17:48:43.680591Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2024-11-21T17:48:43.680911Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:43.680921Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T17:48:43.683166Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2024-11-21T17:48:43.683412Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T17:48:43.683421Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T17:48:43.683433Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2024-11-21T17:48:43.683445Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T17:48:43.683450Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T17:48:43.683456Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2024-11-21T17:48:43.683474Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T17:48:43.683479Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T17:48:43.683485Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2024-11-21T17:48:43.683494Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T17:48:43.683499Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T17:48:43.683505Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2024-11-21T1 ... 
},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732606Z 3 00h01m18.317512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2024-11-21T17:48:43.732641Z 1 00h01m18.317512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732659Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732667Z 2 00h01m20.394512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2024-11-21T17:48:43.732697Z 1 00h01m20.394512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732712Z 1 00h01m21.408512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2024-11-21T17:48:43.732730Z 1 00h01m21.408512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732743Z 11 00h01m22.459512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2024-11-21T17:48:43.732765Z 1 00h01m22.459512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732784Z 12 00h01m26.535512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2024-11-21T17:48:43.732808Z 1 00h01m26.535512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732822Z 13 00h01m27.548024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2024-11-21T17:48:43.732853Z 1 00h01m27.548024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] 
Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732924Z 7 00h01m27.548536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2024-11-21T17:48:43.732928Z 7 00h01m27.548536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2024-11-21T17:48:43.732941Z 13 00h01m28.490512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2024-11-21T17:48:43.732970Z 1 00h01m28.490512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.732983Z 10 00h01m28.816512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2024-11-21T17:48:43.733003Z 1 00h01m28.816512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.733025Z 1 00h01m30.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T17:48:43.733039Z 14 00h01m32.500536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2024-11-21T17:48:43.733069Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648 2024-11-21T17:48:43.733129Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733133Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2024-11-21T17:48:43.733156Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733159Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2024-11-21T17:48:43.733163Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733166Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2024-11-21T17:48:43.733169Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733172Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: 
{BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2024-11-21T17:48:43.733175Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733178Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2024-11-21T17:48:43.733181Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733183Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2024-11-21T17:48:43.733186Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733189Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2024-11-21T17:48:43.733192Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T17:48:43.733195Z 1 00h01m32.500536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2024-11-21T17:48:43.733403Z 1 00h01m32.501048s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:43.733409Z 1 00h01m32.501048s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2024-11-21T17:48:43.733449Z 1 00h01m32.501048s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2024-11-21T17:48:43.733452Z 1 00h01m32.501048s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483648 Success# true 2024-11-21T17:48:43.733462Z 8 00h01m32.501048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T17:48:43.733465Z 8 00h01m32.501048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2024-11-21T17:48:43.733472Z 2 00h01m32.501048s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T17:48:43.733476Z 2 00h01m32.501048s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2024-11-21T17:48:43.733482Z 3 00h01m32.501048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T17:48:43.733485Z 3 00h01m32.501048s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2024-11-21T17:48:43.733490Z 4 00h01m32.501048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T17:48:43.733493Z 4 00h01m32.501048s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2024-11-21T17:48:43.733499Z 5 00h01m32.501048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T17:48:43.733502Z 5 00h01m32.501048s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2024-11-21T17:48:43.733507Z 6 00h01m32.501048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T17:48:43.733511Z 6 00h01m32.501048s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2024-11-21T17:48:43.733516Z 9 00h01m32.501048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T17:48:43.733521Z 13 00h01m32.501048s :BS_NODE DEBUG: [13] 
NodeServiceSetUpdate 2024-11-21T17:48:43.733524Z 13 00h01m32.501048s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2024-11-21T17:48:43.733529Z 14 00h01m32.501048s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:43.733532Z 14 00h01m32.501048s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2024-11-21T17:48:43.733538Z 15 00h01m32.501048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:43.733541Z 15 00h01m32.501048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2024-11-21T17:48:43.733547Z 15 00h01m32.501048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2024-11-21T17:48:43.733637Z 14 00h01m33.297512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2024-11-21T17:48:43.733742Z 15 00h01m37.082512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2024-11-21T17:48:43.733792Z 15 00h01m38.289048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2024-11-21T17:48:43.734092Z 15 00h02m03.001048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2024-11-21T17:48:43.734187Z 9 00h02m03.001560s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T17:48:43.734192Z 9 00h02m03.001560s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] Test command err: 2024-11-21T17:48:41.570351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790973795332554:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:41.570571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001799/r3tmp/tmpQqHN4f/pdisk_1.dat 2024-11-21T17:48:41.618495Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1053, node 1 2024-11-21T17:48:41.627295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:41.627309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:41.627311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:41.627343Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:41.671380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:41.671408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:41.672473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:41.746771Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root 
keys 1 2024-11-21T17:48:41.748307Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:64093, port: 64093 2024-11-21T17:48:41.748721Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:41.760533Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:41.804447Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:41.804687Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:41.804701Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:41.848422Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:41.892439Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:41.893095Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****yEvQ (19817C67) () has now valid token of ldapuser@ldap 2024-11-21T17:48:42.036791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790978203633458:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.036988Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001799/r3tmp/tmpkK792r/pdisk_1.dat 2024-11-21T17:48:42.045276Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29802, node 2 2024-11-21T17:48:42.055379Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.055393Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.055395Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.055435Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.105824Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.107288Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24167, port: 24167 2024-11-21T17:48:42.107318Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.109562Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:42.137244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.137271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T17:48:42.138390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.152588Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****kTlA (3C4899EE) () has now valid token of ldapuser@ldap 2024-11-21T17:48:42.431468Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790980330506442:2197];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001799/r3tmp/tmpxVnlSc/pdisk_1.dat 2024-11-21T17:48:42.435365Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:48:42.445793Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6691, node 3 2024-11-21T17:48:42.454172Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.454189Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.454193Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.454259Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.494114Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.495782Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:19642 ldap://localhost:19642 ldap://localhost:11111, port: 19642 2024-11-21T17:48:42.495818Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.514770Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:42.531035Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.531081Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.532137Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.556419Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:42.556708Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:42.556732Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:42.600422Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:42.644455Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:42.644859Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****ALQA (BD126E16) () has now valid token of ldapuser@ldap 
2024-11-21T17:48:42.890768Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790981795089729:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.890789Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001799/r3tmp/tmpx1gmfa/pdisk_1.dat 2024-11-21T17:48:42.901244Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6690, node 4 2024-11-21T17:48:42.909994Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.910008Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.910010Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.910052Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.978554Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.980527Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:64339, port: 64339 2024-11-21T17:48:42.980561Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.991292Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.991324Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.991454Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T17:48:42.992364Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.032376Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.032588Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:43.032605Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:43.076415Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:43.124362Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:43.124653Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****TZFA (97F232C5) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.349637Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439790982501359428:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.349653Z node 5 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001799/r3tmp/tmplyF16O/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14657, node 5 2024-11-21T17:48:43.364131Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:43.367834Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.367844Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.367846Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.367874Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.418030Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.419399Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:25334, port: 25334 2024-11-21T17:48:43.419434Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.443403Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T17:48:43.450026Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.450055Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.451198Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.488543Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****zdMA (47164438) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.668634Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439790984062066290:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.668912Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001799/r3tmp/tmpGnVeRZ/pdisk_1.dat 2024-11-21T17:48:43.677903Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6130, node 6 2024-11-21T17:48:43.689646Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.689663Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.689666Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.689718Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.770549Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.770582Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.771628Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.773789Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.775273Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20146, port: 20146 2024-11-21T17:48:43.775304Z 
node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.799425Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:20146. Invalid credentials 2024-11-21T17:48:43.799603Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****sCOw (7A274874) () has now permanent error message 'Could not login via LDAP' >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> BsControllerTest::SelfHealMirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2024-11-21T17:48:42.248366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790979094504559:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.248412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001752/r3tmp/tmpc1I5EN/pdisk_1.dat 2024-11-21T17:48:42.303486Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27456, node 1 2024-11-21T17:48:42.316419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.316433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.316435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.316471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.349384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.349431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.350518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.402672Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.405205Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:20619, port: 20619 2024-11-21T17:48:42.405247Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:42.411008Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. 
Can't contact LDAP server 2024-11-21T17:48:42.411396Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****khZQ (32D6F30C) () has now retryable error message 'Could not login via LDAP' 2024-11-21T17:48:42.599179Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790981652424401:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.599392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001752/r3tmp/tmpZdg7sX/pdisk_1.dat 2024-11-21T17:48:42.609478Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22551, node 2 2024-11-21T17:48:42.619287Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.619300Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.619302Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.619342Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.665843Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.667400Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****5VQQ (6A209FA8) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:42.699532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.699567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.700651Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.938964Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790980022192990:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.939318Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001752/r3tmp/tmpJ7KwrN/pdisk_1.dat 2024-11-21T17:48:42.945861Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5944, node 3 2024-11-21T17:48:42.955910Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.955924Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.955926Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.955963Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.982468Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.984757Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Y2uA (B896125B) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:43.040486Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.040518Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.041631Z node 3 :HIVE WARN: HIVE#72057594037968897 
Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.248329Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790985199212721:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.248376Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001752/r3tmp/tmptnrfnM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2507, node 4 2024-11-21T17:48:43.262628Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:43.266247Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.266263Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.266265Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.266304Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.329403Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.330906Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****60sg (6955C984) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:43.348753Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.348791Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.349840Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.569566Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439790982246137057:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.569845Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001752/r3tmp/tmplnh7lM/pdisk_1.dat 2024-11-21T17:48:43.577384Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20120, node 5 2024-11-21T17:48:43.587465Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.587481Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.587483Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.587516Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.650329Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.651850Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****hBBw (8233786C) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:43.669786Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.669822Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.670927Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.905164Z node 6 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439790984278276567:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.905185Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001752/r3tmp/tmp8X9okf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2824, node 6 2024-11-21T17:48:43.918612Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:43.920875Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.920888Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.920890Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.920924Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.936877Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.938580Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:22062, port: 22062 2024-11-21T17:48:43.938614Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:44.000520Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:44.005608Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:44.005642Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:44.006687Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:44.048708Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****tSbg (9599C0CB) () has now valid token of ldapuser@ldap >> SelfHealActorTest::SingleErrorDisk [GOOD] >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> BsControllerTest::TestLocalBrokenRelocation >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TTablesWithReboots::AlterTableConfigWithReboots [GOOD] |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2024-11-21T17:48:43.577019Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T17:48:43.577034Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T17:48:43.577067Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T17:48:43.577070Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T17:48:43.577074Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T17:48:43.577076Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 
2024-11-21T17:48:43.577079Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T17:48:43.577082Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T17:48:43.577085Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T17:48:43.577088Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T17:48:43.577091Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T17:48:43.577094Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T17:48:43.577099Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T17:48:43.577103Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T17:48:43.577108Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T17:48:43.577111Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T17:48:43.577116Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T17:48:43.577119Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T17:48:43.577125Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T17:48:43.577129Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T17:48:43.577135Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T17:48:43.577138Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T17:48:43.577143Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T17:48:43.577148Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T17:48:43.577160Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T17:48:43.577163Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T17:48:43.577166Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T17:48:43.577168Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T17:48:43.577172Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T17:48:43.577175Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T17:48:43.577178Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T17:48:43.577180Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T17:48:43.577183Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T17:48:43.577186Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T17:48:43.577189Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T17:48:43.577191Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T17:48:43.577195Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T17:48:43.577197Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T17:48:43.577202Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T17:48:43.577205Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T17:48:43.577208Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T17:48:43.577211Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T17:48:43.577214Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T17:48:43.577216Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T17:48:43.577219Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T17:48:43.577222Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T17:48:43.577225Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T17:48:43.577228Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T17:48:43.577231Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T17:48:43.577233Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T17:48:43.577237Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T17:48:43.577239Z 26 00h00m00.000000s :BS_NODE DEBUG: 
[26] Connect 2024-11-21T17:48:43.577242Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T17:48:43.577246Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T17:48:43.577249Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T17:48:43.577251Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T17:48:43.577255Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T17:48:43.577257Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T17:48:43.577261Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T17:48:43.577263Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T17:48:43.577266Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T17:48:43.577269Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T17:48:43.577272Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T17:48:43.577275Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T17:48:43.578981Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2157:49] Status# ERROR ClientId# [1:2157:49] ServerId# [0:0:0] PipeClient# [1:2157:49] 2024-11-21T17:48:43.579118Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2158:37] Status# ERROR ClientId# [2:2158:37] ServerId# [0:0:0] PipeClient# [2:2158:37] 2024-11-21T17:48:43.579123Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2159:37] Status# ERROR ClientId# [3:2159:37] ServerId# [0:0:0] PipeClient# [3:2159:37] 2024-11-21T17:48:43.579127Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2160:37] Status# ERROR ClientId# [4:2160:37] ServerId# [0:0:0] PipeClient# [4:2160:37] 2024-11-21T17:48:43.579131Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2161:37] Status# ERROR ClientId# [5:2161:37] ServerId# [0:0:0] PipeClient# [5:2161:37] 2024-11-21T17:48:43.579134Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2162:37] Status# ERROR ClientId# [6:2162:37] ServerId# [0:0:0] PipeClient# [6:2162:37] 2024-11-21T17:48:43.579138Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2163:37] Status# ERROR ClientId# [7:2163:37] ServerId# [0:0:0] PipeClient# [7:2163:37] 2024-11-21T17:48:43.579141Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2164:37] Status# ERROR ClientId# [8:2164:37] ServerId# [0:0:0] PipeClient# [8:2164:37] 2024-11-21T17:48:43.579145Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2165:37] Status# ERROR ClientId# [9:2165:37] ServerId# [0:0:0] PipeClient# [9:2165:37] 2024-11-21T17:48:43.579148Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2166:37] Status# ERROR ClientId# [10:2166:37] ServerId# [0:0:0] PipeClient# [10:2166:37] 2024-11-21T17:48:43.579152Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2167:37] Status# ERROR ClientId# [11:2167:37] ServerId# [0:0:0] PipeClient# [11:2167:37] 2024-11-21T17:48:43.579156Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2168:37] Status# ERROR ClientId# [12:2168:37] ServerId# [0:0:0] PipeClient# [12:2168:37] 2024-11-21T17:48:43.579159Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2169:37] Status# ERROR ClientId# [13:2169:37] ServerId# [0:0:0] PipeClient# [13:2169:37] 2024-11-21T17:48:43.579163Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2170:37] Status# ERROR ClientId# [14:2170:37] ServerId# [0:0:0] PipeClient# [14:2170:37] 2024-11-21T17:48:43.579167Z 15 
00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2171:37] Status# ERROR ClientId# [15:2171:37] ServerId# [0:0:0] PipeClient# [15:2171:37] 2024-11-21T17:48:43.579170Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2172:37] Status# ERROR ClientId# [16:2172:37] ServerId# [0:0:0] PipeClient# [16:2172:37] 2024-11-21T17:48:43.579174Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2173:37] Status# ERROR ClientId# [17:2173:37] ServerId# [0:0:0] PipeClient# [17:2173:37] 2024-11-21T17:48:43.579178Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2174:37] Status# ERROR ClientId# [18:2174:37] ServerId# [0:0:0] PipeClient# [18:2174:37] 2024-11-21T17:48:43.579183Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2175:37] Status# ERROR ClientId# [19:2175:37] ServerId# [0:0:0] PipeClient# [19:2175:37] 2024-11-21T17:48:43.579187Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2176:37] Status# ERROR ClientId# [20:2176:37] ServerId# [0:0:0] PipeClient# [20:2176:37] 2024-11-21T17:48:43.579190Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2177:37] Status# ERROR ClientId# [21:2177:37] ServerId# [0:0:0] PipeClient# [21:2177:37] 2024-11-21T17:48:43.579194Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2178:37] Status# ERROR ClientId# [22:2178:37] ServerId# [0:0:0] PipeClient# [22:2178:37] 2024-11-21T17:48:43.579197Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2179:37] Status# ERROR ClientId# [23:2179:37] ServerId# [0:0:0] PipeClient# [23:2179:37] 2024-11-21T17:48:43.579201Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2180:37] Status# ERROR ClientId# [24:2180:37] ServerId# [0:0:0] PipeClient# [24:2180:37] 2024-11-21T17:48:43.579204Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2181:37] Status# ERROR ClientId# [25:2181:37] ServerId# [0:0:0] PipeClient# [25:2181:37] 2024-11-21T17:48:43.579208Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2182:37] Status# ERROR ClientId# [26:2182:37] ServerId# [0:0:0] PipeClient# [26:2182:37] 2024-11-21T17:48:43.579211Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2183:37] Status# ERROR ClientId# [27:2183:37] ServerId# [0:0:0] PipeClient# [27:2183:37] 2024-11-21T17:48:43.579215Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2184:37] Status# ERROR ClientId# [28:2184:37] ServerId# [0:0:0] PipeClient# [28:2184:37] 2024-11-21T17:48:43.579218Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2185:37] Status# ERROR ClientId# [29:2185:37] ServerId# [0:0:0] PipeClient# [29:2185:37] 2024-11-21T17:48:43.579222Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2186:37] Status# ERROR ClientId# [30:2186:37] ServerId# [0:0:0] PipeClient# [30:2186:37] 2024-11-21T17:48:43.579226Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2187:37] Status# ERROR ClientId# [31:2187:37] ServerId# [0:0:0] PipeClient# [31:2187:37] 2024-11-21T17:48:43.579231Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2188:37] Status# ERROR ClientId# [32:2188:37] ServerId# [0:0:0] PipeClient# [32:2188:37] 2024-11-21T17:48:43.597787Z 1 00h00m00.002048s :BS_NODE DEBUG: [1] CheckState from [1:2253:71] expected 1 current 0 2024-11-21T17:48:43.597808Z 2 00h00m00.002048s :BS_NODE DEBUG: [2] CheckState from [2:2254:38] expected 1 current 0 
2024-11-21T17:48:43.597814Z 3 00h00m00.002048s :BS_NODE DEBUG: [3] CheckState from [3:2255:38] expected 1 current 0 2024-11-21T17:48:43.597819Z 4 00h00m00.002048s :BS_NODE DEBUG: [4] CheckState from [4:2256:38] expected 1 current 0 2024-11-21T17:48:43.597825Z 5 00h00m00.002048s :BS_NODE DEBUG: [5] CheckState from [5:2257:38] expected 1 current 0 2024-11-21T17:48:43.597830Z 6 00h00m00.002048s :BS_NODE DEBUG: [6] CheckState from [6:2258:38] expected 1 current 0 2024-11-21T17:48:43.597835Z 7 00h00m00.002048s :BS_NODE DEBUG: [7] CheckState from [7:2259:38] expected 1 current 0 2024-11-21T17:48:43.597840Z 8 00h00m00.002048s :BS_NODE DEBUG: [8] CheckState from [8:2260:38] expected 1 current 0 2024-11-21T17:48:43.597845Z 9 00h00m00.002048s :BS_NODE DEBUG: [9] CheckState from [9:2261:38] expected 1 current 0 2024-11-21T17:48:43.597851Z 10 00h00m00.002048s :BS_NODE DEBUG: [10] CheckState from [10:2262 ... SetUpdate 2024-11-21T17:48:44.666323Z 28 05h15m00.117920s :BS_NODE DEBUG: [28] VDiskId# [8000000b:2:0:3:0] -> [8000000b:3:0:3:0] 2024-11-21T17:48:44.666329Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T17:48:44.666332Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] VDiskId# [8000000b:2:0:4:0] -> [8000000b:3:0:4:0] 2024-11-21T17:48:44.666339Z 31 05h15m00.117920s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2024-11-21T17:48:44.666342Z 31 05h15m00.117920s :BS_NODE DEBUG: [31] VDiskId# [8000000b:2:0:6:0] -> [8000000b:3:0:6:0] 2024-11-21T17:48:44.666347Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:44.666350Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] VDiskId# [8000000b:2:0:7:0] -> [8000000b:3:0:7:0] 2024-11-21T17:48:44.666355Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.666363Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T17:48:44.666366Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [8000001d:8:0:6:0] -> [8000001d:9:0:6:0] 2024-11-21T17:48:44.666371Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:44.666375Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [8000001d:8:0:4:0] -> [8000001d:9:0:4:0] 2024-11-21T17:48:44.666381Z 4 05h15m00.117920s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T17:48:44.666386Z 4 05h15m00.117920s :BS_NODE DEBUG: [4] VDiskId# [8000001d:8:0:2:0] -> [8000001d:9:0:2:0] 2024-11-21T17:48:44.666393Z 9 05h15m00.117920s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T17:48:44.666398Z 9 05h15m00.117920s :BS_NODE DEBUG: [9] VDiskId# [8000001d:8:0:0:0] -> [8000001d:9:0:0:0] 2024-11-21T17:48:44.666406Z 11 05h15m00.117920s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T17:48:44.666409Z 11 05h15m00.117920s :BS_NODE DEBUG: [11] VDiskId# [8000001d:8:0:5:0] -> [8000001d:9:0:5:0] 2024-11-21T17:48:44.666415Z 12 05h15m00.117920s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T17:48:44.666420Z 12 05h15m00.117920s :BS_NODE DEBUG: [12] VDiskId# [8000001d:8:0:3:0] -> [8000001d:9:0:3:0] 2024-11-21T17:48:44.666427Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:44.666432Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] VDiskId# [8000001d:9:0:1:0] PDiskId# 1001 VSlotId# 1017 created 2024-11-21T17:48:44.666437Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] VDiskId# [8000001d:9:0:1:0] status changed to INIT_PENDING 2024-11-21T17:48:44.666442Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.666448Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 
2024-11-21T17:48:44.666452Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] VDiskId# [8000001d:8:0:7:0] -> [8000001d:9:0:7:0] 2024-11-21T17:48:44.666467Z 7 05h15m00.117920s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2024-11-21T17:48:44.666470Z 7 05h15m00.117920s :BS_NODE DEBUG: [7] VDiskId# [80000033:3:0:5:0] PDiskId# 1000 VSlotId# 1018 created 2024-11-21T17:48:44.666474Z 7 05h15m00.117920s :BS_NODE DEBUG: [7] VDiskId# [80000033:3:0:5:0] status changed to INIT_PENDING 2024-11-21T17:48:44.666479Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2024-11-21T17:48:44.666483Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] VDiskId# [80000033:2:0:0:0] -> [80000033:3:0:0:0] 2024-11-21T17:48:44.666488Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T17:48:44.666491Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000033:2:0:1:0] -> [80000033:3:0:1:0] 2024-11-21T17:48:44.666496Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:44.666500Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] VDiskId# [80000033:2:0:2:0] -> [80000033:3:0:2:0] 2024-11-21T17:48:44.666505Z 28 05h15m00.117920s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2024-11-21T17:48:44.666508Z 28 05h15m00.117920s :BS_NODE DEBUG: [28] VDiskId# [80000033:2:0:3:0] -> [80000033:3:0:3:0] 2024-11-21T17:48:44.666513Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T17:48:44.666516Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] VDiskId# [80000033:2:0:4:0] -> [80000033:3:0:4:0] 2024-11-21T17:48:44.666522Z 31 05h15m00.117920s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2024-11-21T17:48:44.666525Z 31 05h15m00.117920s :BS_NODE DEBUG: [31] VDiskId# [80000033:2:0:6:0] -> [80000033:3:0:6:0] 2024-11-21T17:48:44.666531Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:44.666535Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] VDiskId# [80000033:2:0:7:0] -> [80000033:3:0:7:0] 2024-11-21T17:48:44.666539Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.666547Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2024-11-21T17:48:44.666550Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] VDiskId# [80000023:2:0:0:0] -> [80000023:3:0:0:0] 2024-11-21T17:48:44.666555Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T17:48:44.666558Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000023:2:0:1:0] -> [80000023:3:0:1:0] 2024-11-21T17:48:44.666564Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:44.666567Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] VDiskId# [80000023:2:0:2:0] -> [80000023:3:0:2:0] 2024-11-21T17:48:44.666572Z 11 05h15m00.117920s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T17:48:44.666575Z 11 05h15m00.117920s :BS_NODE DEBUG: [11] VDiskId# [80000023:3:0:5:0] PDiskId# 1001 VSlotId# 1018 created 2024-11-21T17:48:44.666580Z 11 05h15m00.117920s :BS_NODE DEBUG: [11] VDiskId# [80000023:3:0:5:0] status changed to INIT_PENDING 2024-11-21T17:48:44.666585Z 28 05h15m00.117920s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2024-11-21T17:48:44.666588Z 28 05h15m00.117920s :BS_NODE DEBUG: [28] VDiskId# [80000023:2:0:3:0] -> [80000023:3:0:3:0] 2024-11-21T17:48:44.666593Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T17:48:44.666596Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] VDiskId# [80000023:2:0:4:0] -> [80000023:3:0:4:0] 2024-11-21T17:48:44.666602Z 31 05h15m00.117920s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2024-11-21T17:48:44.666605Z 31 05h15m00.117920s 
:BS_NODE DEBUG: [31] VDiskId# [80000023:2:0:6:0] -> [80000023:3:0:6:0] 2024-11-21T17:48:44.666611Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:44.666614Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] VDiskId# [80000023:2:0:7:0] -> [80000023:3:0:7:0] 2024-11-21T17:48:44.666618Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.666626Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:44.666629Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000013:3:0:5:0] PDiskId# 1001 VSlotId# 1017 created 2024-11-21T17:48:44.666634Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000013:3:0:5:0] status changed to INIT_PENDING 2024-11-21T17:48:44.666640Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2024-11-21T17:48:44.666643Z 25 05h15m00.117920s :BS_NODE DEBUG: [25] VDiskId# [80000013:2:0:0:0] -> [80000013:3:0:0:0] 2024-11-21T17:48:44.666652Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T17:48:44.666657Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000013:2:0:1:0] -> [80000013:3:0:1:0] 2024-11-21T17:48:44.666666Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T17:48:44.666671Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] VDiskId# [80000013:2:0:2:0] -> [80000013:3:0:2:0] 2024-11-21T17:48:44.666679Z 28 05h15m00.117920s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2024-11-21T17:48:44.666682Z 28 05h15m00.117920s :BS_NODE DEBUG: [28] VDiskId# [80000013:2:0:3:0] -> [80000013:3:0:3:0] 2024-11-21T17:48:44.666687Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T17:48:44.666691Z 29 05h15m00.117920s :BS_NODE DEBUG: [29] VDiskId# [80000013:2:0:4:0] -> [80000013:3:0:4:0] 2024-11-21T17:48:44.666696Z 31 05h15m00.117920s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2024-11-21T17:48:44.666699Z 31 05h15m00.117920s :BS_NODE DEBUG: [31] VDiskId# [80000013:2:0:6:0] -> [80000013:3:0:6:0] 2024-11-21T17:48:44.666704Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:44.666707Z 32 05h15m00.117920s :BS_NODE DEBUG: [32] VDiskId# [80000013:2:0:7:0] -> [80000013:3:0:7:0] 2024-11-21T17:48:44.666713Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.667321Z 17 05h15m02.938920s :BS_NODE DEBUG: [17] VDiskId# [8000001b:3:0:5:0] status changed to REPLICATING 2024-11-21T17:48:44.667376Z 11 05h15m03.208920s :BS_NODE DEBUG: [11] VDiskId# [80000023:3:0:5:0] status changed to REPLICATING 2024-11-21T17:48:44.667409Z 7 05h15m03.221920s :BS_NODE DEBUG: [7] VDiskId# [8000003b:3:0:5:0] status changed to REPLICATING 2024-11-21T17:48:44.667448Z 21 05h15m03.565920s :BS_NODE DEBUG: [21] VDiskId# [8000000b:3:0:5:0] status changed to REPLICATING 2024-11-21T17:48:44.667492Z 21 05h15m04.020920s :BS_NODE DEBUG: [21] VDiskId# [80000013:3:0:5:0] status changed to REPLICATING 2024-11-21T17:48:44.667535Z 14 05h15m04.502920s :BS_NODE DEBUG: [14] VDiskId# [8000001d:9:0:1:0] status changed to REPLICATING 2024-11-21T17:48:44.667566Z 11 05h15m04.983920s :BS_NODE DEBUG: [11] VDiskId# [8000002b:3:0:5:0] status changed to REPLICATING 2024-11-21T17:48:44.667713Z 7 05h15m05.592920s :BS_NODE DEBUG: [7] VDiskId# [80000033:3:0:5:0] status changed to REPLICATING 2024-11-21T17:48:44.667785Z 21 05h15m12.455920s :BS_NODE DEBUG: [21] VDiskId# [80000013:3:0:5:0] status changed to READY 2024-11-21T17:48:44.668687Z 15 05h15m12.456432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.668704Z 15 05h15m12.456432s :BS_NODE 
DEBUG: [15] VDiskId# [80000013:2:0:5:0] destroyed 2024-11-21T17:48:44.668834Z 21 05h15m21.591920s :BS_NODE DEBUG: [21] VDiskId# [8000000b:3:0:5:0] status changed to READY 2024-11-21T17:48:44.669713Z 15 05h15m21.592432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.669725Z 15 05h15m21.592432s :BS_NODE DEBUG: [15] VDiskId# [8000000b:2:0:5:0] destroyed 2024-11-21T17:48:44.669783Z 11 05h15m25.080920s :BS_NODE DEBUG: [11] VDiskId# [80000023:3:0:5:0] status changed to READY 2024-11-21T17:48:44.670700Z 15 05h15m25.081432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.670712Z 15 05h15m25.081432s :BS_NODE DEBUG: [15] VDiskId# [80000023:2:0:5:0] destroyed 2024-11-21T17:48:44.670737Z 14 05h15m25.641920s :BS_NODE DEBUG: [14] VDiskId# [8000001d:9:0:1:0] status changed to READY 2024-11-21T17:48:44.671552Z 15 05h15m25.642432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.671561Z 15 05h15m25.642432s :BS_NODE DEBUG: [15] VDiskId# [8000001d:8:0:1:0] destroyed 2024-11-21T17:48:44.671586Z 17 05h15m26.551920s :BS_NODE DEBUG: [17] VDiskId# [8000001b:3:0:5:0] status changed to READY 2024-11-21T17:48:44.672435Z 15 05h15m26.552432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.672445Z 15 05h15m26.552432s :BS_NODE DEBUG: [15] VDiskId# [8000001b:2:0:5:0] destroyed 2024-11-21T17:48:44.672622Z 11 05h15m33.238920s :BS_NODE DEBUG: [11] VDiskId# [8000002b:3:0:5:0] status changed to READY 2024-11-21T17:48:44.673460Z 15 05h15m33.239432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.673469Z 15 05h15m33.239432s :BS_NODE DEBUG: [15] VDiskId# [8000002b:2:0:5:0] destroyed 2024-11-21T17:48:44.673488Z 7 05h15m33.702920s :BS_NODE DEBUG: [7] VDiskId# [80000033:3:0:5:0] status changed to READY 2024-11-21T17:48:44.674312Z 15 05h15m33.703432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.674321Z 15 05h15m33.703432s :BS_NODE DEBUG: [15] VDiskId# [80000033:2:0:5:0] destroyed 2024-11-21T17:48:44.674343Z 7 05h15m34.039920s :BS_NODE DEBUG: [7] VDiskId# [8000003b:3:0:5:0] status changed to READY 2024-11-21T17:48:44.675151Z 15 05h15m34.040432s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:44.675161Z 15 05h15m34.040432s :BS_NODE DEBUG: [15] VDiskId# [8000003b:2:0:5:0] destroyed >> TKesusTest::TestQuoterAccountResourcesBurst >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> TKesusTest::TestRegisterProxy |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> TKesusTest::TestKesusConfig >> BasicUsage::ConflictingWrites [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestQuoterResourceDescribe >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TTableProfileTests::OverwriteCachingPolicy [GOOD] >> 
TTableProfileTests::ExplicitPartitionsWrongKeyType >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceModification >> TKesusTest::TestUnregisterProxy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2024-11-21T17:48:42.603041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790979214559695:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.603357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173a/r3tmp/tmpcGtEGB/pdisk_1.dat 2024-11-21T17:48:42.656787Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25313, node 1 2024-11-21T17:48:42.672000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.672011Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.672014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.672042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.703831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.703860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.704998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.719011Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.719512Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:5584, port: 5584 2024-11-21T17:48:42.719540Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.800518Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:42.844451Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:42.892822Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****5ECA (23A1AC1C) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.050536Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790985671838992:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.050561Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173a/r3tmp/tmp8fCqYp/pdisk_1.dat 2024-11-21T17:48:43.061260Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7642, node 2 2024-11-21T17:48:43.068462Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T17:48:43.068474Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.068475Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.068506Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.150739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.150763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.151852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.155734Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.157055Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:64565, port: 64565 2024-11-21T17:48:43.157079Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.220459Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.268427Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.268607Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:43.268620Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.316433Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.360433Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.360837Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****2uSQ (52EC98CA) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.512850Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790983833778845:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.512871Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173a/r3tmp/tmpdpQDJT/pdisk_1.dat 2024-11-21T17:48:43.523579Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21947, node 3 2024-11-21T17:48:43.533149Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.533166Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.533169Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.533217Z node 3 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2024-11-21T17:48:43.570464Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.572011Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9138, port: 9138 2024-11-21T17:48:43.572039Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.613162Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.613196Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.614265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.628459Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.676637Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****sNFA (50CC584E) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.821404Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790983319024721:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.821555Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173a/r3tmp/tmpA3HALq/pdisk_1.dat 2024-11-21T17:48:43.829301Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27048, node 4 2024-11-21T17:48:43.839660Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.839672Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.839673Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.839711Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.903547Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.905117Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:21191 ldaps://localhost:21191 ldaps://localhost:11111, port: 21191 2024-11-21T17:48:43.905145Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.923852Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.923874Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.924922Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.984491Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:44.032411Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:44.032623Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:44.032648Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:44.080479Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:44.128425Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:44.129014Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****_8fQ (B8F1D528) () has now valid token of ldapuser@ldap 2024-11-21T17:48:44.316057Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439790988487480905:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:44.316168Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173a/r3tmp/tmpfVfOcp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6605, node 5 2024-11-21T17:48:44.331045Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:44.333880Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:44.333894Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:44.333897Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:44.333931Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:44.415411Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:44.415442Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:44.416762Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:44.576166Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:44.578347Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:30108, port: 30108 2024-11-21T17:48:44.578380Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:44.636494Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T17:48:44.680429Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:44.680639Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:44.680658Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:44.724443Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:44.768435Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:44.768817Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****DDUg (1A007838) () has now valid token of ldapuser@ldap 2024-11-21T17:48:45.000472Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439790993535153813:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:45.000489Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00173a/r3tmp/tmpSXJjIk/pdisk_1.dat 2024-11-21T17:48:45.014717Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14755, node 6 2024-11-21T17:48:45.022300Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:45.022313Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:45.022315Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:45.022349Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:45.072399Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:45.073935Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:21651, port: 21651 2024-11-21T17:48:45.073965Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:45.100951Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:45.100986Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:45.102080Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:45.136508Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2024-11-21T17:48:45.136541Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:21651. 
Bad search filter 2024-11-21T17:48:45.136787Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****qNwg (75F0450D) () has now permanent error message 'Could not login via LDAP' ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableConfigWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:21.565980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:21.566001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.566005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:21.566010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:21.566019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:21.566023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:21.566031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.566116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:21.576473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:21.576492Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.578670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:21.578767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:21.578790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:21.581716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:21.581785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:21.581848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.581988Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.582547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.582742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.582748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.582755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:21.582760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.582765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:21.582793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:21.583733Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.597187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:21.597247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.597291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:21.597320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:21.597326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.597879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.597898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:21.597940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.597950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:21.597954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:21.597958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 2 -> 3 2024-11-21T17:48:21.598387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.598398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:21.598402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:21.598900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.598924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.598935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.598944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.599739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:21.600425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:21.600490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:21.600740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.600779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:21.600791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.600868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:21.600875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.600904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.600923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.601333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.601344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.601380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.601387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:21.601460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.601467Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:21.601476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:21.601480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.601485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:21.601489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.601493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:21.601496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:21.601507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:21.601512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:21.601517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... tionId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000005 MaxStep: 18446744073709551615 PrepareArriveTime: 79000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 46 } } 2024-11-21T17:48:44.724087Z node 86 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#1004:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000005 MaxStep: 18446744073709551615 PrepareArriveTime: 79000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 46 } } 2024-11-21T17:48:44.724091Z node 86 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:48:44.724107Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 1004:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.724111Z node 86 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 3 -> 128 2024-11-21T17:48:44.724470Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.724499Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.724505Z node 86 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:44.724516Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1004 ready parts: 1/1 2024-11-21T17:48:44.724548Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1004 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:44.724882Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T17:48:44.724906Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1004 at step: 5000005 2024-11-21T17:48:44.724984Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:44.725004Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 369367189608 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:44.725015Z node 86 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 HandleReply TEvOperationPlan, operationId: 1004:0, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:48:44.725083Z node 86 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 129 2024-11-21T17:48:44.725103Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2024-11-21T17:48:44.726057Z node 86 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:44.726066Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:44.726124Z node 86 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:44.726130Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [86:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T17:48:44.726209Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.726216Z node 86 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:48:44.726534Z node 86 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:44.726548Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 
Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:44.726553Z node 86 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:48:44.726558Z node 86 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:48:44.726564Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:48:44.726580Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T17:48:44.726776Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 200 } } 2024-11-21T17:48:44.726783Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:44.726798Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 200 } } 2024-11-21T17:48:44.726810Z node 86 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 200 } } 2024-11-21T17:48:44.726886Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 369367189770 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:44.726892Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:44.726904Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 369367189770 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:44.726909Z node 86 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:44.726917Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 369367189770 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:44.726926Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:44.726931Z node 86 :FLAT_TX_SCHEMESHARD INFO: all shard 
schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.726935Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:44.726940Z node 86 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T17:48:44.727548Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:44.727620Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.727638Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.727693Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:44.727700Z node 86 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:48:44.727712Z node 86 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:48:44.727716Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:44.727722Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T17:48:44.727727Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:44.727732Z node 86 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:48:44.727736Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:48:44.727755Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:48:44.728205Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:44.728213Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:44.728297Z node 86 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:44.728314Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:44.728319Z node 86 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [86:494:2469] TestWaitNotification: OK eventTxId 1004 >> ColumnBuildTest::BaseCase [GOOD] >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2024-11-21T17:48:44.852983Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T17:48:44.853002Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T17:48:44.853035Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T17:48:44.853039Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 
2024-11-21T17:48:44.853043Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T17:48:44.853046Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T17:48:44.853050Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T17:48:44.853053Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T17:48:44.853057Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T17:48:44.853060Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T17:48:44.853063Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T17:48:44.853066Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T17:48:44.853070Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T17:48:44.853076Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T17:48:44.853080Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T17:48:44.853082Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T17:48:44.853086Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T17:48:44.853089Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T17:48:44.853092Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T17:48:44.853095Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T17:48:44.853099Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T17:48:44.853101Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T17:48:44.853105Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T17:48:44.853108Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T17:48:44.853114Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T17:48:44.853116Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T17:48:44.853120Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T17:48:44.853123Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T17:48:44.853126Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T17:48:44.853129Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T17:48:44.853133Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T17:48:44.853136Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T17:48:44.853139Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T17:48:44.853142Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T17:48:44.853145Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T17:48:44.853147Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T17:48:44.853151Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T17:48:44.853154Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T17:48:44.853160Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T17:48:44.853163Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T17:48:44.853167Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T17:48:44.853170Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T17:48:44.853173Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T17:48:44.853176Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T17:48:44.853179Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T17:48:44.853183Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T17:48:44.853188Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T17:48:44.853192Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T17:48:44.853203Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T17:48:44.853207Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] 
Connect 2024-11-21T17:48:44.853212Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T17:48:44.853216Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T17:48:44.853222Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T17:48:44.853228Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T17:48:44.853234Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T17:48:44.853238Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T17:48:44.853244Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T17:48:44.853248Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T17:48:44.853254Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T17:48:44.853258Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T17:48:44.853263Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T17:48:44.853266Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T17:48:44.853272Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T17:48:44.853275Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T17:48:44.853278Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T17:48:44.853281Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T17:48:44.853284Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T17:48:44.853287Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T17:48:44.853292Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T17:48:44.853294Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T17:48:44.853300Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T17:48:44.853303Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T17:48:44.855372Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T17:48:44.855589Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T17:48:44.855596Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T17:48:44.855601Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T17:48:44.855605Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T17:48:44.855609Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T17:48:44.855613Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T17:48:44.855617Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T17:48:44.855620Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T17:48:44.855624Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 
2024-11-21T17:48:44.855628Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T17:48:44.855632Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T17:48:44.855636Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T17:48:44.855640Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T17:48:44.855646Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T17:48:44.855650Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T17:48:44.855654Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T17:48:44.855658Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T17:48:44.855663Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T17:48:44.855666Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T17:48:44.855670Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T17:48:44.855674Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T17:48:44.855678Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T17:48:44.855682Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T17:48:44.855686Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T17:48:44.855690Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T17:48:44.855694Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T17:48:44.855697Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T17:48:44.855702Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 
2024-11-21T17:48:44.855706Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T17:48:44.855709Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T17:48:44.855713Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T17:48:44.855717Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T17:48:44.855721Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T17:48:44.855724Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2024-11-21T17:48:45.333919Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2024-11-21T17:48:45.333921Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2024-11-21T17:48:45.333924Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2024-11-21T17:48:45.333927Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2024-11-21T17:48:45.333961Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T17:48:45.333967Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2024-11-21T17:48:45.333971Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2024-11-21T17:48:45.333974Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2024-11-21T17:48:45.333977Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2024-11-21T17:48:45.333980Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2024-11-21T17:48:45.333983Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2024-11-21T17:48:45.333986Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2024-11-21T17:48:45.333989Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2024-11-21T17:48:45.333992Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2024-11-21T17:48:45.333995Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2024-11-21T17:48:45.333998Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2024-11-21T17:48:45.334001Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2024-11-21T17:48:45.334004Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2024-11-21T17:48:45.334007Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2024-11-21T17:48:45.334010Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2024-11-21T17:48:45.334012Z 13 
01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2024-11-21T17:48:45.334041Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2024-11-21T17:48:45.334047Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2024-11-21T17:48:45.334050Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2024-11-21T17:48:45.334053Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2024-11-21T17:48:45.334057Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2024-11-21T17:48:45.334060Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2024-11-21T17:48:45.334063Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2024-11-21T17:48:45.334065Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2024-11-21T17:48:45.334068Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2024-11-21T17:48:45.334071Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2024-11-21T17:48:45.334075Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2024-11-21T17:48:45.334077Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2024-11-21T17:48:45.334114Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2024-11-21T17:48:45.334120Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2024-11-21T17:48:45.334123Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2024-11-21T17:48:45.334127Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2024-11-21T17:48:45.334130Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2024-11-21T17:48:45.334133Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2024-11-21T17:48:45.334136Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2024-11-21T17:48:45.334140Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2024-11-21T17:48:45.334143Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2024-11-21T17:48:45.334667Z 2 01h25m01.545560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.334726Z 10 01h25m01.559560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2024-11-21T17:48:45.334762Z 10 01h25m02.085560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2024-11-21T17:48:45.334798Z 4 01h25m02.481560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.334835Z 8 01h25m02.736560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2024-11-21T17:48:45.334883Z 4 01h25m02.835560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.334927Z 2 01h25m02.952560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.334963Z 10 01h25m03.158560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 
2024-11-21T17:48:45.334993Z 5 01h25m03.568560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.335031Z 5 01h25m03.721560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.335063Z 7 01h25m04.036560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2024-11-21T17:48:45.335096Z 4 01h25m04.219560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.335128Z 7 01h25m04.478560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2024-11-21T17:48:45.335467Z 7 01h25m05.317560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2024-11-21T17:48:45.335535Z 4 01h25m05.346560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2024-11-21T17:48:45.335586Z 7 01h25m05.527560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2024-11-21T17:48:45.335640Z 10 01h25m08.048560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2024-11-21T17:48:45.337140Z 1 01h25m08.049072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.337153Z 1 01h25m08.049072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2024-11-21T17:48:45.337185Z 10 01h25m08.276560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2024-11-21T17:48:45.338536Z 1 01h25m08.277072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.338547Z 1 01h25m08.277072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2024-11-21T17:48:45.338623Z 4 01h25m11.182560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2024-11-21T17:48:45.339895Z 1 01h25m11.183072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.339906Z 1 01h25m11.183072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2024-11-21T17:48:45.339924Z 4 01h25m11.680560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2024-11-21T17:48:45.341225Z 1 01h25m11.681072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.341235Z 1 01h25m11.681072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2024-11-21T17:48:45.341259Z 7 01h25m13.343560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2024-11-21T17:48:45.342531Z 1 01h25m13.344072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.342542Z 1 01h25m13.344072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2024-11-21T17:48:45.342562Z 7 01h25m14.246560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2024-11-21T17:48:45.343815Z 1 01h25m14.247072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.343832Z 1 01h25m14.247072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2024-11-21T17:48:45.343962Z 5 01h25m16.125560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2024-11-21T17:48:45.345252Z 1 01h25m16.126072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.345263Z 1 01h25m16.126072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2024-11-21T17:48:45.345285Z 2 01h25m17.716560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2024-11-21T17:48:45.346486Z 1 01h25m17.717072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.346496Z 1 01h25m17.717072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2024-11-21T17:48:45.346517Z 5 01h25m19.857560s 
:BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2024-11-21T17:48:45.347701Z 1 01h25m19.858072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.347713Z 1 01h25m19.858072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2024-11-21T17:48:45.347785Z 4 01h25m20.213560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2024-11-21T17:48:45.349000Z 1 01h25m20.214072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.349010Z 1 01h25m20.214072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2024-11-21T17:48:45.349033Z 10 01h25m22.713560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2024-11-21T17:48:45.350230Z 1 01h25m22.714072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.350254Z 1 01h25m22.714072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2024-11-21T17:48:45.350335Z 8 01h25m23.302560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2024-11-21T17:48:45.351521Z 1 01h25m23.303072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.351531Z 1 01h25m23.303072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2024-11-21T17:48:45.351606Z 7 01h25m26.083560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2024-11-21T17:48:45.352804Z 1 01h25m26.084072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.352816Z 1 01h25m26.084072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2024-11-21T17:48:45.352940Z 2 01h25m29.305560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2024-11-21T17:48:45.354114Z 1 01h25m29.306072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.354123Z 1 01h25m29.306072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2024-11-21T17:48:45.354142Z 7 01h25m29.961560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2024-11-21T17:48:45.355335Z 1 01h25m29.962072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.355345Z 1 01h25m29.962072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2024-11-21T17:48:45.355483Z 4 01h25m30.008560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2024-11-21T17:48:45.356685Z 1 01h25m30.009072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T17:48:45.356697Z 1 01h25m30.009072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TKesusTest::TestAcquireSemaphoreTimeout >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2024-11-21T17:48:45.189309Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.189350Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.191926Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.191946Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.213161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.213370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=16308311568924673071, session=0, seqNo=0) 2024-11-21T17:48:45.213443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
Created new session 1 2024-11-21T17:48:45.224187Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=16308311568924673071, session=1) 2024-11-21T17:48:45.224295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=6910616617601326355, session=0, seqNo=0) 2024-11-21T17:48:45.224321Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:45.234905Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=6910616617601326355, session=2) 2024-11-21T17:48:45.235073Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:45.235118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:45.235146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:45.245816Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T17:48:45.245911Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2024-11-21T17:48:45.245945Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2024-11-21T17:48:45.245959Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2024-11-21T17:48:45.256688Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=112) 2024-11-21T17:48:45.256774Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=333, name="Lock1") 2024-11-21T17:48:45.256801Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2024-11-21T17:48:45.256837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:45.256850Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2024-11-21T17:48:45.256858Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2024-11-21T17:48:45.256872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2024-11-21T17:48:45.267518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=333) 2024-11-21T17:48:45.267542Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=222) 2024-11-21T17:48:45.267546Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=223) 2024-11-21T17:48:45.267606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=334, name="Lock2") 2024-11-21T17:48:45.267626Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2024-11-21T17:48:45.267633Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 
2024-11-21T17:48:45.278280Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=334) 2024-11-21T17:48:45.278406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:159:2183], cookie=2574482692722510627, name="Lock1") 2024-11-21T17:48:45.278445Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:159:2183], cookie=2574482692722510627) 2024-11-21T17:48:45.278496Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:162:2186], cookie=5090039357597986522, name="Lock2") 2024-11-21T17:48:45.278502Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:162:2186], cookie=5090039357597986522) 2024-11-21T17:48:45.280449Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.280465Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.280501Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.280544Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.312198Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.312256Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2024-11-21T17:48:45.312264Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2024-11-21T17:48:45.312352Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:202:2216], cookie=2940392904017241693, name="Lock1") 2024-11-21T17:48:45.312370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:202:2216], cookie=2940392904017241693) 2024-11-21T17:48:45.312471Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:210:2223], cookie=14317839139411036384, name="Lock2") 2024-11-21T17:48:45.312478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:210:2223], cookie=14317839139411036384) 2024-11-21T17:48:45.423852Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.423874Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.426173Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.426210Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.437009Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.437132Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=4996621012124126044, session=0, seqNo=0) 2024-11-21T17:48:45.437156Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:45.458018Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=4996621012124126044, session=1) 2024-11-21T17:48:45.458091Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=10309715441616395858, session=0, seqNo=0) 2024-11-21T17:48:45.458118Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:45.468657Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=10309715441616395858, session=2) 
2024-11-21T17:48:45.468834Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:45.468862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:45.468873Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:45.479483Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2024-11-21T17:48:45.479541Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2024-11-21T17:48:45.479563Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2024-11-21T17:48:45.479574Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2024-11-21T17:48:45.490116Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=112) 2024-11-21T17:48:45.490213Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=333, session=1, semaphore="Lock1" count=1) 2024-11-21T17:48:45.490279Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:45.490297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2024-11-21T17:48:45.490312Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2024-11-21T17:48:45.501224Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=333) 2024-11-21T17:48:45.501261Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222) 2024-11-21T17:48:45.501265Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=223) 2024-11-21T17:48:45.501378Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2181], cookie=68182109802204137, name="Lock1") 2024-11-21T17:48:45.501394Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2181], cookie=68182109802204137) 2024-11-21T17:48:45.501431Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2184], cookie=4048247210896570845, name="Lock2") 2024-11-21T17:48:45.501438Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2184], cookie=4048247210896570845) 2024-11-21T17:48:45.501474Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:163:2187], cookie=6386257676386923553, name="Lock1") 2024-11-21T17:48:45.501478Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:163:2187], cookie=6386257676386923553) 2024-11-21T17:48:45.501509Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:166:2190], cookie=4946030175919608738, name="Lock2") 2024-11-21T17:48:45.501522Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:166:2190], 
cookie=4946030175919608738) 2024-11-21T17:48:45.501544Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=444, session=2, semaphore="Lock2" count=1) 2024-11-21T17:48:45.501570Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2024-11-21T17:48:45.512211Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=444) 2024-11-21T17:48:45.512352Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2195], cookie=13522807095047988192, name="Lock2") 2024-11-21T17:48:45.512371Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2195], cookie=13522807095047988192) 2024-11-21T17:48:45.512409Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2198], cookie=7637260590753831138, name="Lock2") 2024-11-21T17:48:45.512414Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2198], cookie=7637260590753831138) 2024-11-21T17:48:45.514010Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.514024Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.514056Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.514136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.555859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.555901Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:45.555908Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2024-11-21T17:48:45.555913Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2024-11-21T17:48:45.555917Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2024-11-21T17:48:45.556000Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:214:2228], cookie=12135368777471165197, name="Lock1") 2024-11-21T17:48:45.556018Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:214:2228], cookie=12135368777471165197) 2024-11-21T17:48:45.556122Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:222:2235], cookie=10064619567170396954, name="Lock2") 2024-11-21T17:48:45.556128Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:222:2235], cookie=10064619567170396954) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] Test command err: 2024-11-21T17:48:43.005534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790986019887925:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.005803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/0016df/r3tmp/tmpplVXBW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8463, node 1 2024-11-21T17:48:43.064253Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:43.068353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.068370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.068372Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.068404Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.107482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.107512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.108635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.145059Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.146843Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21770, port: 21770 2024-11-21T17:48:43.146874Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:43.176011Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.216427Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.264429Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.264603Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:43.264618Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.308414Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.356428Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.357011Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****05_Q (9F0AA81C) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.444247Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790982978976103:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.444318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016df/r3tmp/tmp4Lz4eF/pdisk_1.dat 2024-11-21T17:48:43.452381Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25477, node 2 
2024-11-21T17:48:43.463317Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.463328Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.463329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.463355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.544765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.544797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.545853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.625218Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.626684Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:62228 ldap://localhost:62228 ldap://localhost:11111, port: 62228 2024-11-21T17:48:43.626706Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:43.646578Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.688465Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.732421Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.732627Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:43.732644Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.776404Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.820466Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.820902Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****bcvQ (2664A65C) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.948849Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790983277767545:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.949133Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016df/r3tmp/tmpGptMPK/pdisk_1.dat 2024-11-21T17:48:43.956524Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24838, node 3 2024-11-21T17:48:43.967375Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.967388Z node 3 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.967390Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.967418Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:44.049063Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:44.049090Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:44.050186Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:44.051528Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:44.053810Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:17943, port: 17943 2024-11-21T17:48:44.053835Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:44.072392Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:44.120509Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T17:48:44.168710Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****GB4Q (57451B05) () has now valid token of ldapuser@ldap 2024-11-21T17:48:44.443362Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790988895656502:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:44.443549Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016df/r3tmp/tmpPIR4qU/pdisk_1.dat 2024-11-21T17:48:44.451413Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7980, node 4 2024-11-21T17:48:44.462630Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:44.462642Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:44.462644Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:44.462675Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:44.545254Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:44.545289Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:44.546325Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:44.546508Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:44.548812Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:31409, port: 31409 2024-11-21T17:48:44.548841Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:44.584864Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:44.628488Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:31409. 
Invalid credentials 2024-11-21T17:48:44.628748Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****LiFw (386767A3) () has now permanent error message 'Could not login via LDAP' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016df/r3tmp/tmpaGShWd/pdisk_1.dat 2024-11-21T17:48:44.767695Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 24369, node 5 2024-11-21T17:48:44.774723Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:44.776360Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:44.776371Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:44.776374Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:44.776408Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:44.831298Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:44.832672Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24534, port: 24534 2024-11-21T17:48:44.832701Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:44.857538Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:44.858460Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:44.858482Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:44.859543Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:44.904451Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:24534. 
Invalid credentials 2024-11-21T17:48:44.904657Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****YrGQ (736130AD) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:45.100900Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439790992975785064:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:45.100931Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016df/r3tmp/tmplbc5AI/pdisk_1.dat 2024-11-21T17:48:45.111530Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26332, node 6 2024-11-21T17:48:45.117398Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:45.117411Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:45.117412Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:45.117447Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:45.201047Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:45.201080Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:45.202166Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:45.285808Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:45.287223Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:32282, port: 32282 2024-11-21T17:48:45.287245Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:45.312081Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:45.352483Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:45.352661Z node 6 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:32282 return no entries 2024-11-21T17:48:45.352836Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****KFJg (D8C39B52) () has now permanent error message 'Could not login via LDAP' ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:43.215714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:43.215739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:43.215744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:43.215749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:43.215755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:43.215759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:43.215767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:43.215843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:43.222816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:43.222831Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:43.225161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:43.225849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:43.225881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:43.227312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:43.227515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:43.227629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:43.227706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:43.228694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:43.228900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:43.228907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:43.228933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:43.228938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:43.228942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:43.228952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.230117Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:43.243732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:43.243810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.243857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:43.243888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:43.243894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.244539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:43.244568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:43.244607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.244626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:43.244631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:43.244636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:43.245001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.245010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:43.245014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:43.245315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.245323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.245328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:43.245334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T17:48:43.245788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:43.246168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:43.246231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:43.246380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:43.246400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:43.246407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:43.246444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:43.246448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:43.246472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:43.246480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:43.247038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:43.247050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:43.247097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:43.247101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:43.247192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:43.247211Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:43.247229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:43.247234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:43.247240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:43.247246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T17:48:43.247251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:43.247254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:43.247270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:43.247274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:43.247277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:43.247604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:43.247618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:43.247623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:43.247628Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:43.247632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:43.247649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... G: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2024-11-21T17:48:45.514635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T17:48:45.514643Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2024-11-21T17:48:45.514650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2024-11-21T17:48:45.514671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 
281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2024-11-21T17:48:45.515243Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:45.515259Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:45.515335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2024-11-21T17:48:45.515356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2024-11-21T17:48:45.515376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T17:48:45.515379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2024-11-21T17:48:45.515385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2024-11-21T17:48:45.526283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1808:3675], Recipient [1:748:2640]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:48:45.526301Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:48:45.579411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2024-11-21T17:48:45.579456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 664 RawX2: 4294969875 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2024-11-21T17:48:45.579467Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2024-11-21T17:48:45.579473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2024-11-21T17:48:45.579995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2024-11-21T17:48:45.580010Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2024-11-21T17:48:45.580023Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2024-11-21T17:48:45.580026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T17:48:45.580030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2024-11-21T17:48:45.580041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:568:2508] message: TxId: 281474976725761 2024-11-21T17:48:45.580045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2024-11-21T17:48:45.580049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2024-11-21T17:48:45.580051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2024-11-21T17:48:45.580065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T17:48:45.580561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2024-11-21T17:48:45.580573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2024-11-21T17:48:45.580584Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2024-11-21T17:48:45.580603Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:45.580934Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:45.580947Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: 
StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:45.580954Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T17:48:45.581356Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2024-11-21T17:48:45.581375Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1139:3014], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2024-11-21T17:48:45.581378Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2024-11-21T17:48:45.581394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:48:45.581398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1157:3032] TestWaitNotification: OK eventTxId 106 2024-11-21T17:48:45.581712Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2024-11-21T17:48:45.581807Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestSessionDetach >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> 
TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionStealing >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit [GOOD] >> TKesusTest::TestAttachOutOfSequence >> TKesusTest::TestAcquireWaiterDowngrade >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> YdbOlapStore::LogGrepExisting [GOOD] >> YdbOlapStore::LogExistingRequest >> TKesusTest::TestAcquireLocks >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> BsControllerTest::SelfHealMirror3dc [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TKesusTest::TestAcquireUpgrade >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2024-11-21T17:48:45.114629Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.114656Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.118080Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.118117Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.139457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.360286Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.360317Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.363837Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2024-11-21T17:48:45.363899Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.375063Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.376073Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=15373309892876239656, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2024-11-21T17:48:45.376128Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:45.396983Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], cookie=15373309892876239656) 2024-11-21T17:48:45.397111Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:139:2163], cookie=16443960541478958833, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2024-11-21T17:48:45.397148Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2024-11-21T17:48:45.407768Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:139:2163], cookie=16443960541478958833) 2024-11-21T17:48:45.407869Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:144:2168], cookie=5275479228666826999, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2024-11-21T17:48:45.407900Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2024-11-21T17:48:45.418568Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:144:2168], cookie=5275479228666826999) 2024-11-21T17:48:45.418683Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:149:2173], cookie=10997492962147617119, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2024-11-21T17:48:45.418714Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2024-11-21T17:48:45.429509Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:149:2173], cookie=10997492962147617119) 2024-11-21T17:48:45.429665Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:154:2178], cookie=15882331910609907029, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2024-11-21T17:48:45.429706Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2024-11-21T17:48:45.440458Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:154:2178], cookie=15882331910609907029) 2024-11-21T17:48:45.440634Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:159:2183], cookie=4131313090447954678, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2024-11-21T17:48:45.440691Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2024-11-21T17:48:45.451426Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:159:2183], cookie=4131313090447954678) 2024-11-21T17:48:45.451563Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:164:2188], cookie=2807577739703889421, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2024-11-21T17:48:45.451615Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 
2024-11-21T17:48:45.462528Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:164:2188], cookie=2807577739703889421) 2024-11-21T17:48:45.462714Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:169:2193], cookie=13990267818342486564, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2024-11-21T17:48:45.462779Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2024-11-21T17:48:45.473557Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:169:2193], cookie=13990267818342486564) 2024-11-21T17:48:45.473695Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:174:2198], cookie=15716355673361682165, ids=[100], paths=[], recursive=0) 2024-11-21T17:48:45.473714Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:174:2198], cookie=15716355673361682165) 2024-11-21T17:48:45.473765Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:177:2201], cookie=22870754770365011, ids=[], paths=[Nonexistent/Path], recursive=0) 2024-11-21T17:48:45.473776Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:177:2201], cookie=22870754770365011) 2024-11-21T17:48:45.473815Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:180:2204], cookie=6139999279947342413, ids=[], paths=[/Root, ], recursive=0) 2024-11-21T17:48:45.473823Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:180:2204], cookie=6139999279947342413) 2024-11-21T17:48:45.473875Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:183:2207], cookie=9762490844707919270, ids=[1, 1], paths=[], recursive=0) 2024-11-21T17:48:45.473880Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:183:2207], cookie=9762490844707919270) 2024-11-21T17:48:45.473929Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:186:2210], cookie=2321539932471579107, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2024-11-21T17:48:45.473936Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:186:2210], cookie=2321539932471579107) 2024-11-21T17:48:45.473975Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:189:2213], cookie=10953491527117177154, ids=[], paths=[], recursive=1) 2024-11-21T17:48:45.473983Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:189:2213], cookie=10953491527117177154) 2024-11-21T17:48:45.474061Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:192:2216], cookie=5186663227555721529, ids=[], paths=[], recursive=0) 2024-11-21T17:48:45.474066Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:192:2216], cookie=5186663227555721529) 2024-11-21T17:48:45.474108Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:195:2219], cookie=10150467899382248240, ids=[3, 2], paths=[], recursive=1) 2024-11-21T17:48:45.474114Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:195:2219], cookie=10150467899382248240) 2024-11-21T17:48:45.474156Z 
node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:198:2222], cookie=10485410964629280438, ids=[3, 2], paths=[], recursive=0) 2024-11-21T17:48:45.474163Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:198:2222], cookie=10485410964629280438) 2024-11-21T17:48:45.474206Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:201:2225], cookie=12694002786398815398, ids=[], paths=[Root2/], recursive=1) 2024-11-21T17:48:45.474211Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:201:2225], cookie=12694002786398815398) 2024-11-21T17:48:45.474264Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:204:2228], cookie=7637944552205390022, ids=[], paths=[Root2/], recursive=0) 2024-11-21T17:48:45.474269Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:204:2228], cookie=7637944552205390022) 2024-11-21T17:48:45.476657Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.476681Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.476729Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.476842Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.518889Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.518992Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:244:2258], cookie=7971339139831119729, ids=[100], paths=[], recursive=0) 2024-11-21T17:48:45.519012Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:244:2258], cookie=7971339139831119729) 2024-11-21T17:48:45.519118Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:250:2263], cookie=17775261708929569341, ids=[], paths=[Nonexistent/Path], recursive=0) 2024-11-21T17:48:45.519130Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:250:2263], cookie=17775261708929569341) 2024-11-21T17:48:45.519208Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:253:2266], cookie=11015515983239160660, ids=[], paths=[/Root, ], recursive=0) 2024-11-21T17:48:45.519218Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:253:2266], cookie=11015515983239160660) 2024-11-21T17:48:45.519270Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:256:2269], cookie=14445434976025913604, ids=[1, 1], paths=[], recursive=0) 2024-11-21T17:48:45.519276Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:256:2269], cookie=14445434976025913604) 2024-11-21T17:48:45.519328Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:259:2272], cookie=1719567252019822129, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2024-11-21T17:48:45.519335Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:259:2272], cookie=1719567252019822129) 2024-11-21T17:48:45.519386Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:262:2275], cookie=5698160395926802065, ids=[], paths=[], recursive=1) 
2024-11-21T17:48:45.519396Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:262:2275], cookie=5698160395926802065) 2024-11-21T17:48:45.519472Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[2:265:2278], cookie=950512884243169690, ids=[], paths=[], recursive=0) 2024-11-21T17:48:45.519478Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[2:265:2278], cookie=950512884243169690) 2024-11-21T17:48:45.519534Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] ... T DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.283334Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.283447Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=2923297931564210756, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2024-11-21T17:48:46.283484Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:46.294193Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=2923297931564210756) 2024-11-21T17:48:46.294318Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:139:2163], cookie=14082803188980975382, path="/Root/Q", config={ }) 2024-11-21T17:48:46.294355Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2024-11-21T17:48:46.305119Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:139:2163], cookie=14082803188980975382) 2024-11-21T17:48:46.305293Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:144:2168], cookie=13619372107311879783, path="/Root/Folder", config={ }) 2024-11-21T17:48:46.305349Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2024-11-21T17:48:46.316059Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:144:2168], cookie=13619372107311879783) 2024-11-21T17:48:46.316188Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:149:2173], cookie=13945337590482932590, path="/Root/Folder/Q1", config={ }) 2024-11-21T17:48:46.316240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2024-11-21T17:48:46.327048Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:149:2173], cookie=13945337590482932590) 2024-11-21T17:48:46.327191Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:154:2178], cookie=11068520193031019440, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.327214Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:154:2178], cookie=11068520193031019440) 2024-11-21T17:48:46.327323Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:160:2184], cookie=13140740735203527591, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.327332Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:160:2184], cookie=13140740735203527591) 2024-11-21T17:48:46.327422Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:166:2190], cookie=15938864277501257382, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.327429Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:166:2190], cookie=15938864277501257382) 2024-11-21T17:48:46.327479Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:169:2193], cookie=2196795236331561243, id=0, path="/Root/Folder/NonexistingRes") 2024-11-21T17:48:46.327492Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:169:2193], cookie=2196795236331561243) 2024-11-21T17:48:46.327541Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:172:2196], cookie=9970100417575299944, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.327548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:172:2196], cookie=9970100417575299944) 2024-11-21T17:48:46.327603Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:175:2199], cookie=14567462765640875623, id=100, path="") 2024-11-21T17:48:46.327610Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:175:2199], cookie=14567462765640875623) 2024-11-21T17:48:46.327659Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:178:2202], cookie=16505942214959640898, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.327665Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:178:2202], cookie=16505942214959640898) 2024-11-21T17:48:46.327714Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:181:2205], cookie=12485018930370798209, id=3, path="") 2024-11-21T17:48:46.327722Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:181:2205], cookie=12485018930370798209) 2024-11-21T17:48:46.327772Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:184:2208], cookie=11024720361419959571, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.327778Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:184:2208], cookie=11024720361419959571) 2024-11-21T17:48:46.327829Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:187:2211], cookie=2237605220293816727, id=0, path="/Root/Folder/Q1") 2024-11-21T17:48:46.327862Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2024-11-21T17:48:46.338696Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:187:2211], cookie=2237605220293816727) 2024-11-21T17:48:46.338896Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:192:2216], cookie=8515572100015617522, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.338915Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:192:2216], cookie=8515572100015617522) 2024-11-21T17:48:46.341717Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.341742Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.341799Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.341919Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.384033Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
2024-11-21T17:48:46.384122Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:232:2246], cookie=306241762175455316, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.384138Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:232:2246], cookie=306241762175455316) 2024-11-21T17:48:46.384336Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:238:2251], cookie=2024648539428928429, id=3, path="") 2024-11-21T17:48:46.384378Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2024-11-21T17:48:46.395224Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:238:2251], cookie=2024648539428928429) 2024-11-21T17:48:46.395380Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:243:2256], cookie=8381420275798857650, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.395394Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:243:2256], cookie=8381420275798857650) 2024-11-21T17:48:46.397294Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.397313Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.397365Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.397476Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.439430Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.439522Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:283:2286], cookie=11286940069866076619, ids=[], paths=[], recursive=1) 2024-11-21T17:48:46.439538Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:283:2286], cookie=11286940069866076619) 2024-11-21T17:48:46.500628Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.500652Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.503370Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.503395Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.514271Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.514360Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=3859104899850213180, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2024-11-21T17:48:46.514408Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2024-11-21T17:48:46.535251Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=3859104899850213180) 2024-11-21T17:48:46.535404Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=18143294129668197069, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2024-11-21T17:48:46.535447Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2024-11-21T17:48:46.546283Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=18143294129668197069) 2024-11-21T17:48:46.546689Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. 
Cookie: 8529428711867621291. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:46.546704Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=8529428711867621291) 2024-11-21T17:48:46.546831Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 2405712641486435215. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." 
} } } ProtocolVersion: 1 } 2024-11-21T17:48:46.546838Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=2405712641486435215) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2024-11-21T17:48:37.817442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790956558994770:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:37.817500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012b5/r3tmp/tmpPUR5BJ/pdisk_1.dat 2024-11-21T17:48:37.998048Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:38.002157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:38.002198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:38.005904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21125, node 1 2024-11-21T17:48:38.042443Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:38.042459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:38.042461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:38.042507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:38.119487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:38.125738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:38.125773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:48:38.131378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:48:38.131458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:48:38.131463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:48:38.132310Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:38.136822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:38.136833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:48:38.137629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:38.138503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211318187, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:38.138513Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:48:38.138579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:48:38.138957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:38.139000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:38.139010Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:48:38.139028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:48:38.139035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:48:38.139047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:48:38.139471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:48:38.139483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:48:38.139487Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:48:38.139502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:2527 2024-11-21T17:48:38.207081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:38.207204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:38.207209Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:38.208395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T17:48:38.208447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:38.208506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:38.208526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T17:48:38.208529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:48:38.208698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:38.208703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:38.208706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:48:38.208740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:38.208742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:38.208743Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:48:38.208853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:48:38.209295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:38.209303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:48:38.209713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:38.210569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211318257, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:38.210580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T17:48:38.210641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:48:38.210990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:38.211045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:38.211054Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] 
TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:48:38.211064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:48:38.211071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:48:38.211082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:48:38.211246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:38.211253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:38.211256Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:48:38.211290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:38.211292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:38.211294Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:48:38.211300Z node 1 :FLAT_TX_SCH ... TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:45.605414Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:45.605430Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:48:45.605443Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:48:45.605457Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:48:45.605474Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:48:45.605586Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:45.605597Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:45.605600Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:48:45.605627Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:45.605634Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:45.605636Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: 
[OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:48:45.605641Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:48:45.606536Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:48:45.606666Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:45.606676Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:48:45.607057Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T17:48:45.607095Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 2024-11-21T17:48:45.607439Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:48:45.608876Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:45.608954Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:45.608966Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T17:48:45.609344Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:46.108187Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439790995264284656:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:46.108480Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:48:46.109951Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:46.109976Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:46.110907Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T17:48:46.111119Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18369 2024-11-21T17:48:46.128145Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: 
"profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true 
DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T17:48:46.136310Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:48:46.136434Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusSchemeError, reason: Error at split boundary 0: Value of type Uint64 expected in tuple at position 1, at schemeshard: 72057594046644480 2024-11-21T17:48:46.137243Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusSchemeError, reason: Error at split boundary 0: Value of type Uint64 expected in tuple at position 1, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/table-1 2024-11-21T17:48:46.138592Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T17:48:46.138694Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:48:46.397411Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:48:46.397679Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7439790995264284985:2278], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |78.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2024-11-21T17:48:44.577360Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T17:48:44.577378Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T17:48:44.577420Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T17:48:44.577425Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T17:48:44.577431Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T17:48:44.577434Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T17:48:44.577440Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T17:48:44.577444Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T17:48:44.577450Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T17:48:44.577454Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T17:48:44.577459Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T17:48:44.577463Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T17:48:44.577469Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T17:48:44.577477Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T17:48:44.577483Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T17:48:44.577487Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T17:48:44.577493Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T17:48:44.577496Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T17:48:44.577502Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T17:48:44.577506Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T17:48:44.577512Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T17:48:44.577516Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T17:48:44.577521Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T17:48:44.577524Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T17:48:44.577533Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T17:48:44.577536Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T17:48:44.577543Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T17:48:44.577545Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T17:48:44.577550Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T17:48:44.577554Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T17:48:44.577560Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T17:48:44.577563Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T17:48:44.577569Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T17:48:44.577573Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T17:48:44.577578Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T17:48:44.577582Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T17:48:44.577588Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T17:48:44.577592Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T17:48:44.577601Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T17:48:44.577606Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T17:48:44.577611Z 
21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T17:48:44.577615Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T17:48:44.577620Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T17:48:44.577624Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T17:48:44.577629Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T17:48:44.577632Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T17:48:44.577638Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T17:48:44.577642Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T17:48:44.577650Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T17:48:44.577654Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T17:48:44.577660Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T17:48:44.577664Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T17:48:44.577670Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T17:48:44.577675Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T17:48:44.577680Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T17:48:44.577684Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T17:48:44.577690Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T17:48:44.577694Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T17:48:44.577700Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T17:48:44.577703Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T17:48:44.577708Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T17:48:44.577712Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T17:48:44.577717Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T17:48:44.577721Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T17:48:44.577727Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T17:48:44.577730Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T17:48:44.577736Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T17:48:44.577740Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T17:48:44.577745Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T17:48:44.577749Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T17:48:44.577757Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T17:48:44.577760Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T17:48:44.580415Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T17:48:44.580584Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T17:48:44.580589Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T17:48:44.580595Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T17:48:44.580601Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T17:48:44.580607Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 
2024-11-21T17:48:44.580612Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T17:48:44.580618Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T17:48:44.580623Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T17:48:44.580629Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T17:48:44.580636Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T17:48:44.580641Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T17:48:44.580649Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T17:48:44.580652Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T17:48:44.580657Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T17:48:44.580660Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T17:48:44.580664Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T17:48:44.580667Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T17:48:44.580672Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T17:48:44.580675Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T17:48:44.580679Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T17:48:44.580683Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T17:48:44.580687Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T17:48:44.580692Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T17:48:44.580696Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T17:48:44.580702Z 26 
00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T17:48:44.580708Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T17:48:44.580713Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T17:48:44.580718Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T17:48:44.580724Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T17:48:44.580730Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T17:48:44.580735Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T17:48:44.580741Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T17:48:44.580747Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T17:48:44.580753Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
1T17:48:46.738375Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:46.738378Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] VDiskId# [80000046:2:2:0:0] -> [80000046:3:2:0:0] 2024-11-21T17:48:46.738387Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T17:48:46.738391Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] VDiskId# [80000036:2:2:1:0] -> [80000036:3:2:1:0] 2024-11-21T17:48:46.738396Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T17:48:46.738399Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000036:2:0:2:0] -> [80000036:3:0:2:0] 2024-11-21T17:48:46.738405Z 21 05h45m00.119456s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:46.738410Z 21 05h45m00.119456s :BS_NODE DEBUG: [21] VDiskId# [80000036:2:1:0:0] -> [80000036:3:1:0:0] 2024-11-21T17:48:46.738417Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T17:48:46.738420Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [80000036:2:1:1:0] -> [80000036:3:1:1:0] 2024-11-21T17:48:46.738425Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T17:48:46.738428Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000036:2:0:0:0] -> [80000036:3:0:0:0] 2024-11-21T17:48:46.738434Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T17:48:46.738437Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000036:2:2:2:0] -> [80000036:3:2:2:0] 2024-11-21T17:48:46.738442Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T17:48:46.738446Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] VDiskId# [80000036:2:0:1:0] -> [80000036:3:0:1:0] 2024-11-21T17:48:46.738452Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.738455Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000036:3:1:2:0] PDiskId# 1000 VSlotId# 1008 created 2024-11-21T17:48:46.738459Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000036:3:1:2:0] status changed to INIT_PENDING 2024-11-21T17:48:46.738465Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:46.738468Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] VDiskId# [80000036:2:2:0:0] -> [80000036:3:2:0:0] 2024-11-21T17:48:46.738478Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T17:48:46.738481Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] VDiskId# [80000026:2:2:1:0] -> [80000026:3:2:1:0] 2024-11-21T17:48:46.738487Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T17:48:46.738490Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000026:2:0:2:0] -> [80000026:3:0:2:0] 2024-11-21T17:48:46.738495Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T17:48:46.738499Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [80000026:2:1:0:0] -> [80000026:3:1:0:0] 2024-11-21T17:48:46.738504Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T17:48:46.738507Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [80000026:2:1:1:0] -> [80000026:3:1:1:0] 2024-11-21T17:48:46.738513Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T17:48:46.738516Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000026:2:0:0:0] -> [80000026:3:0:0:0] 2024-11-21T17:48:46.738521Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T17:48:46.738524Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000026:2:2:2:0] -> [80000026:3:2:2:0] 2024-11-21T17:48:46.738530Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 
2024-11-21T17:48:46.738533Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] VDiskId# [80000026:2:0:1:0] -> [80000026:3:0:1:0] 2024-11-21T17:48:46.738539Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.738542Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000026:3:1:2:0] PDiskId# 1002 VSlotId# 1008 created 2024-11-21T17:48:46.738547Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] VDiskId# [80000026:3:1:2:0] status changed to INIT_PENDING 2024-11-21T17:48:46.738553Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:46.738556Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] VDiskId# [80000026:2:2:0:0] -> [80000026:3:2:0:0] 2024-11-21T17:48:46.738565Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T17:48:46.738568Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] VDiskId# [80000016:2:2:1:0] -> [80000016:3:2:1:0] 2024-11-21T17:48:46.738574Z 18 05h45m00.119456s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T17:48:46.738578Z 18 05h45m00.119456s :BS_NODE DEBUG: [18] VDiskId# [80000016:2:1:0:0] -> [80000016:3:1:0:0] 2024-11-21T17:48:46.738584Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T17:48:46.738587Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000016:2:0:2:0] -> [80000016:3:0:2:0] 2024-11-21T17:48:46.738593Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T17:48:46.738596Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [80000016:3:1:2:0] PDiskId# 1000 VSlotId# 1009 created 2024-11-21T17:48:46.738600Z 20 05h45m00.119456s :BS_NODE DEBUG: [20] VDiskId# [80000016:3:1:2:0] status changed to INIT_PENDING 2024-11-21T17:48:46.738606Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T17:48:46.738609Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] VDiskId# [80000016:2:1:1:0] -> [80000016:3:1:1:0] 2024-11-21T17:48:46.738615Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T17:48:46.738618Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000016:2:0:0:0] -> [80000016:3:0:0:0] 2024-11-21T17:48:46.738623Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T17:48:46.738627Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000016:2:2:2:0] -> [80000016:3:2:2:0] 2024-11-21T17:48:46.738632Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T17:48:46.738635Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] VDiskId# [80000016:2:0:1:0] -> [80000016:3:0:1:0] 2024-11-21T17:48:46.738639Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.738645Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:46.738648Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] VDiskId# [80000016:2:2:0:0] -> [80000016:3:2:0:0] 2024-11-21T17:48:46.738657Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T17:48:46.738660Z 35 05h45m00.119456s :BS_NODE DEBUG: [35] VDiskId# [80000006:2:2:1:0] -> [80000006:3:2:1:0] 2024-11-21T17:48:46.738666Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T17:48:46.738669Z 2 05h45m00.119456s :BS_NODE DEBUG: [2] VDiskId# [80000006:2:0:2:0] -> [80000006:3:0:2:0] 2024-11-21T17:48:46.738674Z 21 05h45m00.119456s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T17:48:46.738677Z 21 05h45m00.119456s :BS_NODE DEBUG: [21] VDiskId# [80000006:2:1:0:0] -> [80000006:3:1:0:0] 2024-11-21T17:48:46.738683Z 23 05h45m00.119456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T17:48:46.738686Z 23 05h45m00.119456s :BS_NODE 
DEBUG: [23] VDiskId# [80000006:2:1:1:0] -> [80000006:3:1:1:0] 2024-11-21T17:48:46.738691Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T17:48:46.738694Z 8 05h45m00.119456s :BS_NODE DEBUG: [8] VDiskId# [80000006:2:0:0:0] -> [80000006:3:0:0:0] 2024-11-21T17:48:46.738700Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T17:48:46.738703Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000006:2:2:2:0] -> [80000006:3:2:2:0] 2024-11-21T17:48:46.738708Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T17:48:46.738712Z 11 05h45m00.119456s :BS_NODE DEBUG: [11] VDiskId# [80000006:2:0:1:0] -> [80000006:3:0:1:0] 2024-11-21T17:48:46.738716Z 14 05h45m00.119456s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.738723Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T17:48:46.738726Z 32 05h45m00.119456s :BS_NODE DEBUG: [32] VDiskId# [80000006:2:2:0:0] -> [80000006:3:2:0:0] 2024-11-21T17:48:46.738732Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T17:48:46.738737Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] VDiskId# [80000006:3:1:2:0] PDiskId# 1003 VSlotId# 1008 created 2024-11-21T17:48:46.738743Z 15 05h45m00.119456s :BS_NODE DEBUG: [15] VDiskId# [80000006:3:1:2:0] status changed to INIT_PENDING 2024-11-21T17:48:46.739375Z 13 05h45m02.245456s :BS_NODE DEBUG: [13] VDiskId# [80000066:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739422Z 15 05h45m03.246456s :BS_NODE DEBUG: [15] VDiskId# [80000006:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739453Z 14 05h45m03.940456s :BS_NODE DEBUG: [14] VDiskId# [80000036:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739500Z 20 05h45m03.997456s :BS_NODE DEBUG: [20] VDiskId# [80000056:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739548Z 20 05h45m04.061456s :BS_NODE DEBUG: [20] VDiskId# [80000016:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739593Z 17 05h45m04.468456s :BS_NODE DEBUG: [17] VDiskId# [80000046:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739623Z 13 05h45m04.476456s :BS_NODE DEBUG: [13] VDiskId# [80000076:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739777Z 14 05h45m05.466456s :BS_NODE DEBUG: [14] VDiskId# [80000026:3:1:2:0] status changed to REPLICATING 2024-11-21T17:48:46.739869Z 15 05h45m11.584456s :BS_NODE DEBUG: [15] VDiskId# [80000006:3:1:2:0] status changed to READY 2024-11-21T17:48:46.741349Z 14 05h45m11.584968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.741360Z 14 05h45m11.584968s :BS_NODE DEBUG: [14] VDiskId# [80000006:2:1:2:0] destroyed 2024-11-21T17:48:46.741388Z 20 05h45m14.338456s :BS_NODE DEBUG: [20] VDiskId# [80000016:3:1:2:0] status changed to READY 2024-11-21T17:48:46.742779Z 14 05h45m14.338968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.742787Z 14 05h45m14.338968s :BS_NODE DEBUG: [14] VDiskId# [80000016:2:1:2:0] destroyed 2024-11-21T17:48:46.742902Z 17 05h45m15.420456s :BS_NODE DEBUG: [17] VDiskId# [80000046:3:1:2:0] status changed to READY 2024-11-21T17:48:46.744330Z 14 05h45m15.420968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.744339Z 14 05h45m15.420968s :BS_NODE DEBUG: [14] VDiskId# [80000046:2:1:2:0] destroyed 2024-11-21T17:48:46.744396Z 14 05h45m22.394456s :BS_NODE DEBUG: [14] VDiskId# [80000036:3:1:2:0] status changed to READY 2024-11-21T17:48:46.745808Z 14 05h45m22.394968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.745815Z 14 
05h45m22.394968s :BS_NODE DEBUG: [14] VDiskId# [80000036:2:1:2:0] destroyed 2024-11-21T17:48:46.745992Z 13 05h45m30.575456s :BS_NODE DEBUG: [13] VDiskId# [80000066:3:1:2:0] status changed to READY 2024-11-21T17:48:46.747420Z 14 05h45m30.575968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.747429Z 14 05h45m30.575968s :BS_NODE DEBUG: [14] VDiskId# [80000066:2:1:2:0] destroyed 2024-11-21T17:48:46.747451Z 20 05h45m33.074456s :BS_NODE DEBUG: [20] VDiskId# [80000056:3:1:2:0] status changed to READY 2024-11-21T17:48:46.748981Z 14 05h45m33.074968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.748991Z 14 05h45m33.074968s :BS_NODE DEBUG: [14] VDiskId# [80000056:2:1:2:0] destroyed 2024-11-21T17:48:46.749046Z 14 05h45m35.383456s :BS_NODE DEBUG: [14] VDiskId# [80000026:3:1:2:0] status changed to READY 2024-11-21T17:48:46.750587Z 14 05h45m35.383968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.750596Z 14 05h45m35.383968s :BS_NODE DEBUG: [14] VDiskId# [80000026:2:1:2:0] destroyed 2024-11-21T17:48:46.750649Z 13 05h45m37.624456s :BS_NODE DEBUG: [13] VDiskId# [80000076:3:1:2:0] status changed to READY 2024-11-21T17:48:46.752134Z 14 05h45m37.624968s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T17:48:46.752146Z 14 05h45m37.624968s :BS_NODE DEBUG: [14] VDiskId# [80000076:2:1:2:0] destroyed >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TTablesWithReboots::DropCopyWithRebootsAtCommit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2024-11-21T17:48:46.019596Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.019618Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.022362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.022392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.043817Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.043946Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=923006777346295081, session=0, seqNo=0) 2024-11-21T17:48:46.043984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.054748Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=923006777346295081, session=1) 2024-11-21T17:48:46.054962Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:130:2156], cookie=15958241877300150744, session=2) 2024-11-21T17:48:46.054976Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=15958241877300150744) 2024-11-21T17:48:46.055013Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=11746368911342293970 2024-11-21T17:48:46.055057Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=2180188272739145871, session=1, seqNo=0) 2024-11-21T17:48:46.065793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=2180188272739145871, session=1) 2024-11-21T17:48:46.065882Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:46.065929Z 
node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:46.065956Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:46.065992Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:130:2156], cookie=2987320922638892515, session=1) 2024-11-21T17:48:46.076140Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T17:48:46.076160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:48:46.076166Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2024-11-21T17:48:46.086754Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T17:48:46.086773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=2987320922638892515) 2024-11-21T17:48:46.086780Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T17:48:46.262711Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.262740Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.265233Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.265289Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.276546Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.276632Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:130:2156], cookie=14330322552517973350, path="") 2024-11-21T17:48:46.297736Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:130:2156], cookie=14330322552517973350, status=SUCCESS) 2024-11-21T17:48:46.297873Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:139:2163], cookie=111, session=0, seqNo=0) 2024-11-21T17:48:46.297898Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.297926Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:139:2163], cookie=7984970631413915914, session=1) 2024-11-21T17:48:46.308080Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T17:48:46.308107Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:48:46.318878Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:139:2163], cookie=111, session=1) 2024-11-21T17:48:46.318903Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:139:2163], cookie=7984970631413915914) 2024-11-21T17:48:46.318911Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T17:48:46.503767Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.503793Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.506057Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.506093Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.527236Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.527389Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Execute (sender=[3:128:2154], cookie=12345, session=0, seqNo=0) 2024-11-21T17:48:46.527435Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.538263Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=12345, session=1) 2024-11-21T17:48:46.538432Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:135:2159], cookie=23456, session=1, seqNo=0) 2024-11-21T17:48:46.549101Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:135:2159], cookie=23456, session=1) 2024-11-21T17:48:46.741415Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.741439Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.744676Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.744711Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.766032Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.766220Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2024-11-21T17:48:46.766268Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.777104Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2024-11-21T17:48:46.777273Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=23456, session=1, seqNo=0) 2024-11-21T17:48:46.788071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=23456, session=1) >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2024-11-21T17:48:45.971643Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.971666Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.974249Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.974277Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.995345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.995459Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=8069301964074074000, session=0, seqNo=0) 2024-11-21T17:48:45.995494Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.006167Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=8069301964074074000, session=1) 2024-11-21T17:48:46.006260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=17602500105796325071, session=0, seqNo=0) 2024-11-21T17:48:46.006289Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:46.016903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete 
(sender=[1:130:2156], cookie=17602500105796325071, session=2) 2024-11-21T17:48:46.216411Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.216440Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.219163Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.219222Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.230401Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.230520Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=8594391857250236741, session=1, seqNo=0) 2024-11-21T17:48:46.251511Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=8594391857250236741, session=1) 2024-11-21T17:48:46.450139Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.450162Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.452290Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.452332Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.473649Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.473841Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=18312800034871453451, session=0, seqNo=0) 2024-11-21T17:48:46.473879Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.484516Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=18312800034871453451, session=1) 2024-11-21T17:48:46.690488Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.690528Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.693614Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.693643Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.714952Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.715033Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:130:2156], cookie=9712143168692906168, path="") 2024-11-21T17:48:46.725828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:130:2156], cookie=9712143168692906168, status=SUCCESS) 2024-11-21T17:48:46.726017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=9471911803853016798, session=0, seqNo=0) 2024-11-21T17:48:46.726053Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.736818Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=9471911803853016798, session=1) 2024-11-21T17:48:46.736979Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=111, session=0, seqNo=0) 2024-11-21T17:48:46.737011Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:46.737046Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:140:2164], cookie=222, seqNo=0 2024-11-21T17:48:46.747691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], 
cookie=111, session=2) 2024-11-21T17:48:46.922813Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.922835Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.925379Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.925404Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.936341Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.936415Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:130:2156], cookie=10629838331877845028, path="") 2024-11-21T17:48:46.957307Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:130:2156], cookie=10629838331877845028, status=SUCCESS) 2024-11-21T17:48:46.957452Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:139:2163], cookie=17019087858694973096, session=0, seqNo=0) 2024-11-21T17:48:46.957477Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.968089Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:139:2163], cookie=17019087858694973096, session=1) 2024-11-21T17:48:46.968216Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:139:2163], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:46.968282Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:46.968329Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:46.968376Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=111, session=0, seqNo=0) 2024-11-21T17:48:46.968389Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:46.968402Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=222, session=1, seqNo=0) 2024-11-21T17:48:46.979159Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:139:2163], cookie=123) 2024-11-21T17:48:46.979186Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=111, session=2) 2024-11-21T17:48:46.979192Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=222, session=1) >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestCreateSemaphore [GOOD] >> TKesusTest::TestSessionDestroy >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TKesusTest::TestAllocatesResources [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2024-11-21T17:48:45.357026Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.357051Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.359511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.359531Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.380651Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.380745Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:130:2156], cookie=12587303094385723089, path="/foo/bar/baz") 2024-11-21T17:48:45.391591Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:130:2156], cookie=12587303094385723089, status=SUCCESS) 2024-11-21T17:48:45.391700Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:139:2163], cookie=1725403772633520863) 2024-11-21T17:48:45.402313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:139:2163], cookie=1725403772633520863) 2024-11-21T17:48:45.402411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:144:2168], cookie=16533117164200658000, path="/foo/bar/baz") 2024-11-21T17:48:45.413052Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:144:2168], cookie=16533117164200658000, status=SUCCESS) 2024-11-21T17:48:45.413162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:149:2173], cookie=3101208866951599951) 2024-11-21T17:48:45.423639Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:149:2173], cookie=3101208866951599951) 2024-11-21T17:48:45.425437Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.425456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.425488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.425558Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.467306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.467401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:191:2205], cookie=7332382006981944597) 2024-11-21T17:48:45.478054Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:191:2205], cookie=7332382006981944597) 2024-11-21T17:48:45.478178Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:199:2212], cookie=3423500915719476475, path="/foo/bar/baz") 2024-11-21T17:48:45.488965Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:199:2212], cookie=3423500915719476475, status=SUCCESS) 2024-11-21T17:48:45.489110Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:204:2217], cookie=6034016835008237208, path="/foo/bar/baz") 2024-11-21T17:48:45.489130Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:204:2217], cookie=6034016835008237208, status=PRECONDITION_FAILED) 2024-11-21T17:48:45.603501Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.603528Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.606700Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.606784Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.618032Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.618122Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:130:2156], cookie=252156730762553559, name="Lock1") 2024-11-21T17:48:45.618140Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:130:2156], cookie=252156730762553559) 2024-11-21T17:48:45.839636Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.839665Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.842830Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.842886Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.864263Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.864370Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=1498144091951180205, session=0, seqNo=0) 2024-11-21T17:48:45.864403Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:45.875157Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=1498144091951180205, session=1) 2024-11-21T17:48:45.875269Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:45.875320Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:45.875380Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:45.886180Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=111) 2024-11-21T17:48:45.886330Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:139:2163], cookie=9416644897890515722, name="Lock1", force=0) 2024-11-21T17:48:45.897091Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:139:2163], cookie=9416644897890515722) 2024-11-21T17:48:45.897230Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:144:2168], cookie=4106361768926631068, name="Sem1", force=0) 2024-11-21T17:48:45.908055Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:144:2168], cookie=4106361768926631068) 2024-11-21T17:48:45.908197Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:149:2173], cookie=14779868277705516857, name="Sem1", limit=42) 2024-11-21T17:48:45.908246Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2024-11-21T17:48:45.918975Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:149:2173], cookie=14779868277705516857) 2024-11-21T17:48:45.919112Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:154:2178], cookie=13700933799231785400, name="Sem1", force=0) 2024-11-21T17:48:45.919140Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2024-11-21T17:48:45.929847Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete 
(sender=[3:154:2178], cookie=13700933799231785400) 2024-11-21T17:48:45.929978Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:159:2183], cookie=11278131696267538066, name="Sem1", force=0) 2024-11-21T17:48:45.940693Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:159:2183], cookie=11278131696267538066) 2024-11-21T17:48:46.078139Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.078173Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.081205Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.081243Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.102748Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.102889Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=8111993166509727927, session=0, seqNo=0) 2024-11-21T17:48:46.102937Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.113720Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=8111993166509727927, session=1) 2024-11-21T17:48:46.113801Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=4758594729669699381, session=0, seqNo=0) 2024-11-21T17:48:46.113831Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:46.124683Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=4758594729669699381, session=2) 2024-11-21T17:48:46.124761Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:130:2156], cookie=12026492505344794330 2024-11-21T17:48:46.124860Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:142:2166], cookie=14913235312258169705, name="Sem1", limit=3) 2024-11-21T17:48:46.124911Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:48:46.135679Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:142:2166], cookie=14913235312258169705) 2024-11-21T17:48:46.135757Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=112, name="Sem1") 2024-11-21T17:48:46.135774Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=112) 2024-11-21T17:48:46.135792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=113, name="Sem1") 2024-11-21T17:48:46.135797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=113) 2024-11-21T17:48:46.135814Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=14311139039257792058, session=2, seqNo=0) 2024-11-21T17:48:46.146580Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=14311139039257792058, session=2) 2024-11-21T17:48:46.146657Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=114, name="Sem1") 2024-11-21T17:48:46.146677Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=114) 
2024-11-21T17:48:46.146708Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=115, name="Sem1") 2024-11-21T17:48:46.146712Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=115) 2024-11-21T17:48:46.146772Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:149:2173], cookie=4629069100970125931, name="Sem1") 2024-11-21T17:48:46.157674Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:149:2173], cookie=4629069100970125931) 2024-11-21T17:48:46.157791Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=116, session=1, semaphore="Sem1" count=1) 2024-11-21T17:48:46.157836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T17:48:46.168729Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=116) 2024-11-21T17:48:46.168827Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=117, session=2, semaphore="Sem1" count=2) 2024-11-21T17:48:46.168873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2024-11-21T17:48:46.179571Z node 4 :KESUS_TABLET DEBUG: [7205759403792 ... 48:46.879227Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2024-11-21T17:48:46.889805Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:200:2218], cookie=1396205059081755573) 2024-11-21T17:48:46.889872Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=126, session=1, semaphore="Sem2" count=3) 2024-11-21T17:48:46.889899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2024-11-21T17:48:46.900517Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=126) 2024-11-21T17:48:46.900596Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=127, name="Sem2") 2024-11-21T17:48:46.900611Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=127) 2024-11-21T17:48:46.900633Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=128, session=1, semaphore="Sem2" count=3) 2024-11-21T17:48:46.911267Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=128) 2024-11-21T17:48:47.155677Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.166607Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.176876Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=129, session=1, semaphore="Sem2" count=2) 2024-11-21T17:48:47.187789Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=129) 2024-11-21T17:48:47.187881Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=130, name="Sem2") 2024-11-21T17:48:47.187899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete 
(sender=[4:130:2156], cookie=130) 2024-11-21T17:48:47.187925Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=131, session=1, semaphore="Sem2" count=1) 2024-11-21T17:48:47.198715Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=131) 2024-11-21T17:48:47.198818Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem2") 2024-11-21T17:48:47.198838Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) 2024-11-21T17:48:47.198857Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=133, name="Sem2") 2024-11-21T17:48:47.198861Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=133) 2024-11-21T17:48:47.319782Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.319806Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.322227Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.322266Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.333368Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.334398Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=5968532175222584921, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2024-11-21T17:48:47.334458Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2024-11-21T17:48:47.355509Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=5968532175222584921) 2024-11-21T17:48:47.355641Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=512770299501267294, path="/Root1/Res", config={ }) 2024-11-21T17:48:47.355690Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2024-11-21T17:48:47.366434Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=512770299501267294) 2024-11-21T17:48:47.366572Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:144:2168], cookie=5692804574643732532, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2024-11-21T17:48:47.366618Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2024-11-21T17:48:47.377381Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:144:2168], cookie=5692804574643732532) 2024-11-21T17:48:47.377518Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:149:2173], cookie=9586831833281752896, path="/Root2/Res", config={ }) 2024-11-21T17:48:47.377566Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2024-11-21T17:48:47.388390Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:149:2173], cookie=9586831833281752896) 2024-11-21T17:48:47.388672Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:154:2178], cookie=9107083954456371936, path="/Root2/Res/Subres", config={ }) 2024-11-21T17:48:47.388739Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2024-11-21T17:48:47.399472Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:154:2178], cookie=9107083954456371936) 2024-11-21T17:48:47.399758Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:159:2183]. Cookie: 4209927275761643873. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:47.399766Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:159:2183], cookie=4209927275761643873) 2024-11-21T17:48:47.440593Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.481418Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.512006Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.512153Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:166:2187]. Cookie: 9909024958568870239. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2024-11-21T17:48:47.512291Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:169:2190]. Cookie: 6992302607049285448. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:47.512303Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:169:2190], cookie=6992302607049285448) 2024-11-21T17:48:47.542798Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.583559Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.583705Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:174:2194]. Cookie: 13966044307363615895. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2024-11-21T17:48:47.583792Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:159:2183]. Cookie: 9811894189892115041. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:47.583798Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:159:2183], cookie=9811894189892115041) 2024-11-21T17:48:47.583843Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:169:2190]. Cookie: 16016149179235696499. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:47.583847Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:169:2190], cookie=16016149179235696499) 2024-11-21T17:48:47.614308Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.614336Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.614455Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:181:2201]. Cookie: 8877378526592146570. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSemaphoreData >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2024-11-21T17:48:46.631374Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.631396Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.633924Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.633945Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.654986Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.655148Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=10053552585049399942, session=0, seqNo=222) 2024-11-21T17:48:46.655184Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.665761Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=10053552585049399942, session=1) 2024-11-21T17:48:46.665848Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=10234607372170827696, session=1, seqNo=111) 2024-11-21T17:48:46.676587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=10234607372170827696, session=1) 2024-11-21T17:48:46.871932Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.871957Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.874574Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.874635Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.885604Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.885700Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=111, session=0, seqNo=42) 2024-11-21T17:48:46.885724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.885745Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=222, session=1, seqNo=41) 2024-11-21T17:48:46.906634Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=111, session=1) 2024-11-21T17:48:46.906663Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=222, session=1) 2024-11-21T17:48:47.109341Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.109369Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.112446Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2024-11-21T17:48:47.112497Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.133794Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.133922Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=5900237065874034606, session=0, seqNo=0) 2024-11-21T17:48:47.133962Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:47.144669Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=5900237065874034606, session=1) 2024-11-21T17:48:47.144898Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:145:2169], cookie=13000374928603166202) 2024-11-21T17:48:47.144910Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:145:2169], cookie=13000374928603166202) 2024-11-21T17:48:47.347547Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.347578Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.350503Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.350528Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.371766Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.581520Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.581541Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.583859Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.583881Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.594900Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.595006Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=2263500062214982276, session=0, seqNo=0) 2024-11-21T17:48:47.595039Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:47.615792Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=2263500062214982276, session=1) 2024-11-21T17:48:47.615885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:47.615933Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:47.615978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:47.626668Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T17:48:47.626812Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2168], cookie=16594478782493638363, name="Sem1", limit=42) 2024-11-21T17:48:47.626839Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2024-11-21T17:48:47.637470Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2168], cookie=16594478782493638363) 2024-11-21T17:48:47.637583Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:149:2173], cookie=11320652576183645033, name="Sem1", limit=42) 
2024-11-21T17:48:47.648213Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:149:2173], cookie=11320652576183645033) 2024-11-21T17:48:47.648369Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:154:2178], cookie=2155588892800844209, name="Sem1", limit=51) 2024-11-21T17:48:47.659008Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:154:2178], cookie=2155588892800844209) 2024-11-21T17:48:47.659124Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:159:2183], cookie=4237499379447226526, name="Lock1", limit=42) 2024-11-21T17:48:47.669791Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:159:2183], cookie=4237499379447226526) 2024-11-21T17:48:47.669892Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:164:2188], cookie=3877969368849776813, name="Lock1", limit=18446744073709551615) 2024-11-21T17:48:47.682316Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:164:2188], cookie=3877969368849776813) 2024-11-21T17:48:47.682435Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:169:2193], cookie=17060285960580102452, name="Sem1") 2024-11-21T17:48:47.682461Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:169:2193], cookie=17060285960580102452) 2024-11-21T17:48:47.682507Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:172:2196], cookie=3745346292865265644, name="Sem2") 2024-11-21T17:48:47.682515Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:172:2196], cookie=3745346292865265644) 2024-11-21T17:48:47.684337Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.684353Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.684384Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.684417Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.726197Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.726232Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:47.726326Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:212:2226], cookie=7339673040405895170, name="Sem1") 2024-11-21T17:48:47.726343Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:212:2226], cookie=7339673040405895170) 2024-11-21T17:48:47.726446Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:219:2232], cookie=3604677805157527967, name="Sem2") 2024-11-21T17:48:47.726454Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:219:2232], cookie=3604677805157527967) |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2024-11-21T17:48:46.590651Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.590677Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.593495Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.593518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.614623Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.614742Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=2550802406469847943, session=0, seqNo=0) 2024-11-21T17:48:46.614782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.625306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=2550802406469847943, session=1) 2024-11-21T17:48:46.625363Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=3805749747653430137, session=0, seqNo=0) 2024-11-21T17:48:46.625383Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:46.635871Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=3805749747653430137, session=2) 2024-11-21T17:48:46.635944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2024-11-21T17:48:46.635985Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:46.636018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:46.646635Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T17:48:46.646692Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:46.646748Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:46.646757Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2024-11-21T17:48:46.657450Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2024-11-21T17:48:46.657470Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2024-11-21T17:48:46.657574Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=5950878406841033397, name="Lock1") 2024-11-21T17:48:46.657596Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=5950878406841033397) 2024-11-21T17:48:46.829814Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.829853Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.832140Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.832186Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.843129Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.843246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=9398118870297720141, session=0, seqNo=0) 2024-11-21T17:48:46.843276Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new 
session 1 2024-11-21T17:48:46.863980Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=9398118870297720141, session=1) 2024-11-21T17:48:46.864039Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=14626615281806957178, session=0, seqNo=0) 2024-11-21T17:48:46.864064Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:46.874625Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=14626615281806957178, session=2) 2024-11-21T17:48:46.874685Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:46.874707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:46.874720Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:46.885311Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2024-11-21T17:48:46.885384Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:46.885442Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:46.896015Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=222) 2024-11-21T17:48:46.896033Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=333) 2024-11-21T17:48:46.896108Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:146:2170], cookie=14081873987620742339, name="Lock1") 2024-11-21T17:48:46.896121Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:146:2170], cookie=14081873987620742339) 2024-11-21T17:48:46.896157Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:149:2173], cookie=875061556894953103, name="Lock1") 2024-11-21T17:48:46.896160Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2173], cookie=875061556894953103) 2024-11-21T17:48:47.078268Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.078292Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.080490Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.080527Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.101897Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.102009Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=11002173732374041093, session=0, seqNo=0) 2024-11-21T17:48:47.102036Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:47.112621Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=11002173732374041093, session=1) 2024-11-21T17:48:47.112682Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], 
cookie=2832392524302423313, session=0, seqNo=0) 2024-11-21T17:48:47.112700Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:47.123298Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=2832392524302423313, session=2) 2024-11-21T17:48:47.123410Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:47.123446Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:47.123456Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:47.133951Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=111) 2024-11-21T17:48:47.134013Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=222, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:47.134061Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=333, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:47.134069Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2024-11-21T17:48:47.144690Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=222) 2024-11-21T17:48:47.144713Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=333) 2024-11-21T17:48:47.144820Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:147:2171], cookie=10210719453914765098, name="Lock1") 2024-11-21T17:48:47.144840Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:147:2171], cookie=10210719453914765098) 2024-11-21T17:48:47.144915Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:150:2174], cookie=4372511371926857209, name="Lock1") 2024-11-21T17:48:47.144921Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:150:2174], cookie=4372511371926857209) 2024-11-21T17:48:47.147393Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.147418Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.147474Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.147578Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.199341Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.199368Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:47.199442Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:190:2204], cookie=7226150743400448178, name="Lock1") 2024-11-21T17:48:47.199456Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:190:2204], cookie=7226150743400448178) 2024-11-21T17:48:47.199520Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:198:2211], cookie=16533550606399279975, name="Lock1") 2024-11-21T17:48:47.199524Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete 
(sender=[3:198:2211], cookie=16533550606399279975) 2024-11-21T17:48:47.314997Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.315022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.317528Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.317559Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.338875Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.338988Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=18134283790868991910, session=0, seqNo=0) 2024-11-21T17:48:47.339015Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:47.349641Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=18134283790868991910, session=1) 2024-11-21T17:48:47.349712Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=16240284506096205721, session=0, seqNo=0) 2024-11-21T17:48:47.349735Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:47.360579Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=16240284506096205721, session=2) 2024-11-21T17:48:47.360660Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:47.360691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:47.360702Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:47.371412Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=111) 2024-11-21T17:48:47.371487Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=222, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:47.371548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=333, name="Lock1") 2024-11-21T17:48:47.371558Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2024-11-21T17:48:47.382341Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=222) 2024-11-21T17:48:47.382364Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=333) 2024-11-21T17:48:47.544578Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.544601Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.546959Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.546986Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.557843Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.558583Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=9528985185312359595, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2024-11-21T17:48:47.558622Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 
2024-11-21T17:48:47.579471Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=9528985185312359595) 2024-11-21T17:48:47.579592Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=6491772843471952181, path="/Root/Res", config={ }) 2024-11-21T17:48:47.579630Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2024-11-21T17:48:47.590147Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=6491772843471952181) 2024-11-21T17:48:47.590471Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 7715651644855661288. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:47.590480Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=7715651644855661288) 2024-11-21T17:48:47.590527Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:144:2168]. Cookie: 3376755679376352550. Data: { } 2024-11-21T17:48:47.590531Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:144:2168], cookie=3376755679376352550) 2024-11-21T17:48:47.631178Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.671909Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.702368Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.743008Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:47.773482Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> JsonProtoConversion::ProtoMapToJson [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::DropCopyWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:55.436719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:55.436738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.436741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:55.436745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:55.436752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:55.436754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:55.436760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.436826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:55.446890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:55.446923Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: 
[1:15:2062] 2024-11-21T17:47:55.449133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:55.449247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:55.449281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:55.452261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:55.452342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:55.452444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.452661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.453529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.453821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.453832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.453845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:55.453852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.453858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:55.453913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:55.455304Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:55.471924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:55.472000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.472052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:55.472088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:55.472093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.472863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.472882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:55.472920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.472928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:55.472931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:55.472935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:55.473805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.473819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:55.473823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:55.474283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.474301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.474308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.474315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.474961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:55.475491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:55.475558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:55.475816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.475849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:55.475857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.475918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:55.475926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.475954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:55.475967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.476485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.476498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.476540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.476545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:55.476628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.476635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:55.476646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:55.476650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.476655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:55.476661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.476665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:55.476669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:55.476680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:55.476685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:55.476689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
6678944, txId: 1005, path id: 1 2024-11-21T17:48:47.031536Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [194:205:2208], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T17:48:47.031601Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:47.031607Z node 194 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:47.031619Z node 194 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:47.031623Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:47.031627Z node 194 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T17:48:47.031763Z node 194 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:47.031774Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:47.031778Z node 194 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:48:47.031782Z node 194 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:48:47.031786Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:47.031894Z node 194 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:47.031904Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:47.031908Z node 194 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:48:47.031911Z node 194 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:48:47.031915Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:47.031923Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:48:47.032457Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:47.032467Z node 194 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:47.032540Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:47.032565Z node 194 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:48:47.032569Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:48:47.032577Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T17:48:47.032580Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:48:47.032584Z node 194 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:48:47.032587Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:48:47.032604Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:48:47.032977Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:48:47.033024Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:48:47.034046Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 432 RawX2: 833223657828 } TabletId: 72075186233409547 State: 4 2024-11-21T17:48:47.034062Z node 194 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:47.034357Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:47.034432Z node 194 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:48:47.034905Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:48:47.034960Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:48:47.035053Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:47.035058Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:48:47.035068Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:47.035532Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:48:47.035544Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:48:47.035622Z node 194 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1004 2024-11-21T17:48:47.035661Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:47.035667Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2024-11-21T17:48:47.035680Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:48:47.035683Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T17:48:47.035738Z node 194 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:47.035752Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:47.035758Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [194:667:2631] 2024-11-21T17:48:47.035766Z node 194 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:48:47.035776Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:48:47.035779Z node 194 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [194:667:2631] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T17:48:47.035832Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:47.035842Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:48:47.035850Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:48:47.035855Z node 194 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2024-11-21T17:48:47.035915Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NewTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:47.035941Z node 194 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NewTable" took 34us result status StatusPathDoesNotExist 2024-11-21T17:48:47.035970Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/NewTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/NewTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at 
schemeshard: 72057594046678944 2024-11-21T17:48:47.036011Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:47.036020Z node 194 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 11us result status StatusPathDoesNotExist 2024-11-21T17:48:47.036032Z node 194 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestReleaseLockFailure >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TFileStoreWithReboots::CreateDrop >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2024-11-21T17:48:47.866011Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.866034Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.868554Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.868576Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.889720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.889882Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=134305377065538520, session=0, seqNo=0) 2024-11-21T17:48:47.889924Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:47.900679Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=134305377065538520, session=1) 2024-11-21T17:48:47.900863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[1:130:2156], cookie=9638923323821854764, session=1) 2024-11-21T17:48:47.900895Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:48:47.911571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[1:130:2156], cookie=9638923323821854764) 2024-11-21T17:48:47.911725Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:147:2171], cookie=7669541579820967558) 2024-11-21T17:48:47.911738Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:147:2171], cookie=7669541579820967558) 2024-11-21T17:48:47.911801Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:150:2174], cookie=7419720437654458171, session=0, seqNo=0) 2024-11-21T17:48:47.911822Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:47.922359Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:150:2174], cookie=7419720437654458171, session=2) 2024-11-21T17:48:47.922522Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[1:130:2156], cookie=9098430476052084230, session=2) 2024-11-21T17:48:47.922538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2024-11-21T17:48:47.933314Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[1:130:2156], cookie=9098430476052084230) 2024-11-21T17:48:48.110463Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.110500Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.112841Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.112891Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.123830Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.123927Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:130:2156], cookie=3383044961804300096, name="Sem1", limit=1) 2024-11-21T17:48:48.123965Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:48:48.144945Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:130:2156], cookie=3383044961804300096) 2024-11-21T17:48:48.145061Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:139:2163], cookie=17235608455964419718, name="Sem2", limit=1) 2024-11-21T17:48:48.145096Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2024-11-21T17:48:48.155714Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:139:2163], cookie=17235608455964419718) 2024-11-21T17:48:48.155803Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:144:2168], cookie=10463221772111900796, name="Sem1") 2024-11-21T17:48:48.155828Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:144:2168], cookie=10463221772111900796) 2024-11-21T17:48:48.155860Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:147:2171], cookie=5852313217050722430, name="Sem2") 
2024-11-21T17:48:48.155865Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:147:2171], cookie=5852313217050722430) 2024-11-21T17:48:48.157781Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.157809Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.157866Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.158032Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.199965Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.200056Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:187:2201], cookie=11922065467779429352, name="Sem1") 2024-11-21T17:48:48.200070Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:187:2201], cookie=11922065467779429352) 2024-11-21T17:48:48.200152Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:193:2206], cookie=7759729102799218499, name="Sem2") 2024-11-21T17:48:48.200155Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:193:2206], cookie=7759729102799218499) 2024-11-21T17:48:48.200188Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:196:2209], cookie=9177942503028954514, name="Sem1", limit=1) 2024-11-21T17:48:48.210843Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:196:2209], cookie=9177942503028954514) 2024-11-21T17:48:48.210961Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:201:2214], cookie=16732583637720126129, name="Sem2", limit=1) 2024-11-21T17:48:48.221743Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:201:2214], cookie=16732583637720126129) 2024-11-21T17:48:48.221891Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:206:2219], cookie=5027324535216754902, name="Sem1") 2024-11-21T17:48:48.221912Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:206:2219], cookie=5027324535216754902) 2024-11-21T17:48:48.221972Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:209:2222], cookie=11292334077445941110, name="Sem2") 2024-11-21T17:48:48.221978Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:209:2222], cookie=11292334077445941110) 2024-11-21T17:48:48.222038Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[2:212:2225], cookie=14420123829227725253, name="Sem1") 2024-11-21T17:48:48.232879Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[2:212:2225], cookie=14420123829227725253) 2024-11-21T17:48:48.233007Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[2:217:2230], cookie=7463218235876104466, name="Sem2") 2024-11-21T17:48:48.243844Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[2:217:2230], cookie=7463218235876104466) 2024-11-21T17:48:48.243976Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[2:222:2235], cookie=13182083002914451627, name="Sem3") 2024-11-21T17:48:48.254773Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[2:222:2235], 
cookie=13182083002914451627) 2024-11-21T17:48:48.254942Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:227:2240], cookie=6909534732054579593, name="Sem1") 2024-11-21T17:48:48.254959Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:227:2240], cookie=6909534732054579593) 2024-11-21T17:48:48.255003Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:230:2243], cookie=5067541189865976221, name="Sem2") 2024-11-21T17:48:48.255007Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:230:2243], cookie=5067541189865976221) 2024-11-21T17:48:48.255077Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:233:2246], cookie=10078181998719643595, name="Sem3") 2024-11-21T17:48:48.255083Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:233:2246], cookie=10078181998719643595) 2024-11-21T17:48:48.257011Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.257039Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.257084Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.257182Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.299136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.299253Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:273:2276], cookie=5177678711858949089, name="Sem1") 2024-11-21T17:48:48.299274Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:273:2276], cookie=5177678711858949089) 2024-11-21T17:48:48.299388Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:279:2281], cookie=3545737500115321445, name="Sem2") 2024-11-21T17:48:48.299394Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:279:2281], cookie=3545737500115321445) 2024-11-21T17:48:48.299448Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:282:2284], cookie=7934725203361241846, name="Sem3") 2024-11-21T17:48:48.299456Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:282:2284], cookie=7934725203361241846) 2024-11-21T17:48:48.547984Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.548009Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.550465Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.550510Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.571786Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.571912Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=15505511401663745557, session=0, seqNo=0) 2024-11-21T17:48:48.571938Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:48.582569Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=15505511401663745557, session=1) 2024-11-21T17:48:48.582640Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=306522875610229061, session=0, seqNo=0) 
2024-11-21T17:48:48.582667Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:48.593485Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=306522875610229061, session=2) 2024-11-21T17:48:48.593620Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:139:2163], cookie=514175210803165199, name="Sem1", limit=1) 2024-11-21T17:48:48.593656Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:48:48. ... 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2024-11-21T17:48:48.790006Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:220:2243], cookie=10979413553666174723) 2024-11-21T17:48:48.790167Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:225:2248], cookie=14292511949847297978, name="Sem1", force=0) 2024-11-21T17:48:48.790195Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2024-11-21T17:48:48.800993Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:225:2248], cookie=14292511949847297978) 2024-11-21T17:48:48.801140Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:230:2253], cookie=11935052724620214585, name="Sem1", limit=1) 2024-11-21T17:48:48.801175Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2024-11-21T17:48:48.811907Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:230:2253], cookie=11935052724620214585) 2024-11-21T17:48:48.812038Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:235:2258], cookie=295648078859191818, name="Sem1", force=0) 2024-11-21T17:48:48.812054Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2024-11-21T17:48:48.822740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:235:2258], cookie=295648078859191818) 2024-11-21T17:48:48.822887Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:240:2263], cookie=6598671267841077218, name="Sem1", limit=1) 2024-11-21T17:48:48.822922Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2024-11-21T17:48:48.833689Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:240:2263], cookie=6598671267841077218) 2024-11-21T17:48:48.833807Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=111, session=1, semaphore="Sem1" count=1) 2024-11-21T17:48:48.833907Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2024-11-21T17:48:48.844670Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=111) 2024-11-21T17:48:48.844790Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=222, session=2, semaphore="Sem1" count=1) 2024-11-21T17:48:48.855560Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=222) 2024-11-21T17:48:48.855664Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[3:128:2154], cookie=333, name="Sem1") 2024-11-21T17:48:48.855694Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
Deleting session 2 / semaphore 11 "Sem1" waiter link 2024-11-21T17:48:48.876716Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[3:128:2154], cookie=333) 2024-11-21T17:48:48.876829Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=444, session=2, semaphore="Sem1" count=1) 2024-11-21T17:48:48.887622Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=444) 2024-11-21T17:48:48.887731Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[3:128:2154], cookie=555, name="Sem1") 2024-11-21T17:48:48.887758Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2024-11-21T17:48:48.887767Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2024-11-21T17:48:48.898590Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[3:128:2154], cookie=555) 2024-11-21T17:48:48.989745Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.989776Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.992708Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.992736Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:49.014093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:49.014221Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=16380339279106305766, session=0, seqNo=0) 2024-11-21T17:48:49.014255Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:49.025012Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=16380339279106305766, session=1) 2024-11-21T17:48:49.025098Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=112, name="Sem1", limit=5) 2024-11-21T17:48:49.025130Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:48:49.035792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=112) 2024-11-21T17:48:49.035867Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:130:2156], cookie=113, name="Sem1") 2024-11-21T17:48:49.046585Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=113) 2024-11-21T17:48:49.046650Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=114, name="Sem1", force=0) 2024-11-21T17:48:49.046671Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2024-11-21T17:48:49.057448Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=114) 2024-11-21T17:48:49.057539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[4:130:2156], cookie=766464661470512012 2024-11-21T17:48:49.057600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=115, name="Sem1", limit=5) 2024-11-21T17:48:49.068403Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=115) 
2024-11-21T17:48:49.068478Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:130:2156], cookie=116, name="Sem1") 2024-11-21T17:48:49.079225Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=116) 2024-11-21T17:48:49.079307Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=117, name="Sem1", force=0) 2024-11-21T17:48:49.090316Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=117) 2024-11-21T17:48:49.090419Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=118, session=1, semaphore="Sem1" count=1) 2024-11-21T17:48:49.101174Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=118) 2024-11-21T17:48:49.101250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=119, name="Sem1") 2024-11-21T17:48:49.112013Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=119) 2024-11-21T17:48:49.112095Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=120, name="Sem1") 2024-11-21T17:48:49.112112Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=120) 2024-11-21T17:48:49.112143Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[4:130:2156], cookie=17627750692308255655, session=1) 2024-11-21T17:48:49.112162Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:48:49.122908Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[4:130:2156], cookie=17627750692308255655) 2024-11-21T17:48:49.122979Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=121, name="Sem1", limit=5) 2024-11-21T17:48:49.133694Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=121) 2024-11-21T17:48:49.133766Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:130:2156], cookie=122, name="Sem1") 2024-11-21T17:48:49.144899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=122) 2024-11-21T17:48:49.144997Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=123, name="Sem1", force=0) 2024-11-21T17:48:49.155907Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=123) 2024-11-21T17:48:49.155983Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=124, session=1, semaphore="Sem1" count=1) 2024-11-21T17:48:49.166669Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=124) 2024-11-21T17:48:49.166744Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=125, name="Sem1") 2024-11-21T17:48:49.177471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=125) 2024-11-21T17:48:49.177544Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=126, name="Sem1") 
2024-11-21T17:48:49.177560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=126) 2024-11-21T17:48:49.177632Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:130:2156], cookie=127, name="Sem1", limit=5) 2024-11-21T17:48:49.177639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:130:2156], cookie=127) 2024-11-21T17:48:49.177654Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:130:2156], cookie=128, name="Sem1") 2024-11-21T17:48:49.177670Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:130:2156], cookie=128) 2024-11-21T17:48:49.177691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:130:2156], cookie=129, name="Sem1", force=0) 2024-11-21T17:48:49.177697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:130:2156], cookie=129) 2024-11-21T17:48:49.177714Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=130, session=1, semaphore="Sem1" count=1) 2024-11-21T17:48:49.177719Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=130) 2024-11-21T17:48:49.177732Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:130:2156], cookie=131, name="Sem1") 2024-11-21T17:48:49.177735Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:130:2156], cookie=131) 2024-11-21T17:48:49.177745Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem1") 2024-11-21T17:48:49.177748Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) >> TKesusTest::TestReleaseSemaphore [GOOD] >> TFileStoreWithReboots::CheckFileStoreSSDLimits |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogExistingUserId |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:47.525660Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:47.525683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.525688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:47.525692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:47.525706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:47.525710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:47.525718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.525792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:47.533364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:47.533384Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.535175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:47.535237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:47.535265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:47.539112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:47.539163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:47.539232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.539412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.540050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.540304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.540313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.540322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:47.540336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.540341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:47.540368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 
is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:47.541370Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.552576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:47.552651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.552701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:47.552734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:47.552738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.553253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.553269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:47.553298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.553303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:47.553305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:47.553308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:47.553580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.553585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:47.553588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:47.553803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.553808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.553812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.553816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.554183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2024-11-21T17:47:47.554554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:47.554589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:47.554711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.554726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:47.554730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.554761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:47.554765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.554783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:47.554791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.555078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.555084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.555109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.555112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:47.555163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.555167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:47.555174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:47.555177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.555181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:47.555184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.555186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:47.555189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T17:47:47.555196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:47.555199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:47.555202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:48:47.890868Z node 223 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:47.890898Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 957777709160 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:47.890904Z node 223 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:48:47.890967Z node 223 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:48:47.890972Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:48:47.890993Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:48:47.891011Z node 223 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[223:398:2369], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:48:47.891378Z node 223 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:47.891384Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:47.891422Z node 223 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:47.891425Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [223:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:48:47.891493Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:47.891498Z node 223 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2024-11-21T17:48:47.891500Z node 223 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T17:48:47.891571Z node 223 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 
72057594046678944, cookie: 1003 2024-11-21T17:48:47.891579Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:47.891582Z node 223 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:47.891587Z node 223 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:48:47.891591Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T17:48:47.891601Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:48:47.891989Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:47.891998Z node 223 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:48:47.892007Z node 223 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:47.892009Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:47.892012Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:47.892021Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [223:321:2313] message: TxId: 1003 2024-11-21T17:48:47.892025Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:47.892030Z node 223 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:47.892033Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:47.892058Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:48:47.892167Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:47.892426Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:47.892438Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [223:598:2512] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:48:47.892566Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:47.892603Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 45us result status StatusSuccess 2024-11-21T17:48:47.892690Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:47.892769Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T17:48:47.892788Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 17us result status StatusSuccess 2024-11-21T17:48:47.892820Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T17:48:47.892861Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:47.892874Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 14us result status StatusSuccess 2024-11-21T17:48:47.892922Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestReleaseSemaphore [GOOD] Test command err: 2024-11-21T17:48:48.037125Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.037149Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.039489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.039507Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.060587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.280637Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.280675Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.284363Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.284448Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.295770Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.527751Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.527792Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.531050Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.531084Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.542537Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.542866Z node 3 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 4 2024-11-21T17:48:48.543009Z node 3 :KESUS_TABLET TRACE: Got 
TEvServerDisconnected([3:181:2155]) 2024-11-21T17:48:48.960246Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.960281Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.962731Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.962755Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.973870Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.973984Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=9856779765486406057, session=0, seqNo=0) 2024-11-21T17:48:48.974031Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:48.994817Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=9856779765486406057, session=1) 2024-11-21T17:48:48.994890Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=5027128701063173434, session=0, seqNo=0) 2024-11-21T17:48:48.994923Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:49.005743Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=5027128701063173434, session=2) 2024-11-21T17:48:49.005837Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:130:2156], cookie=111, name="Lock1") 2024-11-21T17:48:49.016519Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T17:48:49.016593Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:49.016635Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:49.016688Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:49.027290Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2024-11-21T17:48:49.027355Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:130:2156], cookie=333, name="Lock1") 2024-11-21T17:48:49.037983Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T17:48:49.204020Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:49.204053Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:49.207894Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:49.207923Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:49.219175Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:49.219292Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[6:130:2156], cookie=5338061279968360732, session=0, seqNo=0) 2024-11-21T17:48:49.219325Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:49.240213Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[6:130:2156], cookie=5338061279968360732, session=1) 2024-11-21T17:48:49.240321Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Execute (sender=[6:130:2156], cookie=12697187760810434635, session=0, seqNo=0) 2024-11-21T17:48:49.240352Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:49.251021Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[6:130:2156], cookie=12697187760810434635, session=2) 2024-11-21T17:48:49.251134Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[6:141:2165], cookie=7476768308377331265, name="Sem1", limit=1) 2024-11-21T17:48:49.251164Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:48:49.261817Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[6:141:2165], cookie=7476768308377331265) 2024-11-21T17:48:49.261890Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[6:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2024-11-21T17:48:49.261924Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T17:48:49.261950Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[6:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2024-11-21T17:48:49.272750Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[6:130:2156], cookie=111) 2024-11-21T17:48:49.272779Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[6:130:2156], cookie=222) 2024-11-21T17:48:49.272931Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:149:2173], cookie=12428098960634318050, name="Sem1") 2024-11-21T17:48:49.272960Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:149:2173], cookie=12428098960634318050) 2024-11-21T17:48:49.273011Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:152:2176], cookie=13533709871029164418, name="Sem1") 2024-11-21T17:48:49.273016Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:152:2176], cookie=13533709871029164418) 2024-11-21T17:48:49.273045Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[6:130:2156], cookie=333, name="Sem1") 2024-11-21T17:48:49.273075Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2024-11-21T17:48:49.283798Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[6:130:2156], cookie=333) 2024-11-21T17:48:49.283926Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:157:2181], cookie=17198419851322856829, name="Sem1") 2024-11-21T17:48:49.283941Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:157:2181], cookie=17198419851322856829) 2024-11-21T17:48:49.283987Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:160:2184], cookie=4070578627329621355, name="Sem1") 2024-11-21T17:48:49.283991Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:160:2184], cookie=4070578627329621355) 2024-11-21T17:48:49.284011Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[6:130:2156], cookie=444, name="Sem1") 2024-11-21T17:48:49.284032Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / 
semaphore 1 "Sem1" owner link 2024-11-21T17:48:49.294706Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[6:130:2156], cookie=444) 2024-11-21T17:48:49.294837Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:165:2189], cookie=6566010750869354915, name="Sem1") 2024-11-21T17:48:49.294854Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:165:2189], cookie=6566010750869354915) 2024-11-21T17:48:49.294893Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:168:2192], cookie=4565805278849279558, name="Sem1") 2024-11-21T17:48:49.294897Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:168:2192], cookie=4565805278849279558) |78.9%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomainWithoutHive-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:47.571107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:47.571131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.571136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:47.571142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:47.571158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:47.571163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:47.571173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.571260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:47.582362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:47.582381Z node 1 :IMPORT WARN: Table 
profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.584489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:47.584594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:47.584635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:47.587432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:47.587510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:47.587617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.587837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.588775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.589070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.589083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.589095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:47.589102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.589108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:47.589151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:47.590612Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.609234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:47.609340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.609411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:47.609461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:47.609470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.610216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.610245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:47.610294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.610305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:47.610310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:47.610316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:47.610700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.610711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:47.610716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:47.611035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.611045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.611051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.611059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.611650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:47.612083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:47.612140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:47.612349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.612377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:47.612385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.612443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T17:47:47.612450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.612481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:47.612493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.612879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.612888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.612928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.612933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:47.613010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.613018Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:47.613031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:47.613035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.613041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:47.613046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.613051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:47.613054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:47.613066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:47.613073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:47.613077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:48:48.018288Z node 223 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:48.018302Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 957777709160 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:48.018306Z node 223 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:48:48.018358Z node 223 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:48:48.018363Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:48:48.018379Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:48:48.018396Z node 223 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[223:398:2369], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:48:48.018690Z node 223 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:48.018696Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:48.018726Z node 223 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:48.018730Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [223:202:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:48:48.018786Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:48.018791Z node 223 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2024-11-21T17:48:48.018793Z node 223 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T17:48:48.018857Z node 223 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:48.018864Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:48.018869Z node 223 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72057594046678944, txId: 1003 2024-11-21T17:48:48.018872Z node 223 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:48:48.018875Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T17:48:48.018884Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:48:48.019298Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:48.019306Z node 223 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:48:48.019315Z node 223 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:48.019317Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:48.019320Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:48.019329Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [223:321:2313] message: TxId: 1003 2024-11-21T17:48:48.019333Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:48.019337Z node 223 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:48.019340Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:48.019364Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:48:48.019479Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:48.019685Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:48.019691Z node 223 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [223:598:2512] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:48:48.019773Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:48.019797Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 29us result status StatusSuccess 2024-11-21T17:48:48.019850Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 
Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:48.019899Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T17:48:48.019912Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 12us result status StatusSuccess 2024-11-21T17:48:48.019933Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T17:48:48.019961Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:48.019970Z node 223 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 10us result status StatusSuccess 2024-11-21T17:48:48.020002Z node 223 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTablesWithReboots::CopyTableWithReboots [GOOD] >> TFileStoreWithReboots::CheckFileStoreSSDLimits [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlterChannels ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2024-11-21T17:48:39.050959Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:39.050999Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:39.055374Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:39.055419Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:39.077981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:39.079075Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=6559368884235170752, path="/a/b", config={ }) 2024-11-21T17:48:39.079121Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=6559368884235170752) 2024-11-21T17:48:39.079273Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=15163724333680615444, path="", config={ }) 2024-11-21T17:48:39.079282Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=15163724333680615444) 2024-11-21T17:48:39.079320Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:143:2167], cookie=12989477276468931242, path="/", config={ }) 2024-11-21T17:48:39.079326Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:143:2167], cookie=12989477276468931242) 2024-11-21T17:48:39.079362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2170], cookie=8468015244581476658, path="//", config={ }) 2024-11-21T17:48:39.079367Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxQuoterResourceAdd::Complete (sender=[1:146:2170], cookie=8468015244581476658) 2024-11-21T17:48:39.079501Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:155:2179], cookie=11032461787771131052, path="RootQuoter", config={ MaxUnitsPerSecond: 42 }) 2024-11-21T17:48:39.079550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "RootQuoter" 2024-11-21T17:48:39.090560Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:155:2179], cookie=11032461787771131052) 2024-11-21T17:48:39.090758Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:160:2184], cookie=15472979078787849411, path="/RootQuoter/", config={ MaxUnitsPerSecond: 42 }) 2024-11-21T17:48:39.090812Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:160:2184], cookie=15472979078787849411) 2024-11-21T17:48:39.090886Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:163:2187], cookie=16786154999071929858, path="/RootQuoter/", config={ MaxUnitsPerSecond: 100500 }) 2024-11-21T17:48:39.090897Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:163:2187], cookie=16786154999071929858) 2024-11-21T17:48:39.090951Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2190], cookie=13631587741231700287, path="RootQuoter/SubQuoter", config={ }) 2024-11-21T17:48:39.091002Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "RootQuoter/SubQuoter" 2024-11-21T17:48:39.101955Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2190], cookie=13631587741231700287) 2024-11-21T17:48:39.102156Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2195], cookie=17796753395376854042, path="/RootQuoter//OtherSubQuoter/", config={ MaxUnitsPerSecond: 100500 }) 2024-11-21T17:48:39.102227Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "RootQuoter/OtherSubQuoter" 2024-11-21T17:48:39.113620Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2195], cookie=17796753395376854042) 2024-11-21T17:48:39.113799Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:176:2200], cookie=8278915428476717599, ids=[], paths=[], recursive=1) 2024-11-21T17:48:39.113823Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:176:2200], cookie=8278915428476717599) 2024-11-21T17:48:39.116708Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:39.116756Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:39.116821Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:39.116949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:39.168731Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:39.168863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:216:2230], cookie=12043513795044798135, ids=[], paths=[], recursive=1) 2024-11-21T17:48:39.168891Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:216:2230], cookie=12043513795044798135) 2024-11-21T17:48:39.169072Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:222:2235], cookie=2802643089796300195, path="/RootQuoter", config={ MaxUnitsPerSecond: 42 }) 2024-11-21T17:48:39.169091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:222:2235], cookie=2802643089796300195) 2024-11-21T17:48:39.169159Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:225:2238], cookie=13821382330760239316, path="OtherRootQuoter", config={ MaxUnitsPerSecond: 100 }) 2024-11-21T17:48:39.169210Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "OtherRootQuoter" 2024-11-21T17:48:39.184396Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:225:2238], cookie=13821382330760239316) 2024-11-21T17:48:39.184637Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:230:2243], cookie=11443394249696962964, ids=[], paths=[OtherRootQuoter], recursive=0) 2024-11-21T17:48:39.184675Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:230:2243], cookie=11443394249696962964) 2024-11-21T17:48:39.376315Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:39.376357Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:39.380435Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:39.380519Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:39.405136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:39.405289Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=14488861714228735175, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2024-11-21T17:48:39.405318Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], cookie=14488861714228735175) 2024-11-21T17:48:39.405412Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:137:2161], cookie=3346920770905012680, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2024-11-21T17:48:39.405422Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:137:2161], cookie=3346920770905012680) 2024-11-21T17:48:39.405470Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:140:2164], cookie=6607274801779896402, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2024-11-21T17:48:39.405515Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2024-11-21T17:48:39.423432Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:140:2164], cookie=6607274801779896402) 2024-11-21T17:48:39.423621Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:145:2169], cookie=7318996592052420475, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2024-11-21T17:48:39.423684Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2024-11-21T17:48:39.444541Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:145:2169], cookie=7318996592052420475) 2024-11-21T17:48:39.577895Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 
72057594037927937 2024-11-21T17:48:39.577928Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:39.581524Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:39.581591Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:39.604682Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:39.604820Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:128:2154], cookie=11911714524255239532, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2024-11-21T17:48:39.604908Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:39.615651Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:128:2154], cookie=11911714524255239532) 2024-11-21T17:48:39.615776Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:138:2162], cookie=13862614103317144386, path="/Root/Res", config={ }) 2024-11-21T17:48:39.615816Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2024-11-21T17:48:39.626740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:138:2162], cookie=13862614103317144386) 2024-11-21T17:48:39.627219Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:143:2167]. Cookie: 17194607735029057951. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:39.627242Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:143:2167], cookie=17194607735029057951) 2024-11-21T17:48:39.627341Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:143:2167]. Cookie: 1554803886295205794. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2024-11-21T17:48:39.627349Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:143:2167], cookie=1554803886295205794) 2024-11-21T17:48:41.431347Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:41.431382Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:41.434218Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:41.434255Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:41.455576Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:41.455697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=1121286149976276142, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T17:48:41.455763Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:41.466585Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=1121286149976276142) 2024-11-21T17:48:41.466749Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:140:2164], cookie=7297426699836069408, path="/Root/Res", config={ }) 2024-11-21T17:48:41.466803Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2024-11-21T17:48:41.477628Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:140:2164], cookie=7297426699836069408) 2024-11-21T17:48:41.477878Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:145:2169]. Cookie: 4048265919330606987. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:41.477888Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:145:2169], cookie=4048265919330606987) 2024-11-21T17:48:41.477955Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:145:2169]. Cookie: 11012919941283784405. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2024-11-21T17:48:41.477962Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:145:2169], cookie=11012919941283784405) 2024-11-21T17:48:43.283975Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:43.284005Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:43.286969Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:43.287011Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:43.298462Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:43.298596Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=517891535056111942, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T17:48:43.298677Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:43.319653Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=517891535056111942) 2024-11-21T17:48:43.319826Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:140:2164]. Cookie: 599605659838004658. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:43.319833Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:140:2164], cookie=599605659838004658) 2024-11-21T17:48:43.319915Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:140:2164]. Cookie: 4731680729811845837. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2024-11-21T17:48:43.319922Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:140:2164], cookie=4731680729811845837) 2024-11-21T17:48:45.638187Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2188]. Cookie: 8246147403522390899. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:45.638211Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2188], cookie=8246147403522390899) 2024-11-21T17:48:45.638301Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:171:2188]. Cookie: 9582039595064189036. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2024-11-21T17:48:45.638308Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:171:2188], cookie=9582039595064189036) 2024-11-21T17:48:47.618699Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:198:2214]. Cookie: 9494945179626559833. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:47.618719Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:198:2214], cookie=9494945179626559833) 2024-11-21T17:48:47.618773Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:198:2214]. Cookie: 5883423950924104969. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2024-11-21T17:48:47.618778Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:198:2214], cookie=5883423950924104969) >> TxUsage::WriteToTopic_Demo_25 [GOOD] |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckFileStoreSSDLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:49.572618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:49.572640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:49.572643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:49.572647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:49.572651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:49.572654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:49.572661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:49.572728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:49.579877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:49.579898Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T17:48:49.582031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:49.582562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:49.582589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:49.583593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:49.583837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:49.583914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:49.583979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:49.584976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:49.585242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:49.585252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:49.585294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:49.585302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:49.585307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:49.585322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.586430Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:49.598062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:49.598157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.598218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:49.598261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:49.598276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.598995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:49.599015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:49.599059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.599068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:49.599071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:49.599075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:49.599437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.599452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:49.599456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:49.599791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.599802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.599806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:49.599811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:49.600246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:49.600656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:49.600704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:49.600847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:49.600869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:49.600875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:49.600923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:49.600928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:49.600952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:49.600961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:49.601269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:49.601276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:49.601318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:49.601324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:49.601411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.601420Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:49.601429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:49.601432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:49.601436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:49.601439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:49.601444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:49.601447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:49.601455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:49.601459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:49.601461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:49.601687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:49.601695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:49.601698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:49.601701Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:49.601704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:49.601714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
T17:48:49.876602Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 5 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:48:49.876640Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type FileStore, boot OK, tablet id 72075186233409549 2024-11-21T17:48:49.876654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T17:48:49.876657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 110, shardIdx: 72057594046678944:4, partId: 0 2024-11-21T17:48:49.876665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T17:48:49.876672Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:48:49.876675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T17:48:49.876687Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 2 -> 3 2024-11-21T17:48:49.876859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T17:48:49.877151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T17:48:49.877503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.877521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.877525Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:49.878032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275054593 2024-11-21T17:48:49.878054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 0, tablet: 72075186233409549 2024-11-21T17:48:49.878770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2024-11-21T17:48:49.878793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxId: 110 Origin: 72075186233409549 Status: OK 2024-11-21T17:48:49.878797Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T17:48:49.878801Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 3 -> 128 2024-11-21T17:48:49.879255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 
2024-11-21T17:48:49.879371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.879377Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:49.879384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 110 ready parts: 1/1 2024-11-21T17:48:49.879414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 110 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:49.879670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:110 msg type: 269090816 2024-11-21T17:48:49.879687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 110 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 110 at step: 5000009 2024-11-21T17:48:49.879735Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:49.879747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 110 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:49.879751Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 HandleReply TEvOperationPlan, step: 5000009, at schemeshard: 72057594046678944 2024-11-21T17:48:49.879764Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 128 -> 240 2024-11-21T17:48:49.879785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:49.879793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: Erasing txId 110 2024-11-21T17:48:49.880190Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:49.880198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:49.880243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:48:49.880262Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:49.880267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, path id: 1 2024-11-21T17:48:49.880271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, path id: 5 2024-11-21T17:48:49.880343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.880350Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 110:0 ProgressState 2024-11-21T17:48:49.880363Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#110:0 progress is 1/1 2024-11-21T17:48:49.880367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T17:48:49.880372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: false 2024-11-21T17:48:49.880377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T17:48:49.880381Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 110:0 2024-11-21T17:48:49.880385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 110:0 2024-11-21T17:48:49.880410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:48:49.880416Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 110, publications: 2, subscribers: 0 2024-11-21T17:48:49.880421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T17:48:49.880423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2024-11-21T17:48:49.880539Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:49.880550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:49.880556Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 110 2024-11-21T17:48:49.880561Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T17:48:49.880565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:48:49.880682Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:49.880692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:49.880696Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 110 2024-11-21T17:48:49.880699Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T17:48:49.880703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:48:49.880712Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 110, subscribers: 0 2024-11-21T17:48:49.881516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T17:48:49.881553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2024-11-21T17:48:49.881608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2024-11-21T17:48:49.881613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2024-11-21T17:48:49.881685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2024-11-21T17:48:49.881702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2024-11-21T17:48:49.881705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:669:2620] TestWaitNotification: OK eventTxId 110 |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |78.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient |78.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TFileStoreWithReboots::Create >> TFileStoreWithReboots::CreateAlterNoVersion |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:04.453808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:04.453827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.453832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:04.453837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:04.453847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:04.453851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:04.453860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:04.453924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:04.463903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:04.463921Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.466106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:04.466202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:04.466232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:04.468600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:04.468673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:04.468761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.468934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.469563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.469825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.469835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.469848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:04.469854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.469861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:04.469900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] 
recipient: [1:207:2209] 2024-11-21T17:48:04.471224Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:04.484365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:04.484426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.484476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:04.484517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:04.484525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.485130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.485158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:04.485200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.485209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:04.485214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:04.485218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:04.485616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.485628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:04.485632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:04.486025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.486034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.486041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.486048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.486591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:04.487032Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:04.487086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:04.487273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:04.487299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:04.487305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.487368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:04.487375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:04.487399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:04.487410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:04.487830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:04.487843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:04.487879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:04.487884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:04.487947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:04.487953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:04.487964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:04.487968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.487974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:04.487993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:04.487998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:04.488002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:04.488013Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:04.488018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:04.488022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... RefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:48:49.749598Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:49.749605Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:49.749653Z node 162 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:49.749665Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:49.749668Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [162:834:2751] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:48:49.749713Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:49.749743Z node 162 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot" took 36us result status StatusSuccess 2024-11-21T17:48:49.749847Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "NewTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } Children { Name: "NewTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:49.749904Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:49.749947Z node 162 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Table" took 44us result status StatusSuccess 2024-11-21T17:48:49.750011Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:49.750051Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/NewTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:49.750063Z node 162 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/NewTable1" took 13us result status StatusSuccess 2024-11-21T17:48:49.750092Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/NewTable1" PathDescription { Self { Name: "NewTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "NewTable1" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:49.750118Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/NewTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:49.750127Z node 162 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/NewTable2" took 9us result status StatusSuccess 2024-11-21T17:48:49.750153Z node 162 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/NewTable2" PathDescription { Self { Name: "NewTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "NewTable2" Columns 
{ Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::SimultaneousCreateDropNfs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_25 [GOOD] Test command err: 2024-11-21T17:47:18.214925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790618331387492:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.215259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00131d/r3tmp/tmpXmpDML/pdisk_1.dat 2024-11-21T17:47:18.242286Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:18.268972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6186, node 1 2024-11-21T17:47:18.300392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00131d/r3tmp/yandexWPGvrc.tmp 2024-11-21T17:47:18.300402Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00131d/r3tmp/yandexWPGvrc.tmp 2024-11-21T17:47:18.300433Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00131d/r3tmp/yandexWPGvrc.tmp 2024-11-21T17:47:18.300442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:18.302469Z INFO: TTestServer started on Port 2341 GrpcPort 6186 TClient is connected to server localhost:2341 2024-11-21T17:47:18.317599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.317639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.318657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:6186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.387678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.394961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.467988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.499368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790618331388244:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.499390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.499531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790618331388256:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.500349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.501110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790618331388289:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.501148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.502049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790618331388258:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:47:18.581276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.587393Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790618331388330:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.587948Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk1ZWEzM2YtOTY2ZjhkNmQtYWJjZjQ0NDgtN2I2YTQ5Yzg=, ActorId: [1:7439790618331388241:2304], ActorState: ExecuteState, TraceId: 01jd7xbsk1eexe2d5z5vt9s03t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.588430Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:18.589100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.649304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790618331388601:2596] 2024-11-21T17:47:23.214968Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790618331387492:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.214999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.759217Z :WriteToTopic_Demo_21_RestartNo INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.765757Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.783854Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790639806225401:2776] connected; active server actors: 1 2024-11-21T17:47:23.783913Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T17:47:23.784575Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.784624Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.784736Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.786373Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.786996Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.787148Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.787181Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.787195Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.787197Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.787200Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.787204Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.787286Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.787632Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.787656Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.787665Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.787670Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790639806225431:2428], now have 1 active actors on pipe 2024-11-21T17:47:23.832488Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.832505Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790639806225400:2775], now have 1 active actors on pipe 2024-11-21T17:47:23.834851Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439790618331387866 RawX2: 4294969474 } TxId: 281474976715673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } Explicit ... 
commits: assignId# 1, from# 2, to# 2, offset# 3 2024-11-21T17:48:48.317706Z :DEBUG: [/Root] [/Root] [c1f03458-9d356b77-4c5839fb-dedb59a1] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 3 } } 2024-11-21T17:48:49.307740Z node 10 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:48:49.307959Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0 describe result for acl check 2024-11-21T17:48:49.314396Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_B:0:1:2:3 2024-11-21T17:48:49.314423Z :INFO: [/Root] [/Root] [c1f03458-9d356b77-4c5839fb-dedb59a1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:49.315469Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 checking auth because of timeout 2024-11-21T17:48:49.315498Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 auth for : test-consumer 2024-11-21T17:48:49.315683Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 Handle describe topics response 2024-11-21T17:48:49.315713Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 auth is DEAD 2024-11-21T17:48:49.315737Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 auth ok: topics# 1, initDone# 1 2024-11-21T17:48:50.314583Z :INFO: [/Root] [/Root] [c1f03458-9d356b77-4c5839fb-dedb59a1] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:50.314605Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_B:0:1:2:3 2024-11-21T17:48:50.314616Z :INFO: [/Root] [/Root] [c1f03458-9d356b77-4c5839fb-dedb59a1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:50.314637Z :NOTICE: [/Root] [/Root] [c1f03458-9d356b77-4c5839fb-dedb59a1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:48:50.314647Z :DEBUG: [/Root] [/Root] [c1f03458-9d356b77-4c5839fb-dedb59a1] [] Abort session to cluster 2024-11-21T17:48:50.314679Z :NOTICE: [/Root] [/Root] [c1f03458-9d356b77-4c5839fb-dedb59a1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:48:50.314868Z :INFO: [/Root] [/Root] [8de464c3-50aab2dc-c494800d-374f5c1e] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:50.314885Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:2:0 2024-11-21T17:48:50.314890Z :INFO: [/Root] [/Root] [8de464c3-50aab2dc-c494800d-374f5c1e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4013 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:50.314917Z :NOTICE: [/Root] [/Root] [8de464c3-50aab2dc-c494800d-374f5c1e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:48:50.314922Z :DEBUG: [/Root] [/Root] [8de464c3-50aab2dc-c494800d-374f5c1e] [] Abort session to cluster 2024-11-21T17:48:50.314993Z :NOTICE: [/Root] [/Root] [8de464c3-50aab2dc-c494800d-374f5c1e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:48:50.315044Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 grpc read done: success# 0, data# { } 2024-11-21T17:48:50.315060Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 grpc read failed 2024-11-21T17:48:50.315067Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 grpc closed 2024-11-21T17:48:50.315080Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_13885725518704756992_v1 is DEAD 2024-11-21T17:48:50.315166Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:50.315183Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_10_2_13885725518704756992_v1 2024-11-21T17:48:50.315201Z :INFO: [/Root] SessionId [test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T17:48:50.315211Z :INFO: [/Root] SessionId [test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:48:50.315188Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439791006977385520:2568] destroyed 2024-11-21T17:48:50.315204Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_2_13885725518704756992_v1 2024-11-21T17:48:50.315217Z :DEBUG: [/Root] SessionId [test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:48:50.315229Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic_B] pipe [10:7439791006977385517:2565] disconnected; active server actors: 1 2024-11-21T17:48:50.315245Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic_B] pipe [10:7439791006977385517:2565] client test-consumer disconnected session test-consumer_10_2_13885725518704756992_v1 2024-11-21T17:48:50.315329Z :INFO: [/Root] SessionId [test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:48:50.315290Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13804334210846562208_v1 grpc read done: success# 0, data# { } 2024-11-21T17:48:50.315334Z :DEBUG: [/Root] SessionId [test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:48:50.315303Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13804334210846562208_v1 grpc read failed 2024-11-21T17:48:50.315305Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13804334210846562208_v1 grpc closed 2024-11-21T17:48:50.315316Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13804334210846562208_v1 is DEAD 2024-11-21T17:48:50.315350Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:50.315358Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_13804334210846562208_v1 2024-11-21T17:48:50.315361Z node 10 :PERSQUEUE DEBUG: [PQ: 
72075186224037894] server disconnected, pipe [10:7439790998387450690:2507] destroyed 2024-11-21T17:48:50.315366Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_13804334210846562208_v1 2024-11-21T17:48:50.315465Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439790998387450687:2504] disconnected; active server actors: 1 2024-11-21T17:48:50.315478Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439790998387450687:2504] client test-consumer disconnected session test-consumer_10_1_13804334210846562208_v1 2024-11-21T17:48:50.315525Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0 grpc read done: success: 0 data: 2024-11-21T17:48:50.315533Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0 grpc read failed 2024-11-21T17:48:50.315538Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0 grpc closed 2024-11-21T17:48:50.315540Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|410f44f4-5d84eee1-5db5fbab-3a8b4661_0 is DEAD 2024-11-21T17:48:50.315581Z :INFO: [/Root] SessionId [test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T17:48:50.315585Z :INFO: [/Root] SessionId [test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:48:50.315588Z :DEBUG: [/Root] SessionId [test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:48:50.315611Z :INFO: [/Root] SessionId [test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:48:50.315613Z :DEBUG: [/Root] SessionId [test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:48:50.315743Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:50.315754Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:50.315787Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:50.315794Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439791006977385426:2544] destroyed 2024-11-21T17:48:50.315812Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:50.315825Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439791006977385423:2544] destroyed 2024-11-21T17:48:50.315844Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:48:50.315902Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0 grpc read done: success: 0 data: 2024-11-21T17:48:50.315909Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0 grpc read failed 2024-11-21T17:48:50.315911Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0 grpc closed 2024-11-21T17:48:50.315913Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|872c097a-2d90a0e4-72e6b4d-8d3ddb1d_0 is DEAD 2024-11-21T17:48:50.316088Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:50.316118Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:50.316132Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439790998387450671:2498] destroyed 2024-11-21T17:48:50.316146Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> TSettingsValidation::TestDifferentDedupParams |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |79.0%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> YdbOlapStore::LogExistingUserId [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TFileStoreWithReboots::CreateWithIntermediateDirs >> TKesusTest::TestPassesUpdatedPropsToSession >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> TxUsage::WriteToTopic_Demo_16 [GOOD] >> TxUsage::WriteToTopic_Demo_17 >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain_reboots/unittest >> TSchemeShardTestExtSubdomainReboots::CreateExternalSubdomain-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:47.547082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:47.547102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.547107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:47.547111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T17:47:47.547124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:47.547128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:47.547138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:47.547207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:47.557223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:47.557238Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.558639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:47.558696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:47.558723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:47.560274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:47.560326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:47.560397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.560596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.561267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.561466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.561474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.561493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:47.561497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.561502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:47.561529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:47.562834Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:47.576718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:47.576797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: 
//MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.576848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:47.576881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:47.576887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.577535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.577554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:47.577588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.577595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:47.577598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:47.577601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:47.577893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.577900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:47.577904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:47.578159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.578165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.578169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.578174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.578582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:47.578909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:47.578946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:47.579105Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:47.579123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:47.579128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.579166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:47.579171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:47.579193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:47.579201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:47.579519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:47.579529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:47.579566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:47.579571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:47.579646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:47.579652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:47.579663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:47.579667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.579672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:47.579677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:47.579681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:47.579684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:47.579695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:47.579700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:47.579705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
2024-11-21T17:48:50.476382Z node 233 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.476396Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 1000727382120 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:50.476401Z node 233 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:48:50.476462Z node 233 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:48:50.476468Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:48:50.476487Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T17:48:50.476518Z node 233 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[233:417:2380], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:48:50.476788Z node 233 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:50.476793Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:50.476829Z node 233 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.476832Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [233:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:48:50.476895Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.476900Z node 233 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1003:0, ProgressState, NeedSyncHive: 0 2024-11-21T17:48:50.476902Z node 233 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T17:48:50.476965Z node 233 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:50.476973Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:48:50.476979Z node 233 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:48:50.476983Z 
node 233 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:48:50.476986Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 10 2024-11-21T17:48:50.476996Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:48:50.477379Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.477386Z node 233 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:48:50.477396Z node 233 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:50.477398Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:50.477402Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:50.477410Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [233:324:2316] message: TxId: 1003 2024-11-21T17:48:50.477414Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:50.477419Z node 233 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:50.477422Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:50.477449Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T17:48:50.477502Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:48:50.477723Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:50.477729Z node 233 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [233:635:2538] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:48:50.477821Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:50.477848Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusSuccess 2024-11-21T17:48:50.477903Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409548 Coordinators: 72075186233409549 
Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409547 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:50.477955Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409547 2024-11-21T17:48:50.477968Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409547 describe path "/MyRoot/USER_0" took 12us result status StatusSuccess 2024-11-21T17:48:50.477988Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409547 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409548 Coordinators: 72075186233409549 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409547 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72075186233409547, at schemeshard: 72075186233409547 2024-11-21T17:48:50.478017Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:50.478026Z node 233 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 10us result status StatusSuccess 2024-11-21T17:48:50.478058Z node 233 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |79.0%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> TTablesWithReboots::CopyTableAndDropWithReboots [GOOD] >> TFileStoreWithReboots::CreateAlter >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots [GOOD] >> TFileStoreWithReboots::AlterAssignDrop |79.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TKesusTest::TestSessionTimeoutAfterReboot >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] >> TTablesWithReboots::AlterTableFollowersWithReboots [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> TTablesWithReboots::CopyWithRebootsAtCommit [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TTxDataShardUploadRows::TestUploadRows >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |79.1%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2024-11-21T17:48:45.042822Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.042848Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.045826Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.045857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.067162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.067950Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=1431233590362741827, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T17:48:45.068042Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:45.078811Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=1431233590362741827) 2024-11-21T17:48:45.078962Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=12642241686495342205, path="/Root/Res", config={ }) 2024-11-21T17:48:45.079012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2024-11-21T17:48:45.089723Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=12642241686495342205) 2024-11-21T17:48:45.090063Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:145:2169]. Cookie: 6418115484271984376. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:45.090073Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:145:2169], cookie=6418115484271984376) 2024-11-21T17:48:45.090129Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:145:2169]. Cookie: 15366121635912824061. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2024-11-21T17:48:45.090135Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:145:2169], cookie=15366121635912824061) 2024-11-21T17:48:46.886138Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.886161Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.888733Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.888823Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.899885Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.899986Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=12657704264448146834, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T17:48:46.900041Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:46.920937Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], cookie=12657704264448146834) 2024-11-21T17:48:46.921111Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 8364948293512183108. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:46.921118Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=8364948293512183108) 2024-11-21T17:48:46.921168Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 913949514963607617. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:46.921171Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=913949514963607617) 2024-11-21T17:48:46.921221Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 10590776525844201771. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T17:48:46.921225Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=10590776525844201771) 2024-11-21T17:48:46.921255Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 17803349027783140571. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T17:48:46.921258Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=17803349027783140571) 2024-11-21T17:48:48.730293Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:48.730320Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:48.732476Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:48.732527Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:48.754082Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:48.754243Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:128:2154], cookie=7668018716446402443, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T17:48:48.754353Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:48.765147Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:128:2154], cookie=7668018716446402443) 2024-11-21T17:48:48.765282Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:138:2162], cookie=9934961193158940248, path="/Root/Res1", config={ }) 2024-11-21T17:48:48.765322Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2024-11-21T17:48:48.776058Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:138:2162], cookie=9934961193158940248) 2024-11-21T17:48:48.776216Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:143:2167], cookie=10613078520924119253, path="/Root/Res2", config={ }) 2024-11-21T17:48:48.776292Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2024-11-21T17:48:48.787016Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:143:2167], cookie=10613078520924119253) 2024-11-21T17:48:48.787187Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:148:2172]. Cookie: 14548487877431631321. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:48.787194Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:148:2172], cookie=14548487877431631321) 2024-11-21T17:48:48.787254Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:148:2172]. Cookie: 13647619228937233518. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:48.787257Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:148:2172], cookie=13647619228937233518) 2024-11-21T17:48:48.787301Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:148:2172]. Cookie: 2525140483741393719. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1019500 } } 2024-11-21T17:48:48.787306Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:148:2172], cookie=2525140483741393719) 2024-11-21T17:48:50.597769Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:50.597807Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:50.601259Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:50.601292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:50.622884Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:50.623011Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=9943433428749773064, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T17:48:50.623081Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:50.634022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=9943433428749773064) 2024-11-21T17:48:50.634224Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 15296832645186543014. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:50.634231Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=15296832645186543014) 2024-11-21T17:48:50.634289Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 9792438803720304610. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T17:48:50.634293Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=9792438803720304610) 2024-11-21T17:48:50.634325Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 15835895649048152718. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T17:48:50.634327Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=15835895649048152718) 2024-11-21T17:48:52.447385Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:52.447414Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:52.449974Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:52.450001Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:52.461094Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:52.461207Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=16012865264790149454, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2024-11-21T17:48:52.461249Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:48:52.482373Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=16012865264790149454) 2024-11-21T17:48:52.482530Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=2809950511468246393, path="/Root/Res", config={ }) 2024-11-21T17:48:52.482593Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2024-11-21T17:48:52.493433Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=2809950511468246393) 2024-11-21T17:48:52.493659Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 16016999732697274337. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:48:52.493668Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=16016999732697274337) 2024-11-21T17:48:52.493746Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:148:2172], cookie=11457596112880385221, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2024-11-21T17:48:52.493791Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2024-11-21T17:48:52.493819Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2024-11-21T17:48:52.504727Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:148:2172], cookie=11457596112880385221) 2024-11-21T17:48:52.504883Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:144:2168]. Cookie: 15892761431441037417. Data: { } 2024-11-21T17:48:52.504892Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:144:2168], cookie=15892761431441037417) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2024-11-21T17:48:36.881314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790952133019765:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:36.881336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001226/r3tmp/tmppKvrqB/pdisk_1.dat 2024-11-21T17:48:36.941330Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6283, node 1 2024-11-21T17:48:36.982095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:36.982122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:36.988505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:36.988520Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:36.988522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:36.988566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:36.988790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:48:37.067830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.068835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:37.068858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.073427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:48:37.073490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:48:37.073494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:48:37.081134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:48:37.081160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:48:37.084429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:48:37.084947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.092894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211317137, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:48:37.092914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:48:37.093005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:48:37.093797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:37.093864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:37.093878Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:48:37.093895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:48:37.093904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:48:37.093922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:48:37.094849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:48:37.094866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:48:37.094871Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T17:48:37.094888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:14474 2024-11-21T17:48:37.228829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.229436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:48:37.229495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:48:37.229504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:48:37.231279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2024-11-21T17:48:37.231360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:48:37.231458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:48:37.231476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 2024-11-21T17:48:37.232065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:37.232075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:37.232080Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:48:37.232131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:48:37.232133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:48:37.232134Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 waiting... 
2024-11-21T17:48:37.243329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:48:37.243391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:37.243523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:37.243548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:37.243583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:48:37.243595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:48:37.244808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:48:37.254994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:48:37.255051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:48:37.255092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:48:37.255125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:37.255144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:48:37.255164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:48:37.255193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:48:37.255213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439790956427987924:2289];tablet_id=72075186224037890;process=TTxInitSc ... 
cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728897Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728901Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728904Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728908Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728909Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728913Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728916Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728918Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728922Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728923Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728927Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728928Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728932Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728933Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728936Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728937Z node 28 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728940Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728941Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728943Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728946Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728946Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728950Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728952Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728954Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728957Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728957Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728961Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728962Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728964Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=1101520; 2024-11-21T17:48:51.728970Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728973Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400688:3411] TxId# 281474976715754 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.728974Z node 28 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728975Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400688:3411] TxId# 281474976715754 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS 2024-11-21T17:48:51.728978Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728982Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728983Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400689:3412] TxId# 281474976715755 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.728984Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400689:3412] TxId# 281474976715755 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS 2024-11-21T17:48:51.728987Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728987Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400690:3413] TxId# 281474976715756 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.728989Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400690:3413] TxId# 281474976715756 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS 2024-11-21T17:48:51.728991Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400700:3417] TxId# 281474976715757 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.728991Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728992Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400700:3417] TxId# 281474976715757 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS 2024-11-21T17:48:51.728995Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400697:3414] TxId# 281474976715758 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.728995Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.728996Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400697:3414] TxId# 281474976715758 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS 2024-11-21T17:48:51.728999Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400699:3416] TxId# 281474976715759 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.728999Z node 28 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.729000Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400699:3416] TxId# 281474976715759 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS 2024-11-21T17:48:51.729003Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400698:3415] TxId# 281474976715760 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.729004Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:752;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=749504; 2024-11-21T17:48:51.729005Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400698:3415] TxId# 281474976715760 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS 2024-11-21T17:48:51.729009Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400779:3422] TxId# 281474976715761 Received TEvProposeTransactionResult from ColumnShard# 72075186224037888 Status# SUCCESS 2024-11-21T17:48:51.729011Z node 28 :LONG_TX_SERVICE DEBUG: LongTxService.Commit [28:7439791019226400779:3422] TxId# 281474976715761 Received TEvProposeTransactionResult from ColumnShard# 72075186224037889 Status# SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2024-11-21T17:48:42.780513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790978799666636:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.780762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001732/r3tmp/tmprUYz2u/pdisk_1.dat 2024-11-21T17:48:42.830344Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64116, node 1 2024-11-21T17:48:42.840671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.840689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.840692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.840730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.881975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.882008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.883073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.930951Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.932657Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2430, port: 2430 2024-11-21T17:48:42.932685Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.988470Z node 1 :LDAP_AUTH_PROVIDER 
DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.032378Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.032591Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:43.032607Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.080388Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.128408Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.128985Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****XHvw (A8F79318) () has now valid token of ldapuser@ldap 2024-11-21T17:48:47.780650Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790978799666636:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:47.780694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:48:47.783160Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****XHvw (A8F79318) 2024-11-21T17:48:47.783189Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2430, port: 2430 2024-11-21T17:48:47.783218Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:47.832445Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:47.832616Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:2430 return no entries 2024-11-21T17:48:47.832756Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****XHvw (A8F79318) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:52.785396Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****XHvw (A8F79318) 2024-11-21T17:48:53.245107Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791028323180159:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:53.245244Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001732/r3tmp/tmpLuqVZ8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12633, node 2 2024-11-21T17:48:53.257790Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:53.260732Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:53.260739Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:53.260741Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:53.260763Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:53.340331Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:53.341693Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:12297, port: 12297 2024-11-21T17:48:53.341724Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:53.345640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:53.345663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:53.346728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:53.363525Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:53.408434Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:53.456410Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:53.500638Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****upUw (4AA2CD0F) () has now valid token of ldapuser@ldap 2024-11-21T17:48:53.706683Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791025641089114:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:53.706921Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001732/r3tmp/tmp75hAuM/pdisk_1.dat 2024-11-21T17:48:53.713464Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1804, node 3 2024-11-21T17:48:53.723316Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:53.723330Z node 3 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:53.723332Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:53.723369Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:53.744274Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:53.746226Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21610, port: 21610 2024-11-21T17:48:53.746252Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:53.761950Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:53.804475Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:53.809055Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:53.809088Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:53.810154Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:53.848680Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****kw4A (EEEB05BB) () has now valid token of ldapuser@ldap 2024-11-21T17:48:54.033038Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791029650088142:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:54.033069Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001732/r3tmp/tmpNl2eRr/pdisk_1.dat 2024-11-21T17:48:54.043704Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22585, node 4 2024-11-21T17:48:54.051642Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:54.051654Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:54.051656Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:54.051697Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:54.133207Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:54.133238Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:54.134266Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:54.229874Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:54.231320Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23964, port: 23964 2024-11-21T17:48:54.231350Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:54.268775Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:54.312958Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:54.360641Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****8j2Q (E2BC8A05) () has now valid token of ldapuser@ldap 2024-11-21T17:48:54.504651Z node 5 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791032811124783:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:54.504994Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001732/r3tmp/tmpWvomXy/pdisk_1.dat 2024-11-21T17:48:54.512143Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13767, node 5 2024-11-21T17:48:54.522312Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:54.522323Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:54.522325Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:54.522357Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:54.579524Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:54.580815Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:6516, port: 6516 2024-11-21T17:48:54.580838Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:54.601724Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:54.605225Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:54.605256Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:54.606297Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:54.644457Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T17:48:54.688404Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:54.688564Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:54.688577Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:54.732458Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:54.776439Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2024-11-21T17:48:54.776848Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****80bg (F82FCA3B) () has now valid token of ldapuser@ldap 2024-11-21T17:48:54.970274Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439791031088511623:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:54.970302Z node 6 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001732/r3tmp/tmp6cTSPm/pdisk_1.dat 2024-11-21T17:48:54.980554Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11459, node 6 2024-11-21T17:48:54.986720Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:54.986729Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:54.986730Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:54.986753Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:55.070569Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:55.070607Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:55.071693Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:55.104249Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:55.105714Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3901, port: 3901 2024-11-21T17:48:55.105744Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:55.116658Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:55.164418Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2024-11-21T17:48:55.164440Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:3901. 
Bad search filter 2024-11-21T17:48:55.164598Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****oRvA (51280271) () has now permanent error message 'Could not login via LDAP' ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:55.614397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:55.614419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.614423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:55.614427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:55.614438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:55.614442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:55.614449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:55.614517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:55.624810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:55.624830Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:55.636683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:55.636805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:55.636832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:55.642500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:55.642568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:55.642646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.642779Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.643282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.643518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.643525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.643536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:55.643542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.643548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:55.643586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:55.644650Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:55.663100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:55.663170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.663226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:55.663272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:55.663282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.663924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.663943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:55.663981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.663989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:55.663993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:55.663997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 2 -> 3 2024-11-21T17:47:55.664303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.664311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:55.664316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:55.664559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.664566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.664570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.664577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.665145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:55.665477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:55.665524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:55.665700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:55.665719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:55.665725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.665771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:55.665777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:55.665801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:55.665812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:55.666127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:55.666134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:47:55.666170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:55.666175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:55.666244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:55.666250Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:55.666260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:55.666264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.666269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:55.666274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:55.666278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:55.666282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:55.666292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:55.666297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:55.666301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 7594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 970662611295 } Origin: 72075186233409547 State: 5 TxId: 1005 Step: 0 Generation: 2 2024-11-21T17:48:54.190096Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409547, partId: 0 2024-11-21T17:48:54.190102Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 970662611295 } Origin: 72075186233409547 State: 5 TxId: 1005 Step: 0 Generation: 2 2024-11-21T17:48:54.190106Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1005:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190340Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190347Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1005:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:48:54.190351Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1005:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:48:54.190353Z node 226 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1005, done: 0, blocked: 1 2024-11-21T17:48:54.190359Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 1005:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1005 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:48:54.190373Z node 226 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 137 -> 129 2024-11-21T17:48:54.190384Z node 226 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:54.190390Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:54.190605Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190621Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190747Z node 226 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190752Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:54.190779Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:48:54.190794Z node 226 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190796Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [226:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T17:48:54.190799Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [226:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T17:48:54.190850Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190854Z node 226 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:54.190862Z node 226 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190864Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:54.190867Z node 226 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T17:48:54.190964Z node 226 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:54.190971Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:54.190974Z node 226 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:48:54.190976Z node 226 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:48:54.190978Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:54.191172Z node 226 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:54.191179Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:48:54.191182Z node 226 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:48:54.191184Z node 226 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:48:54.191186Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:54.191193Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:48:54.191439Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.191444Z node 226 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:54.191487Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:48:54.191504Z node 226 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:48:54.191506Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:48:54.191509Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T17:48:54.191511Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:48:54.191514Z node 226 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:48:54.191516Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:48:54.191526Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:48:54.191645Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:48:54.191779Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:48:54.192480Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 970662611295 } TabletId: 72075186233409547 State: 4 2024-11-21T17:48:54.192492Z node 226 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:48:54.192700Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:48:54.192750Z node 226 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:48:54.204566Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:48:54.204618Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:48:54.204743Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:48:54.204746Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:48:54.204754Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:54.205170Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:48:54.205179Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:48:54.205379Z node 226 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T17:48:54.205415Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:48:54.205425Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T17:48:54.205473Z node 226 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:48:54.205484Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:48:54.205487Z node 226 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [226:665:2629] TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T17:48:54.205523Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:48:54.205530Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:48:54.205535Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:48:54.205539Z node 226 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] Test command err: 2024-11-21T17:47:18.234707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790620885276695:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.235334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:18.267336Z 
node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001452/r3tmp/tmpfuxlHx/pdisk_1.dat 2024-11-21T17:47:18.296597Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9375, node 1 2024-11-21T17:47:18.323169Z INFO: TTestServer started on Port 19219 GrpcPort 9375 2024-11-21T17:47:18.320465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001452/r3tmp/yandexydT80N.tmp 2024-11-21T17:47:18.320479Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001452/r3tmp/yandexydT80N.tmp 2024-11-21T17:47:18.320539Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001452/r3tmp/yandexydT80N.tmp 2024-11-21T17:47:18.320561Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19219 2024-11-21T17:47:18.335024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.335055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.336129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:9375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.391885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.395301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.403595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:47:18.541503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620885277306:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.541529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620885277296:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.541552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.541880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620885277335:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.541892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.542186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.543745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790620885277310:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:18.580942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.586333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.603634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790620885277653:2597] 2024-11-21T17:47:23.234653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790620885276695:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.234700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.820887Z :WriteToTopic_Demo_2 INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.828524Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.832636Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790642360114464:2774] connected; active server actors: 1 2024-11-21T17:47:23.832749Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.833029Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.833092Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.833336Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.837168Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.837403Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.837480Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.837514Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.837518Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.837520Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.837524Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.837529Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.837539Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.837541Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.837738Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.837752Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.837757Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790642360114482:2434], now have 1 active actors on pipe 2024-11-21T17:47:23.881453Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.881473Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790642360114461:2773], now have 1 active actors on pipe 2024-11-21T17:47:23.883668Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439790620885276950 RawX2: 4294969492 } TxId: 281474976710674 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } 2024-11-21T17:47:23.883726Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710674, State UNKNOWN 2024-11-21T17:47:23.883736Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:47:23.883740Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710674, NewState PREPARING 2024-11-21T17:47:23.883800Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710674] save tx TxId: 281474976710674 State: PREPARED MinStep: 1732211243868 MaxStep: 18446744073709551615 Kind: KIND_CONFIG 
TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeco ... topic: 'topic_A' requestId: 2024-11-21T17:48:54.338391Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T17:48:54.338441Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T17:48:54.338433Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 1 (startOffset 0) session test-consumer_10_1_16820059505364525426_v1 2024-11-21T17:48:54.338455Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2024-11-21T17:48:54.338464Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2024-11-21T17:48:54.338467Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 2} (4-4) 2024-11-21T17:48:54.338472Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 3} (5-5) 2024-11-21T17:48:54.338476Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 4} (6-6) 2024-11-21T17:48:54.338480Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 0} (7-7) 2024-11-21T17:48:54.338473Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:48:54.338483Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 1} (8-8) 2024-11-21T17:48:54.338489Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 2} (9-9) 2024-11-21T17:48:54.338492Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 3} (10-10) 2024-11-21T17:48:54.338495Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 4} (11-11) 2024-11-21T17:48:54.338501Z :DEBUG: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] [] The application data is transferred to the client. Number of messages 11, size 8000000 bytes 2024-11-21T17:48:54.338508Z :DEBUG: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] [] Returning serverBytesSize = 0 to budget 0 11 2024-11-21T17:48:54.338532Z :DEBUG: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] [] Commit offsets [1, 12). Partition stream id: 1 2024-11-21T17:48:54.338658Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 1 end: 12 } } } } 2024-11-21T17:48:54.338705Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 12 prev 0 end 12 by cookie 3 2024-11-21T17:48:54.338719Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T17:48:54.338725Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T17:48:54.338908Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:48:54.338924Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 12 (startOffset 0) session test-consumer_10_1_16820059505364525426_v1 2024-11-21T17:48:54.338926Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T17:48:54.338937Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:48:54.338957Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T17:48:54.338971Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 1 endOffset 12 with cookie 2 2024-11-21T17:48:54.338989Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 1 2024-11-21T17:48:54.339120Z :DEBUG: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 1 } } 2024-11-21T17:48:54.339401Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 12 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:48:54.339415Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:48:54.339423Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T17:48:54.339441Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T17:48:54.339452Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 12 endOffset 12 with cookie 3 2024-11-21T17:48:54.339454Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 12 2024-11-21T17:48:54.339536Z :DEBUG: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 12 } } 2024-11-21T17:48:55.290380Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2024-11-21T17:48:55.290401Z :INFO: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:55.291741Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 checking auth because of timeout 2024-11-21T17:48:55.291775Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 auth for : test-consumer 2024-11-21T17:48:55.291906Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 Handle describe topics response 2024-11-21T17:48:55.291934Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session 
test-consumer_10_1_16820059505364525426_v1 auth is DEAD 2024-11-21T17:48:55.291944Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 auth ok: topics# 1, initDone# 1 2024-11-21T17:48:56.290664Z :INFO: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] Closing read session. Close timeout: 0.000000s 2024-11-21T17:48:56.290681Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2024-11-21T17:48:56.290689Z :INFO: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:56.290708Z :NOTICE: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:48:56.290718Z :DEBUG: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] [] Abort session to cluster 2024-11-21T17:48:56.290758Z :NOTICE: [/Root] [/Root] [63efa7ae-5544a975-34bfa0a-b21dff2d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:48:56.290957Z :INFO: [/Root] SessionId [test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2024-11-21T17:48:56.290962Z :INFO: [/Root] SessionId [test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0] PartitionId [0] Generation [2] Write session will now close 2024-11-21T17:48:56.290968Z :DEBUG: [/Root] SessionId [test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0] PartitionId [0] Generation [2] Write session: aborting 2024-11-21T17:48:56.290986Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 grpc read done: success# 0, data# { } 2024-11-21T17:48:56.291005Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 grpc read failed 2024-11-21T17:48:56.291009Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 grpc closed 2024-11-21T17:48:56.291014Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16820059505364525426_v1 is DEAD 2024-11-21T17:48:56.291048Z :INFO: [/Root] SessionId [test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2024-11-21T17:48:56.291052Z :DEBUG: [/Root] SessionId [test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0] PartitionId [0] Generation [2] Write session: destroy 2024-11-21T17:48:56.291130Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439791030828707574:2524] disconnected; active server actors: 1 2024-11-21T17:48:56.291136Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439791030828707574:2524] client test-consumer disconnected session test-consumer_10_1_16820059505364525426_v1 2024-11-21T17:48:56.291160Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:56.291167Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_16820059505364525426_v1 2024-11-21T17:48:56.291171Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439791030828707577:2527] destroyed 2024-11-21T17:48:56.291177Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0 grpc read done: success: 0 data: 2024-11-21T17:48:56.291177Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0 grpc read failed 2024-11-21T17:48:56.291180Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0 grpc closed 2024-11-21T17:48:56.291181Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|92910152-c4c74e5d-7262361b-bf40511_0 is DEAD 2024-11-21T17:48:56.291264Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:56.291264Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_16820059505364525426_v1 2024-11-21T17:48:56.291284Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:56.291286Z node 10 :PERSQUEUE DEBUG: [PQ: 
72075186224037894] server disconnected, pipe [10:7439791022238772873:2493] destroyed 2024-11-21T17:48:56.291298Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableFollowersWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:21.565425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:21.565443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.565447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:21.565450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:21.565457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:21.565459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:21.565465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.565526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:21.574595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:21.574609Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.576334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:21.576408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:21.576429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:21.578399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:21.578460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:21.578540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T17:48:21.578662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.579193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.579399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.579407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.579416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:21.579422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.579428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:21.579454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:21.580508Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.593920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:21.593983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.594033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:21.594084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:21.594091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.594615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.594635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:21.594659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.594666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:21.594668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T17:48:21.594671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:21.594966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.594973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:21.594976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:21.595169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.595174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.595178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.595182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.595598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:21.595931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:21.595973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:21.596131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.596154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:21.596160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.596203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:21.596209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.596238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.596249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.596594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.596605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.596626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.596629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:21.596677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.596682Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:21.596689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:21.596691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.596694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:21.596697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.596700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:21.596702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:21.596712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:21.596717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:21.596721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000006 MaxStep: 18446744073709551615 PrepareArriveTime: 94000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 36 } } 2024-11-21T17:48:56.559120Z node 133 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#1004:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 1004 MinStep: 5000006 MaxStep: 18446744073709551615 PrepareArriveTime: 94000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 36 } } 2024-11-21T17:48:56.559124Z node 133 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:48:56.559133Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 1004:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.559137Z node 133 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 3 -> 128 2024-11-21T17:48:56.559348Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.559364Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.559369Z node 133 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:56.559378Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1004 ready parts: 1/1 2024-11-21T17:48:56.559399Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1004 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:56.559595Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T17:48:56.559611Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1004 at step: 5000006 2024-11-21T17:48:56.559665Z node 133 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:56.559679Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 571230652517 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:56.559684Z node 133 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1004:0 HandleReply TEvOperationPlan, operationId: 1004:0, stepId: 5000006, at schemeshard: 72057594046678944 2024-11-21T17:48:56.559732Z node 133 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 129 2024-11-21T17:48:56.559745Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2024-11-21T17:48:56.560454Z node 133 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:56.560461Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:48:56.560502Z node 133 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:56.560507Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [133:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2024-11-21T17:48:56.560666Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.560671Z node 133 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:48:56.560771Z node 133 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:48:56.560779Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 
72057594046678944, cookie: 1004 2024-11-21T17:48:56.560783Z node 133 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:48:56.560787Z node 133 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2024-11-21T17:48:56.560791Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:48:56.560803Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:48:56.561137Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 175 } } 2024-11-21T17:48:56.561143Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:56.561157Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 175 } } 2024-11-21T17:48:56.561167Z node 133 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000006 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 175 } } 2024-11-21T17:48:56.561234Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:56.561237Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:56.561246Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:56.561250Z node 133 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:56.561256Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 571230652685 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:56.561264Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:56.561267Z node 133 
:FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.561270Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:56.561274Z node 133 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T17:48:56.561369Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:56.561609Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.561624Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.561666Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:56.561673Z node 133 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:48:56.561681Z node 133 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:48:56.561684Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:56.561689Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T17:48:56.561693Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:56.561697Z node 133 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:48:56.561700Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:48:56.561716Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:48:56.562052Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:56.562058Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:56.562101Z node 133 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:56.562115Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:56.562119Z node 133 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [133:545:2520] TestWaitNotification: OK eventTxId 1004 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::AlterTableSchemaFreezeUnfreezeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is 
[0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:21.528205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:21.528224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.528248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:21.528253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:21.528265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:21.528270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:21.528280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.528349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:21.535938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:21.535953Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.537560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:21.537633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:21.537654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:21.539596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:21.539645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:21.539698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.539868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.540532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.540784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.540793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.540805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:21.540812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T17:48:21.540818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:21.540851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:21.541984Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.556935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:21.556995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.557040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:21.557076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:21.557083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.557579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.557599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:21.557624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.557631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:21.557635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:21.557639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:21.557955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.557963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:21.557967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:21.558202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.558209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.558213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.558218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.558788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:21.559094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:21.559127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:21.559270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.559291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:21.559297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.559337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:21.559342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.559360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.559369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.559653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.559659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.559681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.559685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:21.559734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.559738Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:21.559746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:21.559750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2024-11-21T17:48:21.559755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:21.559759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.559763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:21.559766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:21.559774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:21.559778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:21.559782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... } } 2024-11-21T17:48:54.213570Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 532575947030 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:54.213572Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T17:48:54.213579Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 532575947030 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:54.213585Z node 124 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:54.213589Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 343 RawX2: 532575947030 } Origin: 72075186233409547 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:54.213596Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.213598Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T17:48:54.213618Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 532575947029 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:54.213620Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T17:48:54.213625Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 532575947029 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:54.213627Z node 124 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:48:54.213631Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
message: Source { RawX1: 342 RawX2: 532575947029 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:48:54.213633Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.213635Z node 124 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.213637Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:48:54.213640Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:54.213642Z node 124 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T17:48:54.213939Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:48:54.214209Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.214227Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.214238Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.214249Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.214263Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.214268Z node 124 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:48:54.214276Z node 124 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:48:54.214278Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:54.214281Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T17:48:54.214285Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:48:54.214287Z node 124 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:48:54.214290Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:48:54.214321Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:48:54.214759Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:48:54.214766Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:48:54.214805Z node 124 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:48:54.214818Z node 124 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:48:54.214820Z node 124 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [124:566:2529] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:48:54.214859Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:54.214885Z node 124 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "MyRoot/Table" took 33us result status StatusSuccess 2024-11-21T17:48:54.215033Z node 124 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key2" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key1" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnIds: 2 KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } ExecutorCacheSize: 42 TxReadSizeLimit: 100 PartitioningPolicy { MinPartitionsCount: 2 } FreezeState: Unfreeze } TableSchemaVersion: 3 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\010\000\000\000\000\000\000\000\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2024-11-21T17:46:39.322718Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1732211199322711 2024-11-21T17:46:39.448544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790450732451712:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.448882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.451346Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790453368943855:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:39.473984Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e1e/r3tmp/tmpuPp8yq/pdisk_1.dat 2024-11-21T17:46:39.475243Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:39.476317Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:39.501298Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32268, node 1 2024-11-21T17:46:39.510360Z node 1 :NET_CLASSIFIER WARN: distributable config is 
empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e1e/r3tmp/yandexosS1i9.tmp 2024-11-21T17:46:39.510379Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e1e/r3tmp/yandexosS1i9.tmp 2024-11-21T17:46:39.510440Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e1e/r3tmp/yandexosS1i9.tmp 2024-11-21T17:46:39.510493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:39.515464Z INFO: TTestServer started on Port 25248 GrpcPort 32268 TClient is connected to server localhost:25248 PQClient connected to localhost:32268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:39.538180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:46:39.548809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:39.548838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:39.550631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:46:39.575266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:39.575295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:39.576962Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:39.577266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:39.728020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790450732452596:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.728044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.728055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790450732452622:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.728854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:39.729818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790450732452654:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.729854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:39.732507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790450732452625:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:46:39.760929Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790453368944009:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:39.760947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.761014Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjYzYzE5NjEtNGQ0MDFiZWQtNGQyNzllNGEtNWQ3MmQ0Mzc=, ActorId: [2:7439790453368943983:2277], ActorState: ExecuteState, TraceId: 01jd7xakr80v02jprv7y6qzak8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:39.761502Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:39.790310Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790450732452790:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:39.790420Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQyNTQ3MGUtYmNhMWNmN2ItNzUyNDcwNzYtMzcwOTgzOGI=, ActorId: [1:7439790450732452593:2299], ActorState: ExecuteState, TraceId: 01jd7xakqf7mb5pwjzz8kvs0z8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:39.790591Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:39.855639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:39.895127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:32268", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T17:46:40.008925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd7xakzjbzzdnbax2sy8b16t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJlMmFjOGQtODE5Nzg4NjgtNmUyMGM4Y2UtNjMzYWRkOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790455027420389:2933] 2024-11-21T17:46:44.451940Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790450732451712:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.451990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:46:44.452045Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439790453368943855:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:44.452087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:46:45.032729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:32268 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:46:45.053427Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPI ... on=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:48:35.155122Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790949957880399:3681] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2024-11-21T17:48:35.155132Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:48:35.155397Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:35.155419Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439790949957880429:3681], now have 1 active actors on pipe 2024-11-21T17:48:35.155529Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T17:48:35.155591Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:48:35.155626Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:48:35.155692Z node 4 :PERSQUEUE INFO: new Cookie src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src_id 2024-11-21T17:48:35.155739Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:48:35.155776Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:48:35.156000Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:48:35.156010Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:48:35.156033Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:48:35.156141Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0 2024-11-21T17:48:35.156542Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:48:35.156560Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211315156 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:35.156598Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session established. 
Init response: last_seq_no: 2 session_id: "src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2024-11-21T17:48:36.156688Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790949957880399:3681] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2024-11-21T17:48:36.161196Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790949957880399:3681] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2024-11-21T17:48:36.161215Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439790949957880399:3681] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2024-11-21T17:48:38.964043Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:48:43.964268Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:48:48.964409Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:48:53.959437Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2024-11-21T17:48:53.959478Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2024-11-21T17:48:53.959681Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2024-11-21T17:48:53.959822Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2024-11-21T17:48:53.960085Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2024-11-21T17:48:53.960112Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 2024-11-21T17:48:53.964579Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 >>> Ready to answer: ok 2024-11-21T17:48:55.187837Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent 2024-11-21T17:48:55.188119Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session: try to update token 2024-11-21T17:48:55.188136Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2024-11-21T17:48:55.188511Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:48:55.188637Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:48:55.188848Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:48:55.188872Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:48:55.188919Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T17:48:55.189022Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T17:48:55.189139Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:48:55.189153Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:48:55.189173Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2024-11-21T17:48:55.189266Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2024-11-21T17:48:55.189321Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2024-11-21T17:48:55.189401Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1732211335189 2024-11-21T17:48:55.189444Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:48:55.190523Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] topic 'rt3.dc1--test-topicCounters. CacheSize 0 CachedBlobs 0 2024-11-21T17:48:55.190540Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 2024-11-21T17:48:55.190550Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:48:55.190564Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T17:48:55.190570Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:48:55.190635Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T17:48:55.190863Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:48:55.190931Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 1000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T17:48:55.190939Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2024-11-21T17:48:55.190942Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session: acknoledged message 1 2024-11-21T17:48:55.191178Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0 grpc read done: success: 0 data: 2024-11-21T17:48:55.191190Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0 grpc read failed 2024-11-21T17:48:55.191195Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0 grpc closed 2024-11-21T17:48:55.191199Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0 is DEAD 2024-11-21T17:48:55.191308Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:55.191374Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2024-11-21T17:48:55.191405Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:55.191431Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439790949957880429:3681] destroyed 2024-11-21T17:48:55.191436Z :ERROR: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T17:48:55.191440Z :ERROR: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2024-11-21T17:48:55.191441Z :INFO: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session will now close 2024-11-21T17:48:55.191441Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:48:55.191455Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session: aborting 2024-11-21T17:48:55.195139Z :DEBUG: [/Root] SessionId [src_id|3cc0c72f-58a1fcaa-defe0649-a0f107d1_0] MessageGroupId [src_id] Write session: destroy >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyWithRebootsAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:01.795676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:01.795694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:01.795697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:01.795700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:01.795709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:01.795712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:01.795718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:01.795777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T17:48:01.802937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:01.802955Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:01.804751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:01.804835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:01.804882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:01.806926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:01.806986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:01.807075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:01.807235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:01.807769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:01.808011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:01.808021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:01.808033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:01.808039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:01.808045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:01.808089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:01.809191Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:01.821636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:01.821686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.821731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:01.821768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:01.821772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.822257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:01.822278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:01.822301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.822313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:01.822316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:01.822319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:01.822606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.822615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:01.822619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:01.822857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.822863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.822866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:01.822879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:01.823283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:01.823583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:01.823618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:01.823763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:01.823782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:01.823787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:01.823830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:01.823834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:01.823851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:01.823862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:01.824175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:01.824181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:01.824200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:01.824203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:01.824276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:01.824281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:01.824287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:01.824290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:01.824292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:01.824295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:01.824298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:01.824300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:01.824307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:01.824310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:01.824312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
5186233409549, at schemeshard: 72057594046678944 2024-11-21T17:48:57.119725Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:48:57.119727Z node 212 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:48:57.120101Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:57.120118Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:57.120125Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:57.120131Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:57.120183Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:57.120188Z node 212 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2024-11-21T17:48:57.120193Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:48:57.120196Z node 212 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T17:48:57.120204Z node 212 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T17:48:57.120206Z node 212 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T17:48:57.121060Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:48:57.121073Z node 212 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:48:57.121082Z node 212 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:48:57.121085Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:57.121089Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:48:57.121092Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:48:57.121096Z node 212 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:48:57.121098Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:48:57.121119Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:48:57.121122Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification wait txId: 1003 2024-11-21T17:48:57.121485Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:48:57.121491Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 
2024-11-21T17:48:57.121528Z node 212 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:48:57.121539Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:48:57.121543Z node 212 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [212:631:2572] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:48:57.121587Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:57.121614Z node 212 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 36us result status StatusSuccess 2024-11-21T17:48:57.121681Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "NewTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:57.121712Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NewTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:48:57.121734Z node 212 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NewTable" took 23us result status StatusSuccess 2024-11-21T17:48:57.121843Z node 212 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/NewTable" PathDescription { Self { Name: "NewTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "NewTable" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "key3" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnNames: "key2" KeyColumnNames: "key3" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\003\000\004\000\000\000\377\377\377\177\000\000\000\200\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop >> TFileStoreWithReboots::CheckFileStoreHDDLimits |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> BasicUsage::TWriteSession_WriteEncoded [GOOD] Test command err: 2024-11-21T17:47:18.215349Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790620755616498:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.215420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:18.240383Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001369/r3tmp/tmpzGi189/pdisk_1.dat 2024-11-21T17:47:18.275687Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62119, node 1 2024-11-21T17:47:18.299039Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001369/r3tmp/yandexMsauWT.tmp 2024-11-21T17:47:18.299051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001369/r3tmp/yandexMsauWT.tmp 2024-11-21T17:47:18.299105Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001369/r3tmp/yandexMsauWT.tmp 2024-11-21T17:47:18.299132Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration 2024-11-21T17:47:18.301564Z INFO: TTestServer started on Port 15298 GrpcPort 62119 TClient is connected to server localhost:15298 2024-11-21T17:47:18.317611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.317664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.319724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:62119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.391322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.393957Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.404690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.525639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620755617123:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.525673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620755617098:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.525733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.526387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.526467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790620755617156:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.526493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.528028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790620755617127:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:18.581044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.586606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.594616Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790620755617325:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.594712Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjg1NzUzMDQtM2Y2OTRmZDMtNDhhMjEzYWUtOTQwMDAyNTI=, ActorId: [1:7439790620755617095:2304], ActorState: ExecuteState, TraceId: 01jd7xbskxamteedf88dwx0h49, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.595173Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:18.603507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790620755617468:2597] 2024-11-21T17:47:23.215514Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790620755616498:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.215570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.743277Z :ConnectToYDB INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.757915Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.784706Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790642230454270:2782] connected; active server actors: 1 2024-11-21T17:47:23.784777Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.784889Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.784942Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.785274Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.786362Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.786976Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.787205Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.787292Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.787303Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.787305Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.787307Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.787311Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.787414Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.787647Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.787660Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.787670Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.787676Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790642230454305:2428], now have 1 active actors on pipe 2024-11-21T17:47:23.834725Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.834752Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790642230454269:2781], now have 1 active actors on pipe 2024-11-21T17:47:23.834759Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:47:23.837445Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439790620755616734:2178] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } Expl ... 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2024-11-21T17:48:56.898467Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 partition ready for read: partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1732211331893, sizeLag# 519 2024-11-21T17:48:56.898474Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1TEvPartitionReady. 
Aval parts: 1 2024-11-21T17:48:56.898479Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 performing read request: guid# 70addf86-9f88bafe-9facb1a6-ca27a932, from# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), count# 4, size# 622, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T17:48:56.898495Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 READ FROM TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1)maxCount 4 maxSize 622 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 70addf86-9f88bafe-9facb1a6-ca27a932 2024-11-21T17:48:56.898519Z node 12 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T17:48:56.898519Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T17:48:56.898524Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T17:48:56.898552Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 Topic 'test-topic' partition 0 user test-consumer offset 0 count 4 size 622 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T17:48:56.898558Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:48:56.898589Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2024-11-21T17:48:56.898597Z node 12 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:48:56.898625Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:48:56.898689Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 98 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732211331893 CreateTimestampMS: 1732211331893 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 91 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732211331894 CreateTimestampMS: 1732211331893 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1732211331894 CreateTimestampMS: 1732211331893 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." 
SourceId: "" SeqNo: 4 WriteTimestampMS: 1732211331894 CreateTimestampMS: 1732211331893 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 29 RealReadOffset: 3 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T17:48:56.898733Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T17:48:56.898740Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 70addf86-9f88bafe-9facb1a6-ca27a932 has messages 1 2024-11-21T17:48:56.898763Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 read done: guid# 70addf86-9f88bafe-9facb1a6-ca27a932, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 503 2024-11-21T17:48:56.898773Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 response to read: guid# 70addf86-9f88bafe-9facb1a6-ca27a932 2024-11-21T17:48:56.898834Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 Process answer. Aval parts: 0 2024-11-21T17:48:56.898937Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] Got ReadResponse, serverBytesSize = 503, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428297 2024-11-21T17:48:56.898962Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428297 2024-11-21T17:48:56.899038Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2024-11-21T17:48:56.899048Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] Returning serverBytesSize = 503 to budget 2024-11-21T17:48:56.899052Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] In ContinueReadingDataImpl, ReadSizeBudget = 503, ReadSizeServerDelta = 52428297 2024-11-21T17:48:56.899109Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T17:48:56.899137Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T17:48:56.899145Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T17:48:56.899148Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (2-2) 2024-11-21T17:48:56.899151Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (3-3) 2024-11-21T17:48:56.899159Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] The application data is transferred to the client. Number of messages 4, size 14 bytes 2024-11-21T17:48:56.899164Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] Returning serverBytesSize = 0 to budget 2024-11-21T17:48:56.899182Z :INFO: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] Closing read session. 
Close timeout: 0.000000s 2024-11-21T17:48:56.899187Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:0 2024-11-21T17:48:56.899192Z :INFO: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:48:56.899209Z :NOTICE: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:48:56.899213Z :DEBUG: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] [] Abort session to cluster 2024-11-21T17:48:56.899214Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 grpc read done: success# 1, data# { read_request { bytes_size: 503 } } 2024-11-21T17:48:56.899269Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 got read request: guid# 422dcd6b-c3d5bf5-664028f9-c2d8a2fc 2024-11-21T17:48:56.899305Z :NOTICE: [/Root] [/Root] [60f8d676-c0f6678b-508b4f64-432f838c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:48:56.899325Z :INFO: [/Root] SessionId [12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T17:48:56.899327Z :INFO: [/Root] SessionId [12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:48:56.899332Z :DEBUG: [/Root] SessionId [12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:48:56.899346Z :INFO: [/Root] SessionId [12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:48:56.899348Z :DEBUG: [/Root] SessionId [12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:48:56.899408Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 grpc read done: success# 0, data# { } 2024-11-21T17:48:56.899417Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 grpc read failed 2024-11-21T17:48:56.899421Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 closed 2024-11-21T17:48:56.899505Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_9171140986745436663_v1 is DEAD 2024-11-21T17:48:56.899545Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:56.899554Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_12_1_9171140986745436663_v1 2024-11-21T17:48:56.899558Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [12:7439791041886442522:2538] destroyed 2024-11-21T17:48:56.899565Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0 grpc read done: success: 0 data: 2024-11-21T17:48:56.899570Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0 grpc read failed 2024-11-21T17:48:56.899572Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0 grpc closed 2024-11-21T17:48:56.899574Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 12f2e3f3-d2b7ad2-4217416-69a0b7bf|fba104a7-94021b62-e6e8e183-e3c8bde2_0 is DEAD 2024-11-21T17:48:56.899600Z node 12 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_12_1_9171140986745436663_v1 2024-11-21T17:48:56.899618Z node 12 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [12:7439791041886442519:2535] disconnected; active server actors: 1 2024-11-21T17:48:56.899625Z node 12 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [12:7439791041886442519:2535] client test-consumer disconnected session test-consumer_12_1_9171140986745436663_v1 2024-11-21T17:48:56.899759Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:48:56.899803Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:48:56.899817Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [12:7439791016116638429:2454] destroyed 2024-11-21T17:48:56.899824Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:58.749694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:58.749722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:58.749727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:58.749732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:58.749739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:58.749742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:58.749751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:58.749824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:58.758269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:58.758291Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:58.760716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:58.761221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:58.761248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:58.762447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:58.762605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:58.762670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:58.762741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:58.763359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:58.763566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:58.763572Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:58.763601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:58.763607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:58.763612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:58.763621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.764940Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:58.776318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:58.776384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.776436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:58.776471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:58.776476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.776993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:58.777012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:58.777052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.777059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:58.777063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:58.777066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:58.777338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.777345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:58.777348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:58.777584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.777590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:48:58.777594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:58.777599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:58.778008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:58.778320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:58.778368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:58.778482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:58.778498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:58.778503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:58.778544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:58.778548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:58.778570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:58.778579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:58.778937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:58.778942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:58.778973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:58.778976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:58.779037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.779041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:58.779051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:58.779054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T17:48:58.779058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:58.779061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:58.779064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:58.779066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:58.779072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:58.779076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:58.779079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:58.779267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:58.779276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:58.779279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:58.779282Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:58.779285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:58.779293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
T17:48:59.039119Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 5 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:48:59.039158Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type FileStore, boot OK, tablet id 72075186233409549 2024-11-21T17:48:59.039175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T17:48:59.039178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 110, shardIdx: 72057594046678944:4, partId: 0 2024-11-21T17:48:59.039190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T17:48:59.039198Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:48:59.039205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2024-11-21T17:48:59.039219Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 2 -> 3 2024-11-21T17:48:59.039408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T17:48:59.039713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T17:48:59.040027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:59.040049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:59.040054Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:59.040841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275054593 2024-11-21T17:48:59.040868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 0, tablet: 72075186233409549 2024-11-21T17:48:59.041596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2024-11-21T17:48:59.041620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxId: 110 Origin: 72075186233409549 Status: OK 2024-11-21T17:48:59.041626Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#110:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T17:48:59.041631Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 3 -> 128 2024-11-21T17:48:59.042095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 
2024-11-21T17:48:59.042215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:59.042221Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:59.042227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 110 ready parts: 1/1 2024-11-21T17:48:59.042248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 110 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:59.042543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:110 msg type: 269090816 2024-11-21T17:48:59.042560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 110 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 110 at step: 5000009 2024-11-21T17:48:59.042613Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:59.042629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 110 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:59.042634Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#110:0 HandleReply TEvOperationPlan, step: 5000009, at schemeshard: 72057594046678944 2024-11-21T17:48:59.042647Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 128 -> 240 2024-11-21T17:48:59.042667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:48:59.042675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: Erasing txId 110 2024-11-21T17:48:59.042967Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:59.042972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:59.042995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:48:59.043008Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:59.043012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, path id: 1 2024-11-21T17:48:59.043016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 110, path id: 5 2024-11-21T17:48:59.043064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2024-11-21T17:48:59.043069Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 110:0 ProgressState 2024-11-21T17:48:59.043078Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#110:0 progress is 1/1 2024-11-21T17:48:59.043081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T17:48:59.043085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: false 2024-11-21T17:48:59.043089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2024-11-21T17:48:59.043093Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 110:0 2024-11-21T17:48:59.043096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 110:0 2024-11-21T17:48:59.043116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:48:59.043120Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 110, publications: 2, subscribers: 0 2024-11-21T17:48:59.043124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2024-11-21T17:48:59.043127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2024-11-21T17:48:59.043211Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:59.043219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:59.043225Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 110 2024-11-21T17:48:59.043228Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2024-11-21T17:48:59.043232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:48:59.043315Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:59.043322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2024-11-21T17:48:59.043325Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 110 2024-11-21T17:48:59.043328Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T17:48:59.043332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:48:59.043338Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 110, subscribers: 0 2024-11-21T17:48:59.044054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2024-11-21T17:48:59.044097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2024-11-21T17:48:59.044149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2024-11-21T17:48:59.044154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2024-11-21T17:48:59.044210Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2024-11-21T17:48:59.044225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2024-11-21T17:48:59.044247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:669:2620] TestWaitNotification: OK eventTxId 110 >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2024-11-21T17:48:57.380073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:48:57.380474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:57.380495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017ac/r3tmp/tmpAzHaHQ/pdisk_1.dat 2024-11-21T17:48:57.471358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:48:57.486427Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:57.528358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:57.528385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:57.538718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:57.641591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:57.654511Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:48:57.654646Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:48:57.654707Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:48:57.654739Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:57.661068Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:48:57.661175Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:57.661190Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:57.661281Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:48:57.661286Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:48:57.661290Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:48:57.661317Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:57.663472Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:57.663509Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:57.663523Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:48:57.663526Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:57.663528Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:48:57.663532Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:57.663602Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:57.663607Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:57.663696Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:57.663707Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:48:57.663715Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:57.663719Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:57.663723Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:48:57.663728Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:57.663732Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:57.663737Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:48:57.663740Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:48:57.663743Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:48:57.663745Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:57.663749Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:57.663762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:48:57.663766Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:48:57.663779Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:57.663819Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:48:57.663826Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:57.663836Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:57.663841Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:48:57.663843Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:48:57.663847Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:48:57.663849Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:48:57.663875Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:48:57.663878Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:48:57.663880Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:48:57.663883Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:48:57.663888Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:48:57.663890Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:48:57.663892Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:48:57.663894Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:48:57.663898Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:48:57.664060Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:48:57.664065Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:57.674286Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:57.674309Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:48:57.674335Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:48:57.674346Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:48:57.674357Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:48:57.848328Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:57.848351Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:57.848359Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:48:57.848381Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:48:57.848386Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:48:57.848414Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:48:57.848424Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:48:57.848429Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:48:57.848434Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:48:57.849242Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:48:57.849263Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:57.849391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:57.849399Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:57.849407Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:57.849413Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:48:57.849418Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:48:57.849426Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... e execution plan for [3000:281474976715667] at 72075186224037890 executing on unit ReadTableScan 2024-11-21T17:48:58.830763Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompleteOperation 2024-11-21T17:48:58.830766Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2024-11-21T17:48:58.830806Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2024-11-21T17:48:58.830810Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2024-11-21T17:48:58.830813Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T17:48:58.830816Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2024-11-21T17:48:58.830821Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2024-11-21T17:48:58.830840Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T17:48:58.830843Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2024-11-21T17:48:58.830847Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:58.830850Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T17:48:58.830853Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T17:48:58.830855Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T17:48:58.841131Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:48:58.841154Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:48:58.841161Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2024-11-21T17:48:58.841175Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 
72075186224037890 at tablet 72075186224037890 send result to client [1:1076:2874], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:48:58.841183Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:48:59.253719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:48:59.253767Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:48:59.253797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017ac/r3tmp/tmpFjeWw9/pdisk_1.dat 2024-11-21T17:48:59.341160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:48:59.355047Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:59.397225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:59.397261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:59.407873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:59.511717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:59.524275Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T17:48:59.524340Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:59.532800Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:59.532854Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:59.533049Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:48:59.533062Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:48:59.533070Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:48:59.533130Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:59.533145Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:59.533175Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:59.533193Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T17:48:59.533198Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:59.533203Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:48:59.533208Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.533455Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:59.533471Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:48:59.533484Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# 
[0:0:0] 2024-11-21T17:48:59.533494Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.533501Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:59.533511Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:59.533517Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.533556Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:59.533622Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:59.533644Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:59.533983Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.544399Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:59.544456Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:48:59.719304Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:665:2557], serverId# [2:667:2559], sessionId# [0:0:0] 2024-11-21T17:48:59.719486Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:48:59.719495Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.719652Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.719659Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:48:59.719667Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:48:59.719723Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:48:59.719753Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:48:59.719793Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.719803Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:48:59.719875Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:48:59.719939Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:59.720157Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:48:59.720161Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.720322Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:48:59.720328Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:48:59.720334Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.720501Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.720508Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:59.720512Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:48:59.720526Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:48:59.720535Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:48:59.720543Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.720621Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.720826Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:48:59.720833Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:48:59.720942Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:48:59.721645Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:701:2585], serverId# [2:702:2586], sessionId# [0:0:0] 2024-11-21T17:48:59.721671Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> TBackupTests::BackupUuidColumn[Zstd] >> TBackupTests::BackupUuidColumn[Raw] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> TBackupTests::ShouldSucceedOnLargeData[Raw] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup [GOOD] Test command err: 2024-11-21T17:48:57.419424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:48:57.419737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:57.419751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00176e/r3tmp/tmpS9vNQt/pdisk_1.dat 2024-11-21T17:48:57.508167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:48:57.524015Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:57.566493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:57.566527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:57.577104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:57.680469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:57.695186Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:646:2546] 2024-11-21T17:48:57.695266Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:57.700571Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:57.700626Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:57.700754Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:48:57.700760Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:48:57.700765Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:48:57.700800Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:57.703187Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:57.703241Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:57.703264Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:680:2565] 2024-11-21T17:48:57.703269Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:57.703275Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:48:57.703278Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:57.703596Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:652:2548] 2024-11-21T17:48:57.703625Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:57.704531Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:57.704558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T17:48:57.704583Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:637:2540], serverId# [1:678:2563], sessionId# [0:0:0] 2024-11-21T17:48:57.704611Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:57.704617Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:57.704624Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:57.704628Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:57.704663Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:57.704725Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:57.704738Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:57.704926Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:57.704951Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:57.705047Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:48:57.705053Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:48:57.705058Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:48:57.705085Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:57.705089Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:48:57.705099Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:57.705109Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:693:2573] 2024-11-21T17:48:57.705111Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:48:57.705113Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:48:57.705116Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:48:57.705200Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:48:57.705206Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:48:57.705280Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:48:57.705286Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:57.705290Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:48:57.705293Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:48:57.705449Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:638:2541], serverId# [1:686:2567], sessionId# [0:0:0] 2024-11-21T17:48:57.705488Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:48:57.705511Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:57.705520Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:48:57.705622Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:656:2550] 2024-11-21T17:48:57.705645Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:57.706662Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:57.706687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:57.706761Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2024-11-21T17:48:57.706766Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2024-11-21T17:48:57.706770Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2024-11-21T17:48:57.706792Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:57.706797Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2024-11-21T17:48:57.706804Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:57.706810Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:721:2586] 2024-11-21T17:48:57.706813Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2024-11-21T17:48:57.706815Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2024-11-21T17:48:57.706818Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T17:48:57.706854Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:658:2552] 2024-11-21T17:48:57.706869Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:57.707591Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2024-11-21T17:48:57.707605Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2024-11-21T17:48:57.707649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2024-11-21T17:48:57.707653Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:57.707658Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T17:48:57.707662Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T17:48:57.707729Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [1:640:2543], serverId# [1:694:2574], sessionId# [0:0:0] 2024-11-21T17:48:57.707756Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:57.707763Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:48:57.707775Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2024-11-21T17:48:57.707801Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037891 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:57.707808Z node 1 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037891 2024-11-21T17:48:57.707848Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:57.707859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:57.707920Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2024-11-21T17:48:57.707924Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2024-11-21T17:48:57.707928Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2024-11-21T17:48:57.707960Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:57.707964Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2024-11-21T17:48:57.707970Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:57.707977Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:727:2589] 2024-11-21T17:48:57.707979Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 ... TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:59.568590Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:59.568607Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:59.568618Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:647:2545] 2024-11-21T17:48:59.568622Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:59.568625Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:48:59.568628Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.568712Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:59.568719Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:48:59.568783Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T17:48:59.568795Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.568799Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:59.568805Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:59.568809Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.568831Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:59.568876Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:59.568889Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:59.569123Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.579385Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:59.579424Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:48:59.753331Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T17:48:59.753499Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:48:59.753523Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.753765Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.753773Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:48:59.753781Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:48:59.753829Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:48:59.753857Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:48:59.753898Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.753908Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:48:59.753977Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:48:59.754044Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:59.754241Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:48:59.754246Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.754364Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:48:59.754370Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:48:59.754376Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.754487Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.754492Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:59.754496Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:48:59.754509Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:48:59.754516Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:48:59.754524Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2024-11-21T17:48:59.754639Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.754843Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:48:59.754913Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:48:59.754918Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:48:59.756105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:59.756120Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:59.756159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:59.756767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:48:59.757345Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.942751Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.943107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:00.004177Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xewfbewj9kwxtxw5gww9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmNhYjZjZjMtNzY2MjBkNDUtN2NjZjIxNzctYzU4MzVmOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:00.005192Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T17:49:00.005271Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:00.016031Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:00.016083Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:00.037299Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xewqj3kxk3vkx00zata3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTNlMTg0MGEtNTMzNjcwMTMtNGMyMWIyNzEtMzEyNzQ0NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:00.037787Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2024-11-21T17:49:00.039098Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2024-11-21T17:49:00.049524Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2024-11-21T17:49:00.049555Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:00.049571Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2024-11-21T17:49:00.049808Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2024-11-21T17:49:00.049821Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:00.059037Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xewrjf3mejxcjxyykkqfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTNlMTg0MGEtNTMzNjcwMTMtNGMyMWIyNzEtMzEyNzQ0NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:00.059190Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:00.069663Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:00.069722Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:00.071855Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTNlMTg0MGEtNTMzNjcwMTMtNGMyMWIyNzEtMzEyNzQ0NjU=, ActorId: [3:821:2658], ActorState: ExecuteState, TraceId: 01jd7xewrjf3mejxcjxyykkqfb, Create QueryResponse for error on request, msg: 2024-11-21T17:49:00.072075Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xewrjf3mejxcjxyykkqfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTNlMTg0MGEtNTMzNjcwMTMtNGMyMWIyNzEtMzEyNzQ0NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:49:00.072169Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:00.072330Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:00.072338Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TxUsage::WriteToTopic_Demo_17 [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> TColumnShardTestSchema::RebootColdTiers [GOOD] |79.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.211122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.211145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.211149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.211152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.211157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.211159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.211165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.211242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.220882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.220935Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.223728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.224424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.224482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.225883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T17:49:00.226080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.226178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.226272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.227274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.227529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.227539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.227577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.227583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.227589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.227602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.228761Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.242785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.242870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.242930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.242976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.242982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.243824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.243865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.243922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.243932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.243935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.243939Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.244279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.244288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.244290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.244668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.244679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.244685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.244705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.245258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.245706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.245763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.245953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.245981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.245991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.246046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.246053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.246079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.246090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:00.246579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.246590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.246633Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.246638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.246716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.246723Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.246735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.246740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.246746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.246751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.246756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.246759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.246772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.246778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.246781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.247092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.247109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.247114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.247118Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.247124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.247140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.361644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:49:00.361671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:49:00.361814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.361852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.361859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:49:00.361880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:49:00.361914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.505986Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T17:49:00.508270Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# [1:404:2377] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: 2024-11-21T17:49:00.509276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 Host: localhost:62395 2024-11-21T17:49:00.509295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5540C676-7833-4685-B030-176FBA6381F7 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T17:49:00.509372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.509376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:00.509468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.509475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.509559Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata 
TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:49:00.510054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.510066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.510070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:00.510074Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:00.510078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:00.510095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:62395 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4EA26704-630D-4EAF-BC85-4555B4438909 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T17:49:00.510774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:49:00.510903Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:62395 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6A9B000B-FE06-40E2-A1B1-E93DB27219E3 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T17:49:00.511651Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T17:49:00.511662Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T17:49:00.511675Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:62395 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E6907099-7EB4-4085-8EA4-252B900943CD amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2024-11-21T17:49:00.512202Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2024-11-21T17:49:00.512207Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T17:49:00.512242Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:00.513513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.513527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:00.513547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.513559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.513572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.513576Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.513581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:00.513587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:00.513624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.513993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.514019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.514024Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:00.514032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2024-11-21T17:49:00.514035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.514039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:00.514048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:49:00.514052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.514056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:00.514059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:00.514079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.514413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:00.514421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 |79.2%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.325089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.325110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.325113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.325116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.325120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.325123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.325129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.325198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.333151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.333167Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.335101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.335623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
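The export traced above uploads four objects in sequence for shard 0: metadata.json, permissions.pb, scheme.pb, then data_00.csv, each as a plain single-part PUT ("multipart# 0, uploadId# (empty maybe)" in the Finish line). A rough client-side sketch of an equivalent request, assuming Python with the requests package, a placeholder endpoint (the mock's port is ephemeral, e.g. localhost:62395 above), and placeholder payloads rather than the real protobuf/CSV bodies produced by the datashard export scan:

    import base64
    import hashlib

    import requests  # assumed available; any HTTP client would do

    # Hypothetical endpoint standing in for the test's ephemeral S3 mock.
    ENDPOINT = "http://localhost:62395"

    def put_object(key: str, body: bytes) -> requests.Response:
        """Issue a PUT roughly matching the requests recorded in the log."""
        headers = {
            "Content-Type": "binary/octet-stream",
            # content-md5 as seen in the log: base64 of the body's MD5 digest.
            "Content-MD5": base64.b64encode(hashlib.md5(body).digest()).decode(),
            "x-amz-storage-class": "STANDARD",
        }
        return requests.put(f"{ENDPOINT}/{key}", data=body, headers=headers)

    # Upload order as recorded in the log for a single-shard backup.
    for key, body in [
        ("metadata.json", b"{}"),     # placeholder payloads; the real bodies
        ("permissions.pb", b""),      # are protobuf/CSV emitted by the
        ("scheme.pb", b""),           # export actor, not shown in the log
        ("data_00.csv", b"..."),
    ]:
        resp = put_object(key, body)
        print(key, resp.status_code, resp.headers.get("ETag"))

This is only an illustration of the recorded HTTP traffic; the test itself drives the uploads through the DATASHARD_BACKUP export actor, not through an external client.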
2024-11-21T17:49:00.335651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.336757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:00.336905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.337005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.337069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.337904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.338111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.338119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.338147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.338152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.338156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.338166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.339187Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.351852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.351913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.351962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.351995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.352000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.352512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.352536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.352582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.352590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.352593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.352596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.352846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.352852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.352854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.353108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.353114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.353119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.353123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.353544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.353889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.353932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.354040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.354056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.354062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.354107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.354114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.354137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.354145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:00.354588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:49:00.354594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.354621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.354624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.354675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.354680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.354689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.354692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.354696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.354699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.354702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.354704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.354713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.354718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.354722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.354943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.354953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.354956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.354959Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.354962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.354974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.439960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:49:00.439994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:49:00.440136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.440157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.440164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:49:00.440184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:49:00.440213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.674927Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T17:49:00.678538Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# [1:404:2377] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:4652 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E5B3B548-6225-485E-A4FE-1746816A4227 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T17:49:00.679754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.679772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:00.679851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.679859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:00.679971Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2024-11-21T17:49:00.680586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.680602Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:49:00.680798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.680812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.680817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:00.680822Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:00.680829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:00.680847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:4652 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5FB51F64-D857-4B5E-8F5B-65EF4FAE04B8 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T17:49:00.681502Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T17:49:00.681767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:4652 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 69278E66-C1BC-4FB4-8222-7838C57C407C amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T17:49:00.682301Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T17:49:00.682311Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T17:49:00.682381Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4652 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B066CB8F-5DA2-45BC-958A-094D81493B8B amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2024-11-21T17:49:00.682986Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2024-11-21T17:49:00.682993Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T17:49:00.683032Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:00.684138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.684150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:00.684163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.684172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.684180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.684183Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.684186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:00.684191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:00.684222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.684660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.684720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.684727Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:00.684737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2024-11-21T17:49:00.684741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.684747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:00.684758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:49:00.684763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.684769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:00.684773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:00.684794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.685127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:00.685137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.330523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.330545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.330548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.330551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.330555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.330558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.330564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.330634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.338284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.338303Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2024-11-21T17:49:00.340474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.341260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.341303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.342713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:00.342896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.342991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.343076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.343937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.344180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.344188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.344238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.344246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.344252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.344267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.345675Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.364556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.364625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.364681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.364744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.364753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.365461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.365489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.365536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.365544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.365548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.365552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.366005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.366412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.366448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.367014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.367403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.367461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.367614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.367694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.367699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.367739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.367750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:00.368185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.368220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.368300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368306Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.368315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.368319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.368324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.368338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.368342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.368346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.368355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.368359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.368362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.368684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.368700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.368705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.368710Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.368715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.368729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Step: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.453365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:49:00.453388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:49:00.453509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.453528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.453534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:49:00.453550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:49:00.453572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.690996Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T17:49:00.694215Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# [1:404:2377] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:65187 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 787A0345-528B-470F-8937-1D0275D02A4D amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T17:49:00.695513Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2024-11-21T17:49:00.696099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.696112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:00.696184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.696189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:00.696378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.696390Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:49:00.696545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.696555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.696559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:00.696564Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:00.696570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:00.696586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:65187 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3CAD8F5B-4D46-48AE-A8E3-30AF34FA0E00 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T17:49:00.696920Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:65187 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3B6E23E7-4A85-4704-93F3-95E709A9FB7B amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T17:49:00.697480Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2024-11-21T17:49:00.697504Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T17:49:00.697575Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T17:49:00.697678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:65187 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 39A51F66-65A6-475D-9D24-B5E45063C56B amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2024-11-21T17:49:00.698333Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2024-11-21T17:49:00.698359Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T17:49:00.698394Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:00.699740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T17:49:00.699753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:00.699767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T17:49:00.699776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T17:49:00.699786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.699788Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.699792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:00.699798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:00.699829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.700165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.700218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.700223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:00.700256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2024-11-21T17:49:00.700260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.700265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:00.700278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:49:00.700282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.700286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:00.700288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:00.700307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.700655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:00.700664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.331674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.331696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.331701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.331704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.331708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.331711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.331718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.331775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.339877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.339892Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.343053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.343566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.343591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.344636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2024-11-21T17:49:00.344895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.345010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.345095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.346118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.346308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.346314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.346337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.346341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.346356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.346366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.347271Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.360543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.360616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.360668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.360710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.360718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.363931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.363967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.364023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.364035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.364039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.364044Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.364604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.364617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.364622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.365013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.365025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.365031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.365038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.365681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.366162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.366226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.366431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.366541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.366556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.366584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.366597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:00.367087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
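A side note on the data sizes above: the plain export writes data_00.csv with content-length 11, while the [Zstd] variant of what appears to be the same single-row table writes data_00.csv.zst with content-length 20, i.e. for a payload this small the zstd frame overhead outweighs any compression gain. A minimal sketch, assuming the third-party zstandard package and a made-up 11-byte row (the real CSV bytes are not shown in the log):

    # pip install zstandard  (assumed; not part of the test environment above)
    import zstandard as zstd

    raw_row = b'"key","val"'  # hypothetical 11-byte row, like data_00.csv above

    compressed = zstd.ZstdCompressor().compress(raw_row)

    # For tiny inputs the zstd frame header and block framing dominate, so the
    # compressed object can come out larger than the raw CSV, consistent with
    # the 11-byte data_00.csv vs 20-byte data_00.csv.zst seen in the log.
    print(len(raw_row), len(compressed))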
2024-11-21T17:49:00.367134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.367227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367235Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.367247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.367250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.367256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.367261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.367265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.367268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.367280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.367297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.367301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.367605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.367619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.367623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.367628Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.367633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.367648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Host: localhost:10213 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EC11D86F-268A-45FF-A0F1-EEFAA62B686D amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:00.731590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.731605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:00.731703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.731709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:10213 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA2024-11-21T17:49:00.731819Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2432], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2024-11-21T17:49:00.731836Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2432], success# 1, error# , multipart# 0, uploadId# (empty maybe) amz-sdk-invocation-id: 9771BA20-91AE-453A-921B-A8D5183CB270 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD 2024-11-21T17:49:00.731864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2024-11-21T17:49:00.731873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.731972Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2431], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:00.732651Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2427], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2024-11-21T17:49:00.732878Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:465:2426] 2024-11-21T17:49:00.732918Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:466:2427], sender# [1:465:2426], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T17:49:00.733011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.733023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 
72057594046678944, cookie: 102 2024-11-21T17:49:00.733028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:00.733034Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:00.733041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:49:00.733059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:10213 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1FE4FC6D-08A3-41EE-BF77-D0264580770F amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2024-11-21T17:49:00.733787Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2427], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2024-11-21T17:49:00.733797Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:466:2427], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T17:49:00.733873Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:465:2426], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:00.735756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:49:00.757068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.757096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:00.757117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.757126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.757138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.757172Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.757256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.757259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:49:00.757267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.757272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.757276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.757278Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.757281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:00.757284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:49:00.757289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:00.757297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.758007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.758278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.758406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.758415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:00.758431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:49:00.758436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.758442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:00.758463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2340] message: TxId: 102 2024-11-21T17:49:00.758470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.758476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:00.758480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:00.758508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:00.758911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:00.758920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:445:2407] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.335435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.335456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.335460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.335463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.335467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.335470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.335477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.335537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.344531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.344549Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.350572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.351339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.351377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.352805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:00.353028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2024-11-21T17:49:00.353113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.353197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.354236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.354491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.354503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.354538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.354545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.354551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.354563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.355833Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.371995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.372066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.372115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.372152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.372159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.372849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.372873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.372919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.372928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.372932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.372936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.373328Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.373338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.373343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.373682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.373691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.373696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.373702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.374278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.374644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.374687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.374847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.374868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.374875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.374927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.374934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.374956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.374967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:00.375361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.375369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.375395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2024-11-21T17:49:00.375400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.375451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.375457Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.375467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.375472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.375477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.375481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.375486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.375489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.375498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.375504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.375507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.375803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.375817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.375821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.375826Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.375830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.375844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.464689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:49:00.464718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:49:00.464834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.464851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.464856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:49:00.464874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:49:00.464903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.706117Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T17:49:00.708562Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# [1:404:2377] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:22118 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 48149A24-171A-4752-9E0A-2C447255AB5D amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T17:49:00.709740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.709760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:00.709865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.709872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:00.709994Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2024-11-21T17:49:00.710629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.710647Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:49:00.710858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.710871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.710875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:00.710881Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:00.710888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:00.710905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:22118 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8A3AF29C-EAB1-439E-8798-2DA30BA3B045 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T17:49:00.711676Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T17:49:00.712012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:22118 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D45032EE-737D-401C-AD92-7A5535E1B664 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T17:49:00.712632Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2024-11-21T17:49:00.712651Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T17:49:00.712669Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:22118 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AD311FC5-1D97-46E9-BC93-22C21BE47CBD amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2024-11-21T17:49:00.713451Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2024-11-21T17:49:00.713466Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T17:49:00.713500Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:00.714878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T17:49:00.714894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:00.714916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T17:49:00.714929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T17:49:00.714940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.714944Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.714949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:00.714956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:00.714996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.715387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.715421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.715427Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:00.715435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2024-11-21T17:49:00.715438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.715442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:00.715453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:49:00.715458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.715461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:00.715464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:00.715484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:00.715807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:00.715815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132211860.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132211860.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132211860.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112211860.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132211860.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132211860.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112210660.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112211860.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112211860.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112210660.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112210660.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112210660.000000s;Name=;Codec=}; 2024-11-21T17:47:40.531534Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:47:40.561208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:47:40.564288Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:47:40.564320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:47:40.564380Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:47:40.565126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:47:40.565177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:47:40.565217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:47:40.565249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:47:40.565266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:47:40.565289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:47:40.565304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:47:40.565321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:47:40.565337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:47:40.565353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:47:40.565370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:47:40.565387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:47:40.570027Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:47:40.570056Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T17:47:40.571418Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:47:40.571498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:47:40.571511Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:47:40.571545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:47:40.571624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:47:40.571639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:47:40.571644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:47:40.571654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:47:40.571663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:47:40.571671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:47:40.571674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:47:40.571691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:47:40.571699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:47:40.571706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:47:40.571710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:47:40.571719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:47:40.571726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:47:40.571734Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:47:40.571738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:47:40.571750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:47:40.571756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:47:40.571760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:47:40.571770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:47:40.571777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:47:40.571781Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:47:40.571817Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=13; 2024-11-21T17:47:40.571827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2024-11-21T17:47:40.571836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T17:47:40.571848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2024-11-21T17:47:40.571871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:47:40.571879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:47:40.571883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:47:40.571908Z ... 
lerLoadingTime=4; 2024-11-21T17:49:00.717105Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:00.717109Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=10; 2024-11-21T17:49:00.717127Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=15; 2024-11-21T17:49:00.717133Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T17:49:00.717152Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15; 2024-11-21T17:49:00.717193Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=35; 2024-11-21T17:49:00.717200Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=3; 2024-11-21T17:49:00.717205Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=1; 2024-11-21T17:49:00.717209Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T17:49:00.717212Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T17:49:00.717216Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:00.717224Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=5; 2024-11-21T17:49:00.717227Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T17:49:00.717235Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=5; 2024-11-21T17:49:00.717238Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T17:49:00.717246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=2; 2024-11-21T17:49:00.717249Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1278; 2024-11-21T17:49:00.717264Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=12;blobs=18;rows=320000;bytes=18990624;raw_bytes=32169236; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:00.717276Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:00.717281Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T17:49:00.717284Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:00.717297Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T17:49:00.717313Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T17:49:00.717325Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:00.717329Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:00.717341Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:00.717345Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:00.717352Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:00.717363Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T17:49:00.717373Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T17:49:00.717377Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:00.717385Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:00.717388Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:00.717392Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:00.717402Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:00.717496Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:00.717506Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1413:3282];tablet_id=9437184;parent=[1:1374:3250];fline=manager.h:99;event=ask_data;request=request_id=108;1={portions_count=12};; 2024-11-21T17:49:00.717661Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:00.717950Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:00.717958Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:49:00.717962Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:00.717967Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:00.717975Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:00.717985Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=3; 2024-11-21T17:49:00.717994Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T17:49:00.717999Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:00.718005Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:00.718010Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:00.718015Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:00.718029Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:00.718150Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=12;path_id=1; 2024-11-21T17:49:00.718205Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=12;path_id=1; 2024-11-21T17:49:00.718271Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:00.718274Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1374:3250];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.335512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.335534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.335538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.335542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.335546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.335549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.335559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.335629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.343911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.343931Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.346727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2024-11-21T17:49:00.347526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.347560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.349488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:00.349696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.349773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.349844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.351116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.351294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.351300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.351324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.351328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.351332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.351344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.352271Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.367352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.367409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.367484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.367489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.368270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:49:00.368280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.368284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.368289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.368710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.369115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.369124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.369129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.369136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.369555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.369876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.369918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.370052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.370075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.370083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.370128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.370133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.370154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.370162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
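As an aside for anyone skimming these dumps: the entries above follow two regular, machine-readable patterns. The TX_COLUMNSHARD initialization lines carry semicolon-delimited key=value counters (for example "EXECUTE:storages_managerLoadingTime=35"), and the FLAT_TX_SCHEMESHARD lines record operation state changes of the form "Change state for txid 1:0 2 -> 3". A minimal Python sketch for extracting both from a saved log is shown below; it is a hypothetical helper for illustration only and is not part of the ydb test harness.

import re

# Hypothetical post-processing helper, NOT part of the ydb test harness.
# It only illustrates two patterns visible in the log entries above:
#   1) TX_COLUMNSHARD init counters, ';'-separated key=value pairs such as
#      "EXECUTE:storages_managerLoadingTime=35;"
#   2) FLAT_TX_SCHEMESHARD operation state changes such as
#      "Change state for txid 1:0 2 -> 3"

LOADING_TIME = re.compile(r"(PRECHARGE|EXECUTE):(\w+)LoadingTime=(\d+)")
TRANSITION = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

def loading_times(log_text):
    """Return (phase, stage, raw counter) triples from columnshard init lines."""
    return [(phase, stage, int(value))
            for phase, stage, value in LOADING_TIME.findall(log_text)]

def state_transitions(log_text):
    """Return (txid, from_state, to_state) triples from schemeshard lines."""
    return [(txid, int(src), int(dst))
            for txid, src, dst in TRANSITION.findall(log_text)]

if __name__ == "__main__":
    sample = ("fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=10; "
              "2024-11-21T17:49:00.368289Z node 1 :FLAT_TX_SCHEMESHARD INFO: "
              "Change state for txid 1:0 2 -> 3")
    print(loading_times(sample))      # [('EXECUTE', 'tx_controller', 10)]
    print(state_transitions(sample))  # [('1:0', 2, 3)]

The counter values are copied as plain integers because the log does not state their unit; any interpretation of them is left to the reader.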
2024-11-21T17:49:00.370504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.370510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.370538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.370541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.370589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.370593Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.370602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.370604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.370608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.370611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.370614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.370616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.370624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.370628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.370630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.370825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.370833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.370835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.370839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.370841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.370852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
.1 HEADERS: Host: localhost:15588 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7B88CE8D-39B9-4078-BF57-3B65E6899CA7 amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 2024-11-21T17:49:00.746798Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2432], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2024-11-21T17:49:00.746807Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2432], success# 1, error# , multipart# 0, uploadId# (empty maybe) FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:00.746989Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2431], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15588 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 89FC530B-C3C2-4BF8-87AF-FCE36D812E43 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2024-11-21T17:49:00.747513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.747523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:00.747586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.747589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:49:00.747674Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2427], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2024-11-21T17:49:00.747832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.747840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.747892Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:465:2426] 2024-11-21T17:49:00.747905Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:466:2427], sender# [1:465:2426], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T17:49:00.748034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:00.748042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 
72057594046678944, cookie: 102 2024-11-21T17:49:00.748045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:00.748049Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:00.748054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:49:00.748067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:15588 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 80303971-A8EE-4F1F-8CB2-0E3EA5EE97FB amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2024-11-21T17:49:00.748533Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:466:2427], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2024-11-21T17:49:00.748540Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:466:2427], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T17:49:00.748570Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:465:2426], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:00.749817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:49:00.770920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.770948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:00.770968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.770977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.770987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable 
to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.771107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.771110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:49:00.771117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.771122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 321 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:49:00.771126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771128Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:00.771147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.771645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.771826Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:00.771835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:49:00.771838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.771844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:00.771859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2340] message: TxId: 102 2024-11-21T17:49:00.771865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:00.771870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:00.771875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:00.771899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:00.772289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:00.772301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:445:2407] TestWaitNotification: OK eventTxId 102 |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest |79.2%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:01.433788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:01.433809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:01.433813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:01.433816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:01.433820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:01.433823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:01.433829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:01.433892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:01.440435Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2024-11-21T17:49:01.440455Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:01.442978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:01.443505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:01.443531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:01.444770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:01.444956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:01.445041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:01.445119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:01.445923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:01.446182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:01.446194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:01.446230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:01.446236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:01.446240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:01.446250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.447145Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:01.459278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:01.459365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.459420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:01.459459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:01.459465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.460157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:01.460175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T17:49:01.460224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.460258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:01.460263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:01.460267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:01.460618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.460628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:01.460632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:01.460912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.460921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.460927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:01.460933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:01.461404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:01.461753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:01.461795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:01.461952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:01.461974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:01.461981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:01.462038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:01.462045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:01.462073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:01.462087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:01.462518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:01.462528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:01.462564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:01.462569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:01.462655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.462662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:01.462674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:01.462680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:01.462685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:01.462690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:01.462694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:01.462698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:01.462709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:01.462714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:01.462718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:01.462990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:01.463003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:01.463007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:01.463012Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:01.463017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:01.463030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
o populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 106, path id: 2 2024-11-21T17:49:01.483440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.483448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 106:0 ProgressState, operation type: TxAlterFileStore, at tablet72057594046678944 2024-11-21T17:49:01.483487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 106:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:49:01.483626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:49:01.483635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:49:01.483638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2024-11-21T17:49:01.483642Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:49:01.483646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:01.483657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2024-11-21T17:49:01.484138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:1 msg type: 268697601 2024-11-21T17:49:01.484165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72057594037968897 2024-11-21T17:49:01.484170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 106, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T17:49:01.484243Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:49:01.484632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:01.484640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 106, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T17:49:01.484654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:01.484660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 106:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:49:01.484665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 106:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:01.484684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 2 -> 3 2024-11-21T17:49:01.484744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:49:01.485061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.485083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.485088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#106:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:01.485382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T17:49:01.485415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2024-11-21T17:49:01.485443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:01.485461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: TxId: 106 Origin: 72075186233409546 Status: OK 2024-11-21T17:49:01.485466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#106:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T17:49:01.485471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2024-11-21T17:49:01.485754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.485775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:49:01.485779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#106:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:01.485786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2024-11-21T17:49:01.485809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:01.486114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2024-11-21T17:49:01.486137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000005 2024-11-21T17:49:01.486185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:01.486200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:01.486205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#106:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:49:01.486229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T17:49:01.486231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:49:01.486239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:01.486244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2024-11-21T17:49:01.486248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:49:01.486251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T17:49:01.486252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T17:49:01.486265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:01.486268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2024-11-21T17:49:01.486271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:49:01.486685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:01.486695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:01.486721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:01.486725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 106, path id: 2 FAKE_COORDINATOR: Erasing txId 106 2024-11-21T17:49:01.486812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:49:01.486820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:49:01.486824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2024-11-21T17:49:01.486828Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:01.486833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] 
was 3 2024-11-21T17:49:01.486845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2024-11-21T17:49:01.487106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:49:01.487152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:49:01.487157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:49:01.487213Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:49:01.487227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:49:01.487234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:425:2406] TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] Test command err: 2024-11-21T17:48:58.157355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:48:58.157716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:58.157736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175b/r3tmp/tmpqY101L/pdisk_1.dat 2024-11-21T17:48:58.248791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:48:58.266693Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:58.309149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:58.309178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:58.319592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:58.423366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:58.437193Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:48:58.437259Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:58.442230Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:58.442260Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:58.442379Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:48:58.442385Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:48:58.442389Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:48:58.442419Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:58.444481Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:58.444522Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:58.444537Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:48:58.444541Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:58.444544Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:48:58.444547Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:58.444730Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:58.444741Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:48:58.444751Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T17:48:58.444757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:58.444760Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:58.444767Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:58.444771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:58.444798Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:58.444851Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:58.444864Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:58.445077Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:58.455348Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:58.455392Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:48:58.629132Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:48:58.629704Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:48:58.629717Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:58.629796Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:58.629802Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:48:58.629810Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:48:58.629861Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:48:58.629886Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:48:58.630003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:58.630014Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:48:58.630261Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:48:58.630396Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:58.630634Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:48:58.630639Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:58.630719Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:48:58.630723Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:48:58.630729Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:58.630869Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:58.630875Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:58.630879Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:48:58.630892Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:48:58.630899Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:48:58.630916Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:58.631342Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:58.631615Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:48:58.631634Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:48:58.631638Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:48:58.633277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:58.633304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:58.633327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:58.633925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:48:58.634536Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:58.819782Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:58.820083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:48:58.876897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xevc801fv64vyxxjkz6x7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI2YzU2NTQtM2VmNThkOS1kOWI3OWU1Ny05NzhlMDBiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:58.877630Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T17:48:58.877704Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:58.888315Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:58.888364Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:58.909684Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xevma9v12z8pfwnr4p1qv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIzYzI1ZTctYmE1ZTViNzQtZTA0ODk5N2MtYTE0YmYxMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:48:58.910150Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired l ... ode 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T17:49:01.434077Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2024-11-21T17:49:01.434080Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:49:01.434416Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 904 RawX2: 12884904620 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\210\003\000\000\000\000\000\000\021\254\n\000\000\003\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2024-11-21T17:49:01.434426Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:01.434445Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:01.434478Z node 3 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2024-11-21T17:49:01.434488Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:49:01.434494Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:01.434497Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:49:01.434499Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:01.434502Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:01.434508Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} 
ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T17:49:01.434515Z node 3 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T17:49:01.434518Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:01.434520Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:01.434522Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot 2024-11-21T17:49:01.434525Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot 2024-11-21T17:49:01.434528Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:01.434530Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot 2024-11-21T17:49:01.434531Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance 2024-11-21T17:49:01.434533Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T17:49:01.434539Z node 3 :TX_DATASHARD TRACE: Requested stream clearance from [3:904:2732] for [0:281474976715665] at 72075186224037888 2024-11-21T17:49:01.434542Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2024-11-21T17:49:01.434571Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665 2024-11-21T17:49:01.434574Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2024-11-21T17:49:01.434585Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true 2024-11-21T17:49:01.434589Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T17:49:01.434597Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:01.434599Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:01.434602Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:01.434605Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:49:01.434608Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:49:01.434610Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T17:49:01.434613Z node 3 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888 2024-11-21T17:49:01.434615Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:01.434617Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 
72075186224037888 executing on unit WaitForStreamClearance 2024-11-21T17:49:01.434619Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan 2024-11-21T17:49:01.434621Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2024-11-21T17:49:01.434651Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2024-11-21T17:49:01.434654Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:49:01.434656Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:01.434658Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:01.434660Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:01.434732Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [3:911:2737], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T17:49:01.434735Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T17:49:01.434759Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2024-11-21T17:49:01.434852Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:49:01.434867Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2024-11-21T17:49:01.434871Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2024-11-21T17:49:01.434889Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:01.434892Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:01.434957Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:49:01.434960Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2024-11-21T17:49:01.434981Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:897:2725], Recipient [3:631:2536]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:49:01.434984Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:49:01.434990Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:01.434994Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:01.434997Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:01.434999Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:49:01.435002Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2024-11-21T17:49:01.435004Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2024-11-21T17:49:01.435008Z node 3 :TX_DATASHARD TRACE: ReadTable scan complete 
for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0 2024-11-21T17:49:01.435011Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:01.435014Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan 2024-11-21T17:49:01.435016Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:01.435018Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:01.435024Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:49:01.435033Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2024-11-21T17:49:01.435035Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:01.435037Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:01.435039Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:01.435044Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:01.435046Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:01.435048Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T17:49:01.435050Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:01.435052Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:01.435054Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:01.435056Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:01.435060Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:01.435062Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:01.435066Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> Sharding::XXUsage >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite |79.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 1856340002875486565 
3517941200430826626 6579664496910514240 9299877457902174681 11928105078316466580 10445215091248136646 16775375650727042425 8305586894641087059 2861883699676010377 10988791963048964290 13551459357967668168 17696293398721044130 4381893343667362547 14162237615426836255 12351500971121432163 16471930263856948371 2117893559958937484 13545280923347699908 2184061395229763073 1662408712605407382 4535048076977605827 5491868179933717827 1021929314480964263 8053632110810231344 5264408900192581858 17340947975076292015 4412709404075130897 6625348801182145078 236323651951193266 11858477874786903496 3821151895090404365 3279719516268744013 1550283993715069344 12249514794384779279 3871223279235469890 17382045501440115604 10013759195765465395 3626037838920567867 13917329216592929274 10431572180130474140 6383495792609164197 9676306599537843472 14509204120786372410 15894148047654147261 1195664426394406074 12744809889967110244 10407404915772415728 5524609576577477988 14763487483939786495 6595590256164149187 12485775574321252452 17782049355337933185 6144421146189584904 2694217947323288751 3579557373950461086 272729542193705993 11589748628407703321 11982757725130127013 15428243157068705881 17668332787007634984 6864306351248909839 13574152541036739713 16084702479420634500 1548421630375947316 11773063339007513526 2305350003895516353 13616625025338481434 10890090419889581201 9169232265516467049 11426277376012045649 2630567490479862773 18444907635253814371 14977105753150586068 11574522513333970700 13661928704931616311 8866646439858485558 7754694446830210524 1395274786640948319 13478995741575108275 3014394169458043234 5512279645721818373 10921844710759828350 12490231210701190721 16840191832090877678 14150465142802832665 12797174265165511824 16908063084320937370 12717228345728415759 1699368444341414044 10148564823216644452 |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: 2024-11-21T17:48:58.834702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:48:58.835118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:58.835139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001781/r3tmp/tmp5VnlZo/pdisk_1.dat 2024-11-21T17:48:58.924038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:48:58.940153Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:58.982035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:58.982067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:58.992528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:59.095611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:59.108658Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:48:59.108803Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:48:59.108870Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:48:59.108904Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:59.113707Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:48:59.113822Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:59.113837Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:59.113934Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:48:59.113939Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:48:59.113944Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:48:59.113974Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:59.116088Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:59.116131Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:59.116146Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:48:59.116149Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:59.116152Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:48:59.116155Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.116247Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:59.116254Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:59.116356Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:59.116367Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:48:59.116375Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:59.116379Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:59.116383Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:48:59.116389Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.116393Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:59.116397Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:48:59.116401Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:48:59.116403Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:48:59.116406Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:59.116410Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.116424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:48:59.116428Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:48:59.116444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:59.116493Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:48:59.116501Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:59.116513Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:59.116518Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:48:59.116520Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:48:59.116523Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:48:59.116526Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:48:59.116556Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:48:59.116558Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:48:59.116560Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:48:59.116562Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:48:59.116568Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:48:59.116570Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:48:59.116572Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:48:59.116574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:48:59.116578Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:48:59.116742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:48:59.116747Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.126938Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:59.126965Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:48:59.126971Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:48:59.126981Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:48:59.126994Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:48:59.300497Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:59.300519Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:48:59.300525Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:48:59.300541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:48:59.300545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:48:59.300566Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:48:59.300573Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:48:59.300577Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:48:59.300580Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:48:59.301096Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:48:59.301105Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.301179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:59.301183Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:48:59.301190Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.301195Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:48:59.301199Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:48:59.301205Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 6Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T17:49:02.079189Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2024-11-21T17:49:02.079269Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:921:2745], Recipient [2:921:2745]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:02.079275Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:02.079281Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:49:02.079286Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:49:02.079291Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2024-11-21T17:49:02.079295Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2024-11-21T17:49:02.079299Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2024-11-21T17:49:02.079305Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2024-11-21T17:49:02.079308Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2024-11-21T17:49:02.079313Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T17:49:02.079317Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:02.079360Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2024-11-21T17:49:02.079364Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T17:49:02.079367Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:49:02.079371Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit 
CompletedOperations 2024-11-21T17:49:02.079377Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2024-11-21T17:49:02.079380Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:49:02.079384Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2024-11-21T17:49:02.079388Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:02.079391Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:02.079394Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T17:49:02.079398Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T17:49:02.089672Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:49:02.089689Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:49:02.089695Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:02.089710Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1109:2906], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:02.089718Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:49:02.089801Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1109:2906], Recipient [2:926:2747]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2024-11-21T17:49:02.089805Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T17:49:02.089821Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 3500 txid# 281474976715668} 2024-11-21T17:49:02.089825Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2024-11-21T17:49:02.089830Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:49:02.089833Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:49:02.089860Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:926:2747], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:02.089863Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:02.089870Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:49:02.089887Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:49:02.089891Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2024-11-21T17:49:02.089894Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2024-11-21T17:49:02.089897Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2024-11-21T17:49:02.089901Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T17:49:02.089903Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2024-11-21T17:49:02.089906Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2024-11-21T17:49:02.089908Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2024-11-21T17:49:02.089955Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2024-11-21T17:49:02.089958Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:49:02.089960Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T17:49:02.089963Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2024-11-21T17:49:02.089965Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T17:49:02.090110Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1131:2925], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T17:49:02.090114Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T17:49:02.090160Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2024-11-21T17:49:02.090276Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T17:49:02.090333Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2024-11-21T17:49:02.090337Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2024-11-21T17:49:02.090353Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2024-11-21T17:49:02.090356Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2024-11-21T17:49:02.090393Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:926:2747], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:02.090396Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:02.090400Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:49:02.090402Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:49:02.090405Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2024-11-21T17:49:02.090407Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2024-11-21T17:49:02.090410Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2024-11-21T17:49:02.090413Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T17:49:02.090416Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2024-11-21T17:49:02.090418Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2024-11-21T17:49:02.090420Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2024-11-21T17:49:02.090453Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2024-11-21T17:49:02.090455Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2024-11-21T17:49:02.090457Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T17:49:02.090459Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2024-11-21T17:49:02.090463Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T17:49:02.090466Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T17:49:02.090468Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2024-11-21T17:49:02.090470Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:02.090472Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T17:49:02.090474Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T17:49:02.090476Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T17:49:02.100839Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:49:02.100871Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:49:02.100881Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2024-11-21T17:49:02.100903Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1109:2906], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:49:02.100916Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TFileStoreWithReboots::Create [GOOD] >> Normalizers::SchemaVersionsNormalizer >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString |79.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2024-11-21T17:48:45.614771Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.614803Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.618407Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.618434Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.639680Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.849083Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.849105Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.851746Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:45.851816Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.862976Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.083261Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.083286Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.085441Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.085477Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.106703Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.106803Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=3501945802314484704, session=0, seqNo=0) 2024-11-21T17:48:46.106841Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.117498Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=3501945802314484704, session=1) 2024-11-21T17:48:46.117628Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:137:2161], cookie=6780275524505647445) 2024-11-21T17:48:46.117641Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:137:2161], cookie=6780275524505647445) 2024-11-21T17:48:46.411573Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.422318Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:46.675552Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.686232Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:46.929506Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.940151Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.183343Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.194111Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.447495Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.458233Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.691112Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.701784Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.944705Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.955478Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.199239Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.210036Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.454273Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.465151Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.750516Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.761566Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.015724Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.026410Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.280174Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.290960Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.545049Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.555825Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.810149Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.820890Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.115862Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.126863Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.381234Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.392007Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.645891Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.656645Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.910911Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.921915Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.176499Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.187590Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.451987Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.462778Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.726935Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.738034Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.991695Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.002566Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.266542Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.277295Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.530595Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.541872Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.795589Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.806400Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.070142Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.081002Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.334393Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.345181Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.598973Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.609855Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.863712Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.874557Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.169550Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.180280Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.444187Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.455058Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.709057Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.719783Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.973841Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.984639Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.228590Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.239318Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.493080Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.503873Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.758340Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.769230Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:56.033261Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:56.044004Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:56.297972Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:56.308801Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:56.562773Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:56.573539Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:56.848314Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:56.859086Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.112858Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.123652Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.387890Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.398812Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.652158Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.662989Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.916806Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.927562Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:58.222249Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:58.233144Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:58.476567Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:58.487299Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:58.751592Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:58.762460Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:59.016438Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:59.027322Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:59.281344Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:59.292168Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:59.556836Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:59.567614Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:59.811388Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:59.822209Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:00.075829Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:00.086634Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:00.330408Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:00.341345Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:00.587629Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:00.598275Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:00.841674Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:00.852246Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:01.085826Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:01.096620Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:01.350808Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:01.361596Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:01.607756Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:01.618653Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:01.863693Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:01.874799Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:02.160842Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T17:49:02.160883Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:49:02.171746Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T17:49:02.182170Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:469:2476], cookie=5168414090926905389) 2024-11-21T17:49:02.182213Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:469:2476], cookie=5168414090926905389) 2024-11-21T17:49:02.396452Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 
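The trace above records the Kesus tablet running a TTxSelfCheck transaction roughly every 265 ms until the idle session's timeout elapses, at which point TTxSessionTimeout deletes session 1. As a rough illustration only (a hypothetical, self-contained sketch in plain Python, not YDB's actor-based implementation), the expiry mechanism being exercised can be modelled as a periodic check that drops sessions whose deadline has passed:

import time

class SessionRegistry:
    """Toy model of the behaviour under test: sessions expire unless refreshed."""
    def __init__(self, timeout_s):
        self.timeout_s = timeout_s
        self.deadlines = {}  # session_id -> absolute expiry time

    def attach(self, session_id):
        self.deadlines[session_id] = time.monotonic() + self.timeout_s

    def keep_alive(self, session_id):
        # A client ping pushes the deadline forward; the test never pings,
        # so the session eventually times out.
        self.deadlines[session_id] = time.monotonic() + self.timeout_s

    def self_check(self):
        # Analogue of TTxSelfCheck: find and delete expired sessions.
        now = time.monotonic()
        expired = [sid for sid, dl in self.deadlines.items() if dl <= now]
        for sid in expired:
            # analogue of TTxSessionTimeout: "Deleting session <sid>"
            del self.deadlines[sid]
        return expired

if __name__ == "__main__":
    registry = SessionRegistry(timeout_s=1.0)
    registry.attach(1)
    while registry.deadlines:
        time.sleep(0.25)  # the trace shows checks roughly every 265 ms
        for sid in registry.self_check():
            print(f"session {sid} expired")
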
2024-11-21T17:49:02.396487Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:02.399867Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:02.399902Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:49:02.421174Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:02.422042Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=1265022050925457304, path="Root", config={ MaxUnitsPerSecond: 100 }) 2024-11-21T17:49:02.422096Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T17:49:02.432866Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=1265022050925457304) 2024-11-21T17:49:02.433225Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:139:2163]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:49:02.433234Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:139:2163], cookie=0) 2024-11-21T17:49:02.433263Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T17:49:02.433266Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2024-11-21T17:49:02.474198Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2024-11-21T17:49:02.474238Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2024-11-21T17:49:02.474313Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:144:2168]) 2024-11-21T17:49:02.474344Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2024-11-21T17:49:02.515084Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } |79.3%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::WriteReadZSTD >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> ReadLoad::ShouldReadKqp >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> UpsertLoad::ShouldWriteDataBulkUpsert >> TKesusTest::TestAcquireTimeout [GOOD] >> TColumnShardTestReadWrite::WriteRead >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> TSettingsValidation::TestDifferentDedupParams [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> Normalizers::SchemaVersionsNormalizer [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> TColumnShardTestReadWrite::RebootWriteRead >> Backup::ProposeBackup >> TKesusTest::TestAcquireTimeoutAfterReboot >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> TFileStoreWithReboots::AlterAssignDrop [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldCreateTable [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad |79.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> UpsertLoad::ShouldDropCreateTable >> TColumnShardTestReadWrite::WriteRead [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |79.3%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |79.3%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2024-11-21T17:49:03.974436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:03.974785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:03.974800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001988/r3tmp/tmp0rv1za/pdisk_1.dat 2024-11-21T17:49:04.085043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.120285Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:04.161956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:04.161976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:04.172347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:04.274938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.481459Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2024-11-21T17:49:04.481489Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T17:49:04.547299Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.065765s, errors=0 2024-11-21T17:49:04.547330Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 |79.3%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2024-11-21T17:49:03.974877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:03.975271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:03.975290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c9/r3tmp/tmp8QAGjE/pdisk_1.dat 2024-11-21T17:49:04.084132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.120008Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:04.162167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:04.162235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:04.172665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:04.276008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.488453Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T17:49:04.488485Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T17:49:04.550570Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.062038s, errors=0 2024-11-21T17:49:04.550601Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T17:49:04.901601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:04.901630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:49:04.901646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019c9/r3tmp/tmpBZoyGN/pdisk_1.dat 2024-11-21T17:49:04.975046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.988351Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:05.030367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:05.030407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:05.040944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:05.144466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:05.347734Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T17:49:05.347759Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2024-11-21T17:49:05.409966Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.062162s, errors=0 2024-11-21T17:49:05.409998Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2024-11-21T17:48:41.813660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790975339626110:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:41.813846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001765/r3tmp/tmp7fZWQh/pdisk_1.dat 2024-11-21T17:48:41.851845Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31594, node 1 2024-11-21T17:48:41.863552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:41.863566Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:41.863567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2024-11-21T17:48:41.863598Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:41.915161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:41.915185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:41.916394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:41.929945Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:41.932206Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:11965, port: 11965 2024-11-21T17:48:41.932253Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:41.992529Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2024-11-21T17:48:42.040920Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****63OA (0A66C669) () has now valid token of ldapuser@ldap 2024-11-21T17:48:42.246956Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790981047588627:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.247117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001765/r3tmp/tmpwrOdxo/pdisk_1.dat 2024-11-21T17:48:42.257328Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25620, node 2 2024-11-21T17:48:42.266618Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.266631Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.266633Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.266674Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.322311Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.322532Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:2432, port: 2432 2024-11-21T17:48:42.322572Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.347350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.347380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.348611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.372545Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:2432. 
Invalid credentials 2024-11-21T17:48:42.372799Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****QdTA (93D87853) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:42.595347Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790981086588696:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.595719Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001765/r3tmp/tmptwQOaM/pdisk_1.dat 2024-11-21T17:48:42.602583Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14465, node 3 2024-11-21T17:48:42.612544Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.612559Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.612561Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.612613Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.653982Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.655459Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:13290, port: 13290 2024-11-21T17:48:42.655496Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.697995Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.698025Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.699133Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.716490Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:13290. 
Invalid credentials 2024-11-21T17:48:42.716772Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****SgJQ (DFAB1CFB) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:42.962605Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790977807566106:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.962619Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001765/r3tmp/tmpCcBBnw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19198, node 4 2024-11-21T17:48:42.979897Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:42.980084Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.980093Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.980095Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.980136Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.017152Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.018673Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:20028, port: 20028 2024-11-21T17:48:43.018704Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.063062Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.063103Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.064126Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.084470Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.084633Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:20028 return no entries 2024-11-21T17:48:43.084798Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****9lIA (6EBF1865) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:43.334174Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439790982973994382:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.334194Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001765/r3tmp/tmpZARmnG/pdisk_1.dat 2024-11-21T17:48:43.344862Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17234, node 5 2024-11-21T17:48:43.352863Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.352893Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.352895Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.352937Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.386280Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.388446Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:7185, port: 7185 2024-11-21T17:48:43.388479Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.434366Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.434397Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.435425Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.468469Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.512416Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.512583Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:43.512601Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.560427Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.604458Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.605121Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****a3Tg (F174E81F) () has now valid token of ldapuser@ldap 2024-11-21T17:48:47.336060Z node 5 :TICKET_PARSER DEBUG: 
Refreshing ticket eyJh****a3Tg (F174E81F) 2024-11-21T17:48:47.336093Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:7185, port: 7185 2024-11-21T17:48:47.336114Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:47.396473Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:47.440434Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:47.440626Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:47.440639Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:47.484406Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:47.528400Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:47.528746Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****a3Tg (F174E81F) () has now valid token of ldapuser@ldap 2024-11-21T17:48:48.334505Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439790982973994382:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:48.334548Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:48:52.338301Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****a3Tg (F174E81F) 2024-11-21T17:48:52.338330Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:7185, port: 7185 2024-11-21T17:48:52.338347Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:52.396473Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:52.444429Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:52.444579Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:52.444590Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:52.488446Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:52.532396Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:52.532733Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****a3Tg 
(F174E81F) () has now valid token of ldapuser@ldap 2024-11-21T17:48:53.842391Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439791026002609668:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:53.842416Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001765/r3tmp/tmp3U47FM/pdisk_1.dat 2024-11-21T17:48:53.849397Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6146, node 6 2024-11-21T17:48:53.860490Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:53.860500Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:53.860501Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:53.860532Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:53.931047Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:53.933106Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15153, port: 15153 2024-11-21T17:48:53.933159Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:53.942584Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:53.942617Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:53.943665Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:54.020448Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:54.068713Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Zbgw (31420C66) () has now valid token of ldapuser@ldap 2024-11-21T17:48:58.842759Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439791026002609668:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:58.842790Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:48:58.844795Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Zbgw (31420C66) 2024-11-21T17:48:58.844827Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15153, port: 15153 2024-11-21T17:48:58.844853Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:58.908474Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:58.952673Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Zbgw (31420C66) () has now valid token of ldapuser@ldap 2024-11-21T17:49:02.846621Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Zbgw (31420C66) 2024-11-21T17:49:02.846668Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15153, port: 15153 2024-11-21T17:49:02.846688Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:49:02.908425Z 
node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:49:02.952612Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Zbgw (31420C66) () has now valid token of ldapuser@ldap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2024-11-21T17:49:03.974874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:03.975233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:03.975250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019ac/r3tmp/tmpvmi6UR/pdisk_1.dat 2024-11-21T17:49:04.083556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.119379Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:04.161207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:04.161243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:04.171687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:04.274787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.482352Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2024-11-21T17:49:04.482374Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2024-11-21T17:49:04.482645Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} started# 5 actors each with inflight# 4 2024-11-21T17:49:04.482649Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T17:49:04.482654Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T17:49:04.482656Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T17:49:04.482659Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T17:49:04.482661Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T17:49:04.483123Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} session: ydb://session/3?node_id=1&id=ODQyNGMzMGItNmFlYWFjYmQtYjI2YmQ4ZDAtMjhiODE4MjI= 2024-11-21T17:49:04.483132Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} session: ydb://session/3?node_id=1&id=YzFjYjQwMzQtZGQ3ODkxNzktOGY5OWU4ZDctZmJjMTg4NjA= 2024-11-21T17:49:04.483237Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} session: ydb://session/3?node_id=1&id=YjMzY2VhNGQtNmMzYjdmZWMtMTBlOTQzYzgtNWQ3MDQ2 2024-11-21T17:49:04.483335Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} session: ydb://session/3?node_id=1&id=ZWMxMzQzNzctZmRlMjlkMzYtYWExY2VlYmMtZjljNTE3M2Q= 2024-11-21T17:49:04.483477Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} session: ydb://session/3?node_id=1&id=ZTFkOTkzYWQtYmU3M2QzZGYtYTI1MmM1NjYtNDlmOGRiYjI= 2024-11-21T17:49:04.483987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:715:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.484002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.484007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.484016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.484022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.484028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.484037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.484970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:04.671838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:04.671870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:04.671877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:04.671883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:04.671889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:757:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:04.820191Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 1732211344.820176s, errors=0 2024-11-21T17:49:04.820298Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1732211344820 OperationsOK: 4 OperationsError: 0 } 2024-11-21T17:49:04.875866Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} finished in 1732211344.875853s, errors=0 2024-11-21T17:49:04.875943Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1732211344875 OperationsOK: 4 OperationsError: 0 } 2024-11-21T17:49:04.931496Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1732211344.931481s, errors=0 2024-11-21T17:49:04.931562Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1732211344931 OperationsOK: 4 OperationsError: 0 } 2024-11-21T17:49:04.986884Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} finished in 1732211344.986872s, errors=0 2024-11-21T17:49:04.986949Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1732211344986 OperationsOK: 4 OperationsError: 0 } 2024-11-21T17:49:05.042763Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} finished in 1732211345.042739s, errors=0 2024-11-21T17:49:05.042833Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1732211345042 OperationsOK: 4 OperationsError: 0 } 2024-11-21T17:49:05.042841Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} finished in 0.560207s, oks# 20, errors# 0 2024-11-21T17:49:05.042858Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2024-11-21T17:49:02.930272Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:02.947083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:02.948859Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:02.948872Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:02.948914Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:02.949379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2024-11-21T17:49:02.949399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:02.949417Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:02.949430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:02.949447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:02.949458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:02.949468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:02.949477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:02.949489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:02.949500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:02.949513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:02.949523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:02.949536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:02.953297Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:02.954515Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:02.954581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2024-11-21T17:49:02.954589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2024-11-21T17:49:02.954674Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:02.954689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2024-11-21T17:49:02.954694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2024-11-21T17:49:02.954712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:02.954725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:02.954732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:02.954736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2024-11-21T17:49:02.954743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:02.954750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:02.954756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:02.954760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:02.954773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:02.954780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:02.954787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:02.954791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:02.954799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:02.954805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:02.954810Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:02.954815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:02.954824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:02.954830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:02.954833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:02.954841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:02.954848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:02.954852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:02.954881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2024-11-21T17:49:02.954891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2024-11-21T17:49:02.954899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T17:49:02.954907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T17:49:02.954929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:02.954936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:02.954940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:02.954958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:02.954963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:02.954967Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:02.954979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:02.954985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=a ... dx=0; 2024-11-21T17:49:03.673233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T17:49:03.673276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.673282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:03.673287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:03.673327Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:03.673335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.673338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:03.673342Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:03.673455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:03.673459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T17:49:03.673465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=2; 2024-11-21T17:49:03.673471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 
2024-11-21T17:49:03.673479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:03.673487Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.673494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2024-11-21T17:49:03.673498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:03.673540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:03.673557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.673561Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:03.673570Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2024-11-21T17:49:03.673580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2024-11-21T17:49:03.673602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] send ScanData to [1:323:2330] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 2405760 rows: 20048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key1: uint64 key2: uint64 field: string 2024-11-21T17:49:03.673612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.673621Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.673628Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.674152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:03.674174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.674181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:03.674186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] finished for tablet 9437184 2024-11-21T17:49:03.674202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] send ScanData to [1:323:2330] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:03.674260Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:325:2331] and sent to [1:323:2330] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.019}],"full":{"a":1732211343654478,"name":"_full_task","f":1732211343654478,"d_finished":0,"c":0,"l":1732211343674208,"d":19730},"events":[{"name":"bootstrap","f":1732211343654527,"d_finished":380,"c":1,"l":1732211343654907,"d":380},{"a":1732211343674147,"name":"ack","f":1732211343673534,"d_finished":98,"c":1,"l":1732211343673632,"d":159},{"a":1732211343674141,"name":"processing","f":1732211343654920,"d_finished":16773,"c":9,"l":1732211343673632,"d":16840},{"name":"ProduceResults","f":1732211343654753,"d_finished":292,"c":12,"l":1732211343674183,"d":292},{"a":1732211343674184,"name":"Finish","f":1732211343674184,"d_finished":0,"c":0,"l":1732211343674208,"d":24},{"name":"task_result","f":1732211343654922,"d_finished":16658,"c":8,"l":1732211343673506,"d":16658}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2024-11-21T17:49:03.674273Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:03.654345Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=256192;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=256192;selected_rows=0; 2024-11-21T17:49:03.674278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:03.674292Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T17:49:03.674298Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2024-11-21T17:49:03.974432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:03.974779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:03.974795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001969/r3tmp/tmp9QRRnH/pdisk_1.dat 2024-11-21T17:49:04.083621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.119550Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:04.161207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:04.161238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:04.171688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:04.274776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.481694Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2024-11-21T17:49:04.481717Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T17:49:04.554455Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.072697s, errors=0 2024-11-21T17:49:04.554479Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T17:49:04.891091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:04.891121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:49:04.891137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001969/r3tmp/tmpg5iAc0/pdisk_1.dat 2024-11-21T17:49:04.967218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.981417Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:05.023088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:05.023130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:05.033593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:05.137227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:05.344180Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2024-11-21T17:49:05.344215Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T17:49:05.406453Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.062153s, errors=0 2024-11-21T17:49:05.406511Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2024-11-21T17:49:03.693880Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:03.708040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:03.710791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:03.710812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:03.710861Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:03.711389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:03.711429Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:03.711458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:03.711470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:03.711480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:03.711491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:03.711500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:03.711511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:03.711521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:03.711531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.711545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:03.711563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:03.714472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:03.715435Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:03.715502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:03.715512Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:03.715535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.715565Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:03.715575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:03.715578Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:03.715583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:03.715590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:03.715594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:03.715597Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:03.715607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.715611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:03.715615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:03.715618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:03.715624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:03.715627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:03.715633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:03.715635Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:03.715643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:03.715647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:03.715649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T17:49:03.715655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:03.715659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:03.715661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:03.715683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2024-11-21T17:49:03.715689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T17:49:03.715694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T17:49:03.715700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T17:49:03.715714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:03.715718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:03.715721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:03.715736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:03.715740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.715743Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.715750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:03.715754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:03.715757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:03.715768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:03.715773Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:03.715775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:03.715783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=31;merger=0;interval_id=25; 2024-11-21T17:49:05.157954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:05.157960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:05.157962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=31;finished=1; 2024-11-21T17:49:05.157965Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:05.157988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:05.158001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:05.158003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:05.158009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2024-11-21T17:49:05.158015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2024-11-21T17:49:05.158029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] send ScanData to [1:427:2442] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2024-11-21T17:49:05.158036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:05.158043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:05.158049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:05.158064Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:05.158069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:05.158074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:05.158076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] finished for tablet 9437184 2024-11-21T17:49:05.158081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] send ScanData to [1:427:2442] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:05.158112Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:428:2443] and sent to [1:427:2442] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults","f_task_result","l_task_result"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732211345157059,"name":"_full_task","f":1732211345157059,"d_finished":0,"c":0,"l":1732211345158084,"d":1025},"events":[{"name":"bootstrap","f":1732211345157082,"d_finished":207,"c":1,"l":1732211345157289,"d":207},{"a":1732211345158062,"name":"ack","f":1732211345157986,"d_finished":65,"c":1,"l":1732211345158051,"d":87},{"a":1732211345158062,"name":"processing","f":1732211345157378,"d_finished":390,"c":10,"l":1732211345158051,"d":412},{"name":"ProduceResults","f":1732211345157200,"d_finished":182,"c":13,"l":1732211345158075,"d":182},{"a":1732211345158075,"name":"Finish","f":1732211345158075,"d_finished":0,"c":0,"l":1732211345158084,"d":9},{"name":"task_result","f":1732211345157379,"d_finished":315,"c":9,"l":1732211345157969,"d":315}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2024-11-21T17:49:05.158120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:05.157004Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2024-11-21T17:49:05.158123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:05.158128Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T17:49:05.158132Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::AlterAssignDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] 
recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:54.433394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:54.433408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:54.433411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:54.433413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:54.433417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:54.433419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:54.433425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:54.433473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:54.441994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:54.442008Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:54.443260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:54.443308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:54.443325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:54.444806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:54.444852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:54.444909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.445024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:54.445391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:54.445545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:54.445550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:54.445557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:54.445561Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:54.445565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:54.445587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:54.446298Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:54.457110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:54.457170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.457213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:54.457248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:54.457253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.457772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.457785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:54.457825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.457831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:54.457835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:54.457838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:54.458042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.458048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:54.458050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:54.458226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.458230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.458234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:54.458239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:54.458603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:54.458850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:54.458881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:54.458998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.459011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:54.459015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:54.459052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:54.459056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:54.459076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:54.459084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:54.459424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:54.459438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:54.459483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:54.459487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:54.459562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.459568Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:54.459577Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:54.459579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:54.459583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:54.459586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:54.459589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:54.459591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:54.459610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:54.459614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:54.459617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... rdLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:49:04.445287Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:04.445317Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409546 2024-11-21T17:49:04.445492Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:04.445674Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2024-11-21T17:49:04.445688Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000005 2024-11-21T17:49:04.445812Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:04.445825Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 118 RawX2: 180388628576 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:04.445828Z node 42 :FLAT_TX_SCHEMESHARD INFO: TDropFileStore::TPropose, operationId: 1005:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:49:04.445837Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:04.445847Z node 42 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:49:04.445849Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:04.445853Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:04.445857Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:04.445860Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T17:49:04.445863Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:04.445865Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:49:04.445867Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:49:04.445875Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:04.445877Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T17:49:04.445879Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:49:04.445881Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:49:04.445985Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:04.445992Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:49:04.446140Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:04.446156Z node 42 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:04.446159Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:04.446171Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:04.446182Z node 42 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:04.446184Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [42:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T17:49:04.446187Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [42:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 3 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T17:49:04.446230Z node 42 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:04.446239Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:04.446243Z node 42 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:04.446247Z node 42 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 
2024-11-21T17:49:04.446250Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:04.446287Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:04.446290Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:04.446294Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:04.446319Z node 42 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:49:04.446335Z node 42 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:04.446339Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:04.446341Z node 42 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:04.446343Z node 42 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:49:04.446345Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:04.446349Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T17:49:04.446360Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:04.446697Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:04.446768Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:04.446776Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:04.446921Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T17:49:04.446956Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:49:04.446959Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T17:49:04.446992Z node 42 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:49:04.447007Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:49:04.447010Z node 42 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [42:448:2429] TestWaitNotification: OK eventTxId 1005 2024-11-21T17:49:04.447044Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/FS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:04.447058Z node 42 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/FS" took 19us result status StatusPathDoesNotExist 2024-11-21T17:49:04.447077Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/FS\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/FS" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted 2024-11-21T17:49:04.447109Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:49:04.447114Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:49:04.447119Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:49:04.447122Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:49:04.447127Z node 42 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:50.852809Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:50.852828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:50.852832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:50.852835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:50.852839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:50.852841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:50.852848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:50.852906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:50.863175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:50.863197Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:50.865349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:50.865433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:50.865461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:50.867689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:50.867747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:50.867839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.867991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:50.868533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.868763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:50.868772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.868783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:50.868789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:50.868794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:50.868831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is 
[1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:50.869918Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:50.885919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:50.886006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.886069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:50.886116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:50.886125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.886865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.886890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:50.886952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.886962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:50.886966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:50.886970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:50.887351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.887365Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:50.887370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:50.887674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.887685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.887691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.887698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.888300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2024-11-21T17:48:50.888732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:50.888782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:50.888977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.889005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:50.889013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.889078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:50.889086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.889118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:50.889133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:50.889573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:50.889585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:50.889625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.889631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:50.889716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.889723Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:50.889737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:50.889741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.889746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:50.889751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.889755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:50.889759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T17:48:50.889769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:50.889775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:50.889779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... sState, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [49:325:2313] sender: [49:326:2058] recipient: [49:316:2306] 2024-11-21T17:49:02.591000Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1001:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T17:49:02.591019Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1001, partId: 0, tablet: 72075186233409546 Leader for TabletID 72075186233409546 is [49:325:2313] sender: [49:333:2058] recipient: [49:15:2062] 2024-11-21T17:49:02.592200Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1001, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:02.592248Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1001:0, at schemeshard: 72057594046678944, message: TxId: 1001 Origin: 72075186233409546 Status: OK 2024-11-21T17:49:02.592256Z node 49 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId#1001:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T17:49:02.592263Z node 49 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1001:0 3 -> 128 2024-11-21T17:49:02.592946Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1001:0, at schemeshard: 72057594046678944 2024-11-21T17:49:02.592992Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1001:0, at schemeshard: 72057594046678944 2024-11-21T17:49:02.592998Z node 49 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#1001:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:02.593006Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1001 ready parts: 1/1 2024-11-21T17:49:02.593039Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1001 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:02.593338Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1001:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1001 msg type: 269090816 2024-11-21T17:49:02.593360Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1001, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1001 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1001 at step: 5000003 2024-11-21T17:49:02.593425Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:02.593439Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1001 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 210453399658 } } Step: 5000003 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:02.593444Z node 49 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId#1001:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:49:02.593457Z node 49 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1001:0 128 -> 240 2024-11-21T17:49:02.593480Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:49:02.593489Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 1001 2024-11-21T17:49:02.593782Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:02.593787Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1001, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:02.593813Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1001, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:02.593826Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:02.593829Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1001, path id: 2 2024-11-21T17:49:02.593833Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1001, path id: 3 2024-11-21T17:49:02.593883Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1001:0, at schemeshard: 72057594046678944 2024-11-21T17:49:02.593889Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1001:0 ProgressState 2024-11-21T17:49:02.593901Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1001:0 progress is 1/1 2024-11-21T17:49:02.593905Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1001 ready parts: 1/1 2024-11-21T17:49:02.593911Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1001, ready parts: 1/1, is published: false 2024-11-21T17:49:02.593916Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1001 ready parts: 1/1 2024-11-21T17:49:02.593919Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1001:0 2024-11-21T17:49:02.593922Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1001:0 2024-11-21T17:49:02.593940Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:02.593943Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1001, publications: 2, subscribers: 0 2024-11-21T17:49:02.593946Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1001, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:49:02.593948Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1001, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:49:02.594030Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 
72057594046678944, cookie: 1001 2024-11-21T17:49:02.594037Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1001 2024-11-21T17:49:02.594042Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1001 2024-11-21T17:49:02.594045Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1001, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:49:02.594048Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:49:02.594134Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1001 2024-11-21T17:49:02.594140Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1001 2024-11-21T17:49:02.594143Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1001 2024-11-21T17:49:02.594145Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1001, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:49:02.594147Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:02.594152Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1001, subscribers: 0 2024-11-21T17:49:02.594670Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1001 2024-11-21T17:49:02.594711Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1001 TestModificationResult got TxId: 1001, wait until txId: 1001 TestWaitNotification wait txId: 1001 2024-11-21T17:49:02.594745Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1001: send EvNotifyTxCompletion 2024-11-21T17:49:02.594750Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1001 2024-11-21T17:49:02.594795Z node 49 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1001, at schemeshard: 72057594046678944 2024-11-21T17:49:02.594808Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1001: got EvNotifyTxCompletionResult 2024-11-21T17:49:02.594811Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1001: satisfy waiter [49:369:2350] TestWaitNotification: OK eventTxId 1001 2024-11-21T17:49:02.594855Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:02.594878Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_1" took 30us result status StatusSuccess 
2024-11-21T17:49:02.594949Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_1" PathDescription { Self { Name: "FS_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_1" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 1 FileSystemId: "FS_1" FolderId: "folder" CloudId: "cloud" BlockSize: 4096 BlocksCount: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2024-11-21T17:49:03.974436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:03.974786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:03.974802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001975/r3tmp/tmpZFzTAb/pdisk_1.dat 2024-11-21T17:49:04.083569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.119879Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:04.161455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:04.161479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:04.171857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:04.274776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.482969Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2024-11-21T17:49:04.482996Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T17:49:04.548384Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.065328s, errors=0 2024-11-21T17:49:04.548412Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T17:49:04.940110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:04.940143Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:49:04.940161Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001975/r3tmp/tmpMb4vAJ/pdisk_1.dat 2024-11-21T17:49:05.016332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:05.030151Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:05.072113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:05.072145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:05.082695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:05.186113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:05.389591Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2024-11-21T17:49:05.389622Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2024-11-21T17:49:05.455603Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.065936s, errors=0 2024-11-21T17:49:05.455656Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2024-11-21T17:48:58.933677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:48:58.934012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:48:58.934027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001756/r3tmp/tmp0D0UVT/pdisk_1.dat 2024-11-21T17:48:59.025759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:48:59.041823Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:59.084078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:59.084108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:59.094604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:59.197991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:48:59.212955Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T17:48:59.213033Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:59.219921Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:59.219977Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:59.220096Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:48:59.220102Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:48:59.220107Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:48:59.220141Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:59.222464Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:48:59.222538Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:59.222562Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T17:48:59.222566Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:48:59.222569Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:48:59.222573Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.222939Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T17:48:59.222971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:59.223847Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:48:59.223869Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T17:48:59.223955Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T17:48:59.223991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.223996Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:59.224004Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:48:59.224009Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:48:59.224043Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:48:59.224102Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:48:59.224116Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:48:59.224334Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:59.224352Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:48:59.224435Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:48:59.224439Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:48:59.224443Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:48:59.224469Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:48:59.224474Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:48:59.224491Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:48:59.224501Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T17:48:59.224504Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:48:59.224507Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:48:59.224509Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:48:59.224598Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:48:59.224604Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:48:59.224615Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:48:59.224617Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:48:59.224620Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:48:59.224622Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:48:59.224661Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T17:48:59.224674Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:48:59.224699Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:48:59.224706Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:48:59.224793Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:48:59.224799Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:48:59.235082Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:48:59.235125Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:48:59.235252Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:48:59.235257Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:48:59.409179Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T17:48:59.409245Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T17:48:59.409807Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:48:59.409818Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:48:59.409858Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:48:59.409864Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:48:59.409873Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T17:48:59.409936Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:48:59.409968Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:48:59.410067Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:48:59.410071Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:48:59.410084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:48:59.410094Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:48:59.410366Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:48:59.410444Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:48:59.410579Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.410584Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:48:59.410588Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:48:59.410613Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:48:59.410627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:48:59.410664Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:48:59.410667Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:48:59.410690Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:48:59.410700Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:03.445875Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2024-11-21T17:49:03.445879Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2024-11-21T17:49:03.445887Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T17:49:03.445914Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:03.445917Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2024-11-21T17:49:03.445919Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:03.445922Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:03.445931Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T17:49:03.445934Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:03.445937Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T17:49:03.456298Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2024-11-21T17:49:03.456331Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2024-11-21T17:49:03.456343Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2024-11-21T17:49:03.472108Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T17:49:03.472139Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715662 ProcessProposeKqpTransaction 2024-11-21T17:49:03.472357Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jd7xf031a0ra7kvstkq8h0jq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTAxZjk5MDQtN2M2MTBmODgtNmEyN2FlY2ItMmVjMGYwOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:03.473048Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1068:2869], Recipient [3:634:2539]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:49:03.473099Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:03.473113Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2024-11-21T17:49:03.473120Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2024-11-21T17:49:03.473132Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:03.473151Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T17:49:03.473157Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:49:03.473161Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:03.473165Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:03.473178Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2024-11-21T17:49:03.473184Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T17:49:03.473187Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:03.473191Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:03.473194Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:03.473209Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T17:49:03.473264Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1068:2869], 0} after executionsCount# 1 2024-11-21T17:49:03.473272Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1068:2869], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:03.473302Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1068:2869], 0} finished in read 2024-11-21T17:49:03.473321Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T17:49:03.473324Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:03.473328Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:03.473331Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:03.473342Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T17:49:03.473345Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:03.473349Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2024-11-21T17:49:03.473354Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:49:03.473375Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:49:03.473806Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1068:2869], Recipient [3:634:2539]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:49:03.473817Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2024-11-21T17:49:03.489096Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T17:49:03.489126Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715663 ProcessProposeKqpTransaction 2024-11-21T17:49:03.489305Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xf03jcmrdzqae08vwcxda, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjUzODZhOGItOTU2MGY1ZGMtNTM2ZTMxNTYtNjE2ZWI3Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:03.489948Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1097:2892], Recipient [3:867:2702]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2024-11-21T17:49:03.489988Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T17:49:03.490002Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2024-11-21T17:49:03.490009Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2024-11-21T17:49:03.490021Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2024-11-21T17:49:03.490039Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T17:49:03.490045Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2024-11-21T17:49:03.490049Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:03.490053Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T17:49:03.490067Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2024-11-21T17:49:03.490072Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
2024-11-21T17:49:03.490075Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:03.490079Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T17:49:03.490082Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2024-11-21T17:49:03.490095Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2024-11-21T17:49:03.490164Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1097:2892], 0} after executionsCount# 1 2024-11-21T17:49:03.490172Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1097:2892], 0} sends rowCount# 2, bytes# 48, quota rows left# 32765, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:03.490201Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1097:2892], 0} finished in read 2024-11-21T17:49:03.490212Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T17:49:03.490217Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T17:49:03.490221Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:49:03.490224Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:49:03.490236Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T17:49:03.490239Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:49:03.490243Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2024-11-21T17:49:03.490248Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T17:49:03.490267Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T17:49:03.490719Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1097:2892], Recipient [3:867:2702]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:49:03.490731Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TSettingsValidation::TestDifferentDedupParams [GOOD] Test command err: 2024-11-21T17:47:18.210659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790619205310135:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.210768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001420/r3tmp/tmpGDWJ6u/pdisk_1.dat 2024-11-21T17:47:18.240223Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:18.270461Z node 1 :IMPORT WARN: 
Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23356, node 1 2024-11-21T17:47:18.297622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001420/r3tmp/yandexGD02SE.tmp 2024-11-21T17:47:18.297631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001420/r3tmp/yandexGD02SE.tmp 2024-11-21T17:47:18.297712Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001420/r3tmp/yandexGD02SE.tmp 2024-11-21T17:47:18.297752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:18.300573Z INFO: TTestServer started on Port 6216 GrpcPort 23356 TClient is connected to server localhost:6216 2024-11-21T17:47:18.312449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.312497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.313541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:23356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.392951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.394791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.403766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:47:18.553239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619205310682:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.553265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619205310674:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.553329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.554017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.555816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790619205310688:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:47:18.583853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.589476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.603163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790619205311010:2584] 2024-11-21T17:47:23.210974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790619205310135:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.211006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.826848Z :Restarts INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.829925Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.833054Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790640680147842:2774] connected; active server actors: 1 2024-11-21T17:47:23.833125Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.833202Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.833243Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.833293Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.833656Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.833740Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.833893Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.833966Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.833974Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.833976Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.833979Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.833983Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.833991Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.834021Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.834029Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.834032Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790640680147865:2433], now have 1 active actors on pipe 2024-11-21T17:47:23.834041Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.882241Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.882259Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790640680147841:2773], now have 1 active actors on pipe 2024-11-21T17:47:23.882264Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:47:23.884010Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439790619205310317:2188] txId 281474976715674 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T17:47:23.884348Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" 
} ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "t ... 17:49:03.426793Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 104 2024-11-21T17:49:03.426856Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 105 to 105 2024-11-21T17:49:03.426861Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426863Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 105 2024-11-21T17:49:03.426866Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 106 to 106 2024-11-21T17:49:03.426869Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426871Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 106 2024-11-21T17:49:03.426873Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 107 to 107 2024-11-21T17:49:03.426874Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426876Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 107 2024-11-21T17:49:03.426877Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 108 to 108 2024-11-21T17:49:03.426880Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426881Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 108 2024-11-21T17:49:03.426882Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 109 to 109 2024-11-21T17:49:03.426884Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 
session: try to update token 2024-11-21T17:49:03.426886Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 109 2024-11-21T17:49:03.426888Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 110 to 110 2024-11-21T17:49:03.426889Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426890Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 110 2024-11-21T17:49:03.426892Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 111 to 111 2024-11-21T17:49:03.426894Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426897Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 111 2024-11-21T17:49:03.426898Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 112 to 112 2024-11-21T17:49:03.426900Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426901Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 112 2024-11-21T17:49:03.426903Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 113 to 113 2024-11-21T17:49:03.426905Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.426906Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 113 2024-11-21T17:49:03.427004Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 114 to 114 2024-11-21T17:49:03.427006Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.427007Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 114 2024-11-21T17:49:03.427009Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 115 to 115 
2024-11-21T17:49:03.427011Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.427012Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 115 2024-11-21T17:49:03.427029Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 116 to 116 2024-11-21T17:49:03.427032Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.427033Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 116 2024-11-21T17:49:03.427033Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:49:03.427035Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 117 to 117 2024-11-21T17:49:03.427046Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.427049Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 117 2024-11-21T17:49:03.427067Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 10 written { offset: 2062 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T17:49:03.427069Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] OnAck: seqNo=10, txId=? 2024-11-21T17:49:03.427071Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: acknoledged message 10 2024-11-21T17:49:03.427074Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write 1 messages with Id from 118 to 118 2024-11-21T17:49:03.427076Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T17:49:03.427077Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 118 2024-11-21T17:49:03.427118Z :INFO: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T17:49:03.427120Z :INFO: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:49:03.427123Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:49:03.427169Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T17:49:03.427177Z :WARNING: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T17:49:03.427195Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 11 written { offset: 2063 } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T17:49:03.427199Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] OnAck: seqNo=11, txId=? 2024-11-21T17:49:03.427201Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: acknoledged message 11 2024-11-21T17:49:03.427772Z :DEBUG: [/Root] SessionId [ce265254-47c5bd1f-9ceef404-f2ec736b|dd736665-c1c0510d-428b143c-334b7412_0] PartitionId [0] Generation [1] Write session: destroy === === END TEST (supposed ok)=== === >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> Backup::ProposeBackup [GOOD] >> UpsertLoad::ShouldDropCreateTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2024-11-21T17:49:03.362082Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:03.377543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:03.379444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:03.379458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:03.379501Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:03.379996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:03.380030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:03.380060Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:03.380077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:03.380088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:03.380099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:03.380109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:03.380121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:03.380133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:03.380143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.380156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:03.380176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:03.383400Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:03.384483Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:03.384550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:03.384559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:03.384584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.384616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:03.384626Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:03.384629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:03.384636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:03.384642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:03.384648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:03.384651Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:03.384668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.384676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:03.384684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:03.384687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:03.384694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:03.384698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:03.384704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:03.384706Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:03.384715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:03.384719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:03.384722Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:03.384729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T17:49:03.384734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:03.384736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:03.384763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2024-11-21T17:49:03.384788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2024-11-21T17:49:03.384798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T17:49:03.384808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2024-11-21T17:49:03.384827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:03.384833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:03.384835Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:03.384864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:03.384870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.384872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.384881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:03.384886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:03.384889Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:03.384902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:03.384907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:03.384910Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:03.384918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... d=0;TxId=103;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=31;merger=0;interval_id=26; 2024-11-21T17:49:06.083594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:06.083604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.083608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=31;finished=1; 2024-11-21T17:49:06.083612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:06.083649Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:06.083663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.083666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:06.083673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2024-11-21T17:49:06.083682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2024-11-21T17:49:06.083707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1024:2895] send ScanData to [1:1023:2894] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2024-11-21T17:49:06.083718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.083730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.083740Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2024-11-21T17:49:06.083761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:06.083766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.083772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.083774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1024:2895] finished for tablet 9437184 2024-11-21T17:49:06.083781Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1024:2895] send ScanData to [1:1023:2894] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:06.083816Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:1024:2895] and sent to [1:1023:2894] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1732211346082246,"name":"_full_task","f":1732211346082246,"d_finished":0,"c":0,"l":1732211346083785,"d":1539},"events":[{"name":"bootstrap","f":1732211346082281,"d_finished":304,"c":1,"l":1732211346082585,"d":304},{"a":1732211346083759,"name":"ack","f":1732211346083647,"d_finished":96,"c":1,"l":1732211346083743,"d":122},{"a":1732211346083758,"name":"processing","f":1732211346082713,"d_finished":615,"c":10,"l":1732211346083743,"d":642},{"name":"ProduceResults","f":1732211346082457,"d_finished":273,"c":13,"l":1732211346083773,"d":273},{"a":1732211346083773,"name":"Finish","f":1732211346083773,"d_finished":0,"c":0,"l":1732211346083785,"d":12},{"name":"task_result","f":1732211346082715,"d_finished":503,"c":9,"l":1732211346083620,"d":503}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2024-11-21T17:49:06.083825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:06.082165Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2024-11-21T17:49:06.083828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:06.083835Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T17:49:06.083839Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> TColumnShardTestReadWrite::WriteReadStandalone >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> TColumnShardTestReadWrite::ReadWithProgram ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Backup::ProposeBackup [GOOD] Test command err: 2024-11-21T17:49:04.222143Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 
0 2024-11-21T17:49:04.235440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:04.237471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:04.237492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:04.237541Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:04.238210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:04.238254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:04.238302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:04.238319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:04.238339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:04.238352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:04.238368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:04.238398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:04.238414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:04.238427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:04.238446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:04.238461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:04.241459Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:04.241470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T17:49:04.242282Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:04.242322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:04.242326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:04.242345Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:04.242370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:04.242390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:04.242394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:04.242399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:04.242406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:04.242411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:04.242413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:04.242423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:04.242427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:04.242431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:04.242433Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:04.242439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:04.242442Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:04.242448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:04.242450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:04.242457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:04.242461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:04.242465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:04.242470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:04.242474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:04.242476Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:04.242498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T17:49:04.242504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T17:49:04.242511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:04.242519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T17:49:04.242531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:04.242535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:04.242538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:04.242570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:04.242574Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:04.242577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:04.242583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:04.242588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:04.242590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:04.242601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:04.242604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:04.242607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;f ... Id=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2400;num_rows=100;batch_columns=_yql_plan_step,_yql_tx_id,_yql_write_id; 2024-11-21T17:49:06.221115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:321:2338] send ScanData to [1:315:2332] txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 2400 rows: 100 page faults: 0 finished: 0 pageFault: 0 arrow schema: _yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64 2024-11-21T17:49:06.221134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:100;schema=_yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.221152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:100;schema=_yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.221157Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:06.221162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:06.221294Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:129;method=PutObject;id=[9437184:0:0:1:3:632:0]; 2024-11-21T17:49:06.232154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:06.232204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:100;schema=_yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.232213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:06.232226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;);columns=3;rows=100; 2024-11-21T17:49:06.232255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2400;num_rows=100;batch_columns=_yql_plan_step,_yql_tx_id,_yql_write_id; 2024-11-21T17:49:06.232318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:321:2338] send ScanData to [1:315:2332] txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 2400 rows: 100 page faults: 0 finished: 0 pageFault: 0 arrow schema: _yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64 2024-11-21T17:49:06.232334Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.232350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.232359Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.232499Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:129;method=PutObject;id=[9437184:0:0:1:4:632:0]; 2024-11-21T17:49:06.243366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:06.243404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.243414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;); 2024-11-21T17:49:06.243422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:321:2338] finished for tablet 9437184 2024-11-21T17:49:06.243440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:321:2338] send ScanData to [1:315:2332] txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:06.243511Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:321:2338] and sent to [1:315:2332] packs: 0 txId: 1 scanId: 1 gen: 9437184 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.002},{"events":["l_task_result"],"t":0.003},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.048}],"full":{"a":1732211346195251,"name":"_full_task","f":1732211346195251,"d_finished":0,"c":0,"l":1732211346243449,"d":48198},"events":[{"name":"bootstrap","f":1732211346195370,"d_finished":885,"c":1,"l":1732211346196255,"d":885},{"a":1732211346243354,"name":"ack","f":1732211346197699,"d_finished":706,"c":4,"l":1732211346232362,"d":801},{"a":1732211346243347,"name":"processing","f":1732211346196305,"d_finished":1738,"c":24,"l":1732211346232362,"d":1840},{"name":"ProduceResults","f":1732211346195745,"d_finished":1195,"c":30,"l":1732211346243418,"d":1195},{"a":1732211346243419,"name":"Finish","f":1732211346243419,"d_finished":0,"c":0,"l":1732211346243449,"d":30},{"name":"task_result","f":1732211346196308,"d_finished":968,"c":20,"l":1732211346198788,"d":968}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;;) 2024-11-21T17:49:06.243530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:06.195107Z;index_granules=0;index_portions=4;index_batches=4;committed_batches=0;schema_columns=0;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=18544;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=18544;selected_rows=0; 2024-11-21T17:49:06.243536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:06.243553Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=context.h:72;profile=; 2024-11-21T17:49:06.243560Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:321:2338];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=;column_names=;);;program_input=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;; 2024-11-21T17:49:06.243637Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2024-11-21T17:49:06.265131Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnExecute;tx_id=116;fline=tx_controller.cpp:360;event=start; 2024-11-21T17:49:06.265171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=116;fline=tx_controller.cpp:371;event=start;tx_info=116:TX_KIND_BACKUP;min=0;max=18446744073709551615;plan=0;src=[1:239:2257];cookie=0; 2024-11-21T17:49:06.265180Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=116;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:239:2257]; 2024-11-21T17:49:06.265187Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=116;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=116; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2024-11-21T17:49:03.992018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:03.992520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:03.992554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00197e/r3tmp/tmpbujAze/pdisk_1.dat 2024-11-21T17:49:04.084214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.120040Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:04.162017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:04.162040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:04.172380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:04.289419Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2024-11-21T17:49:04.391949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:611:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.391982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.394459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.611804Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2024-11-21T17:49:04.612028Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2024-11-21T17:49:04.632922Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor finished in 0.020851s, errors=0 2024-11-21T17:49:04.633056Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T17:49:04.633077Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2024-11-21T17:49:04.685530Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor finished in 0.052394s, errors=0 2024-11-21T17:49:04.685559Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:718:2595] with tag# 3 2024-11-21T17:49:05.124315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:05.124349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:49:05.124366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00197e/r3tmp/tmpuzIkpm/pdisk_1.dat 2024-11-21T17:49:05.200189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:05.213925Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:05.255664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:05.255697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:05.266195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:05.369882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:05.574563Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2024-11-21T17:49:05.574598Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2024-11-21T17:49:05.952932Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.378274s, errors=0 2024-11-21T17:49:05.952970Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 2024-11-21T17:49:05.954163Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2024-11-21T17:49:05.955748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:742:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:05.955773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.126700Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2024-11-21T17:49:06.128316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:808:2672], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.128355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.129773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:06.164960Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T17:49:06.343593Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2024-11-21T17:49:06.343659Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2024-11-21T17:49:06.354169Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 1} TUpsertActor finished in 0.010461s, errors=0 2024-11-21T17:49:06.354266Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T17:49:06.354282Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2024-11-21T17:49:06.406621Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 3} TUpsertActor finished in 0.052286s, errors=0 2024-11-21T17:49:06.406654Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:901:2746] with tag# 3 >> EvWrite::WriteWithSplit >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> EvWrite::WriteInTransaction >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |79.4%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 304 176 28 48 32 24 16 24 56 |79.4%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TFileStoreWithReboots::CreateWithIntermediateDirs [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2024-11-21T17:49:04.218391Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:04.230706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:04.232485Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:04.232500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:04.232542Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:04.233266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:04.233303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:04.233339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:04.233356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:04.233371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:04.233388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:04.233399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:04.233410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:04.233420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:04.233430Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:04.233443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:04.233458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:04.236355Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:04.237436Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:04.237485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:04.237490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:04.237514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:04.237541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:04.237551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:04.237554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:04.237559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:04.237565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:04.237570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:04.237572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:04.237582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:04.237586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:04.237590Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:04.237592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:04.237598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:04.237602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:04.237607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:04.237609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:04.237617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:04.237621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:04.237623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:04.237628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:04.237633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:04.237636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:04.237658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T17:49:04.237663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T17:49:04.237669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T17:49:04.237675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T17:49:04.237688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:04.237693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:04.237695Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:04.237709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:04.237713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:04.237715Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:04.237723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:04.237727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:04.237730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:04.237742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:04.237746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:04.237748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:04.237755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
=0;TxId=103;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=31;merger=0;interval_id=26; 2024-11-21T17:49:06.908089Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:06.908099Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.908103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=31;finished=1; 2024-11-21T17:49:06.908110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:06.908158Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:06.908177Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.908181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:06.908192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2024-11-21T17:49:06.908203Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2024-11-21T17:49:06.908245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1024:2895] send ScanData to [1:1023:2894] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2024-11-21T17:49:06.908270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.908285Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.908293Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2024-11-21T17:49:06.908319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:06.908328Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.908335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:06.908340Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1024:2895] finished for tablet 9437184 2024-11-21T17:49:06.908348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1024:2895] send ScanData to [1:1023:2894] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:06.908400Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:1024:2895] and sent to [1:1023:2894] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1732211346906631,"name":"_full_task","f":1732211346906631,"d_finished":0,"c":0,"l":1732211346908356,"d":1725},"events":[{"name":"bootstrap","f":1732211346906674,"d_finished":316,"c":1,"l":1732211346906990,"d":316},{"a":1732211346908317,"name":"ack","f":1732211346908153,"d_finished":143,"c":1,"l":1732211346908296,"d":182},{"a":1732211346908316,"name":"processing","f":1732211346907001,"d_finished":676,"c":10,"l":1732211346908296,"d":716},{"name":"ProduceResults","f":1732211346906853,"d_finished":328,"c":13,"l":1732211346908337,"d":328},{"a":1732211346908338,"name":"Finish","f":1732211346908338,"d_finished":0,"c":0,"l":1732211346908356,"d":18},{"name":"task_result","f":1732211346907003,"d_finished":516,"c":9,"l":1732211346908116,"d":516}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2024-11-21T17:49:06.908412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:06.906538Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2024-11-21T17:49:06.908416Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:06.908426Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T17:49:06.908435Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1024:2895];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> EvWrite::WriteInTransaction [GOOD] >> TFileStoreWithReboots::CreateAlterChannels [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> EvWrite::WriteWithSplit [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD] Test command err: 2024-11-21T17:49:06.891994Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 
InFlightDataSize: 0 2024-11-21T17:49:06.908780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:06.911288Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:06.911309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:06.911359Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:06.912028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:06.912062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:06.912097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:06.912115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:06.912134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:06.912148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:06.912163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:06.912179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:06.912194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:06.912209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.912243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:06.912261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:06.916693Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:06.917743Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:06.917793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:06.917804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:06.917825Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.917857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:06.917868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:06.917872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:06.917881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:06.917890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:06.917897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:06.917901Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:06.917916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.917923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:06.917930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:06.917933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:06.917941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:06.917946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:06.917953Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:06.917957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:06.917968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:06.917974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:06.917978Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:06.917988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:06.917994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:06.917998Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:06.918022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T17:49:06.918030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T17:49:06.918038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T17:49:06.918047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T17:49:06.918062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:06.918068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:06.918072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:06.918094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:06.918100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.918104Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.918116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:06.918122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:06.918126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:06.918143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:06.918149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:06.918153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:06.918183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... LUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:07.460700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:07.460714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:07.460719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460723Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:07.460725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:07.460739Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:07.460741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T17:49:07.460745Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=1; 2024-11-21T17:49:07.460749Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=2048;merger=0;interval_id=1; 2024-11-21T17:49:07.460754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:07.460759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=2048;finished=1; 2024-11-21T17:49:07.460765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:07.460813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:07.460825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:07.460833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2024-11-21T17:49:07.460840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2024-11-21T17:49:07.460860Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:270:2288] send ScanData to [1:267:2285] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 229376 rows: 2048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2024-11-21T17:49:07.460868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:07.460940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.460947Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: Scan [1:270:2288] finished for tablet 9437184 2024-11-21T17:49:07.460956Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:270:2288] send ScanData to [1:267:2285] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:07.461001Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:270:2288] and sent to [1:267:2285] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1732211347457959,"name":"_full_task","f":1732211347457959,"d_finished":0,"c":0,"l":1732211347460961,"d":3002},"events":[{"name":"bootstrap","f":1732211347458043,"d_finished":271,"c":1,"l":1732211347458314,"d":271},{"a":1732211347460934,"name":"ack","f":1732211347460809,"d_finished":71,"c":1,"l":1732211347460880,"d":98},{"a":1732211347460934,"name":"processing","f":1732211347458432,"d_finished":1814,"c":9,"l":1732211347460880,"d":1841},{"name":"ProduceResults","f":1732211347458208,"d_finished":183,"c":12,"l":1732211347460946,"d":183},{"a":1732211347460946,"name":"Finish","f":1732211347460946,"d_finished":0,"c":0,"l":1732211347460961,"d":15},{"name":"task_result","f":1732211347458435,"d_finished":1731,"c":8,"l":1732211347460767,"d":1731}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;) 2024-11-21T17:49:07.461011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:07.457911Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=19232;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=19232;selected_rows=0; 2024-11-21T17:49:07.461015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:07.461021Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T17:49:07.461025Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:270:2288];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> Normalizers::EmptyTablesNormalizer [GOOD] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2024-11-21T17:49:06.886717Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 
2024-11-21T17:49:06.903667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:06.906824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:06.906839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:06.906877Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:06.907504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:06.907533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:06.907564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:06.907582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:06.907598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:06.907614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:06.907630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:06.907646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:06.907663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:06.907679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.907701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:06.907728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:06.912516Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:06.913386Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:06.913439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:06.913449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:06.913472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.913504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:06.913516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:06.913520Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:06.913528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:06.913537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:06.913543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:06.913547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:06.913563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.913569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:06.913575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:06.913579Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:06.913588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:06.913594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:06.913600Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:06.913604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:06.913615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:06.913621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:06.913625Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:06.913635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:06.913640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:06.913644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:06.913670Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T17:49:06.913679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2024-11-21T17:49:06.913686Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:06.913695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T17:49:06.913711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:06.913717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:06.913721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:06.913743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:06.913749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.913753Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.913765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:06.913771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:06.913775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:06.913793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:06.913798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:06.913802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:06.913814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... ult=0;count=0;finished=0; 2024-11-21T17:49:07.469720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:07.469729Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:49:07.469744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:152;scan_step=name=ASSEMBLER::SPEC;duration=0.000000s;size=0;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2024-11-21T17:49:07.469787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:152;scan_step=name=ASSEMBLER::PK;duration=0.000000s;size=0;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2024-11-21T17:49:07.469831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:152;scan_step=name=SNAPSHOT;duration=0.000000s;size=0;details={};;scan_step_idx=4; 2024-11-21T17:49:07.469854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:152;scan_step=name=ALLOCATE_MEMORY::Filter;duration=0.000000s;size=0;details={stage=Filter;};;scan_step_idx=5; 2024-11-21T17:49:07.469862Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:152;scan_step=name=ASSEMBLER::EF;duration=0.000000s;size=0;details={columns=(column_ids=9;column_names=saved_at;);;};;scan_step_idx=6; 2024-11-21T17:49:07.469879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:152;scan_step=name=PROGRAM;duration=0.000000s;size=0;details={};;scan_step_idx=7; 2024-11-21T17:49:07.469940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:07.469945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=fetching.cpp:15;event=apply; 2024-11-21T17:49:07.469950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=interval.cpp:31;event=fetched;interval_idx=0; 2024-11-21T17:49:07.469955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=interval.cpp:15;event=start_construct_result;interval_idx=0;interval_id=2; 2024-11-21T17:49:07.469964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T17:49:07.469975Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2024-11-21T17:49:07.469980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:07.469984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:07.469994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:07.470002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2024-11-21T17:49:07.470007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:07.470011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:07.470020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:07.470025Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T17:49:07.470030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=2; 2024-11-21T17:49:07.470035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2024-11-21T17:49:07.470043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:07.470053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2024-11-21T17:49:07.470062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2024-11-21T17:49:07.470095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:07.470107Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2024-11-21T17:49:07.470115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2024-11-21T17:49:07.470120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:281:2299] finished for tablet 9437184 2024-11-21T17:49:07.470130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:281:2299] send ScanData to [1:280:2298] txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 
2024-11-21T17:49:07.470176Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:281:2299] and sent to [1:280:2298] packs: 0 txId: 100 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1732211347469029,"name":"_full_task","f":1732211347469029,"d_finished":0,"c":0,"l":1732211347470136,"d":1107},"events":[{"name":"bootstrap","f":1732211347469061,"d_finished":338,"c":1,"l":1732211347469399,"d":338},{"a":1732211347470092,"name":"ack","f":1732211347470092,"d_finished":0,"c":0,"l":1732211347470136,"d":44},{"a":1732211347470090,"name":"processing","f":1732211347469526,"d_finished":260,"c":8,"l":1732211347470077,"d":306},{"name":"ProduceResults","f":1732211347469244,"d_finished":185,"c":10,"l":1732211347470118,"d":185},{"a":1732211347470118,"name":"Finish","f":1732211347470118,"d_finished":0,"c":0,"l":1732211347470136,"d":18},{"name":"task_result","f":1732211347469528,"d_finished":243,"c":8,"l":1732211347470077,"d":243}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;) 2024-11-21T17:49:07.470190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:07.468972Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2024-11-21T17:49:07.470195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:07.470203Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T17:49:07.470212Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:281:2299];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2024-11-21T17:49:03.974873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:03.975253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:03.975275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019bf/r3tmp/tmpTK5F62/pdisk_1.dat 2024-11-21T17:49:04.084349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.119386Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:04.161222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:04.161265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:04.171684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:04.274776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:04.482057Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2024-11-21T17:49:04.482226Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2024-11-21T17:49:04.503259Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor finished in 0.020996s, errors=0 2024-11-21T17:49:04.503350Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2024-11-21T17:49:04.503363Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2024-11-21T17:49:04.503633Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2024-11-21T17:49:04.503658Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} started fullscan actor# [1:713:2597] 2024-11-21T17:49:04.503665Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Bootstrap called, sample# 100 2024-11-21T17:49:04.503667Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Connect to# 72075186224037888 called 2024-11-21T17:49:04.503823Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Handle TEvClientConnected called, Status# OK 
2024-11-21T17:49:04.503976Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} finished in 0.000138s, sampled# 100, iter finished# 1, oks# 100 2024-11-21T17:49:04.503992Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} received keyCount# 100 2024-11-21T17:49:04.504014Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} started# 10 actors each with inflight# 1 2024-11-21T17:49:04.504021Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} Bootstrap called 2024-11-21T17:49:04.504023Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504029Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} Bootstrap called 2024-11-21T17:49:04.504032Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504034Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} Bootstrap called 2024-11-21T17:49:04.504036Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504039Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} Bootstrap called 2024-11-21T17:49:04.504041Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504044Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} Bootstrap called 2024-11-21T17:49:04.504046Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504048Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} Bootstrap called 2024-11-21T17:49:04.504052Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504055Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} Bootstrap called 2024-11-21T17:49:04.504057Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504060Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} Bootstrap called 2024-11-21T17:49:04.504062Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504065Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} Bootstrap called 2024-11-21T17:49:04.504067Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504070Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} Bootstrap called 2024-11-21T17:49:04.504072Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T17:49:04.504501Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} session: ydb://session/3?node_id=1&id=ZDNjODBlNDgtM2MzYTNmNmYtZTUyODJlYTgtZTA5YTVhOWQ= 2024-11-21T17:49:04.504573Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} session: ydb://session/3?node_id=1&id=OTVlMTJhYTUtZGE2MzIzMzUtYmMyOTU3MjItMTQ0YjU5ZTQ= 2024-11-21T17:49:04.504720Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} session: ydb://session/3?node_id=1&id=YzRlODMyNTQtYTc2MTYyYjMtMjczOGI5YTctNGNlODU3Njc= 2024-11-21T17:49:04.504828Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} session: ydb://session/3?node_id=1&id=NTM5OGU4M2EtNTc5ODRkYy0zYTQ3ZjE0Ny1kZjQzMWQyMg== 2024-11-21T17:49:04.504932Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} session: ydb://session/3?node_id=1&id=NDFhZjA0NDItNWQ1OTk2NjEtNTY1ZDFmYmYtOTg1YjU4YmI= 2024-11-21T17:49:04.505050Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} session: ydb://session/3?node_id=1&id=NWMyNzkwMmEtMzZkYzMwYTQtNjIwZmNkOGItZjlkMzdmY2Y= 2024-11-21T17:49:04.505299Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} session: ydb://session/3?node_id=1&id=MmI2ZDY0YTMtYjljYjZkMDUtYjc5ZjNkNzgtMWFkYWUwMGM= 2024-11-21T17:49:04.505310Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} session: ydb://session/3?node_id=1&id=NmVkZGQyNi1hZWY3N2QxZS1lNjFhMTVhZS00MDQ3NWZlNQ== 2024-11-21T17:49:04.505501Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} session: ydb://session/3?node_id=1&id=ODM4MDgxNWItOTg1OWViNmUtNzI0ZjlmNDctOGY2ZTE3ZGE= 2024-11-21T17:49:04.505509Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} session: ydb://session/3?node_id=1&id=ODllMjgxODYtMzYwZjk5ODktZGI4YWFjYTgtYzQxMzdiZjM= 2024-11-21T17:49:04.506150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:738:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:774:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:775:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:776:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:04.506214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Erro ... _id=2&id=ZTg4MDhiNTItM2NmYTNiNjYtMTMyMDQ5ODYtODI1NTVl 2024-11-21T17:49:06.386517Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 3} session: ydb://session/3?node_id=2&id=ZjA2OTdlMGMtYWRiODIyYTktMTcyNzNjYjUtMjgyNjIzY2E= 2024-11-21T17:49:06.386757Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 4} session: ydb://session/3?node_id=2&id=NjI0MTI5NWEtYTA5NzMwNjEtNmY3ZWVmYzEtODViNGM2NmU= 2024-11-21T17:49:06.386766Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 5} session: ydb://session/3?node_id=2&id=NDVlYjk4NTAtZTc0NDIyZTYtMWI4MjRiNzgtN2U1ODUxODk= 2024-11-21T17:49:06.386953Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 6} session: ydb://session/3?node_id=2&id=YzY1MGQyOGMtZjgwZjdkMDgtZjZkYmQ5ODAtNzliNzNlMjA= 2024-11-21T17:49:06.386960Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 7} session: ydb://session/3?node_id=2&id=ZmU2NTlhMDMtMzU3YTVhMTctYjExYThlMi00ZDA1ZTJm 2024-11-21T17:49:06.387105Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 8} session: ydb://session/3?node_id=2&id=NGE3MTM5OGEtNjJlMTU3YmUtYmVlN2Y3Mi1iMzNhMWY1NA== 2024-11-21T17:49:06.387290Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 9} session: ydb://session/3?node_id=2&id=ODU4OWMzYjItODM3MGRlNTMtM2M2Y2NlNS05MWM1Zjc5Mg== 2024-11-21T17:49:06.387544Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 10} session: ydb://session/3?node_id=2&id=MWEwYjZlYTItZGQyYWU1YmYtY2MyZmExZWQtNTQxODk4Yzc= 2024-11-21T17:49:06.387560Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 11} session: ydb://session/3?node_id=2&id=YjIzNjZhZjktMjJjMTY5MzQtOTRkMmVlYmQtZjg5MGM2Zjk= 2024-11-21T17:49:06.388118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:736:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:764:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:765:2643], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:766:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:770:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:772:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:774:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2668], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:795:2673], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.388511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:804:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:06.389280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:06.557735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:784:2662], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:785:2663], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:786:2664], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:787:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:788:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:789:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:794:2672], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:803:2681], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.557831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:822:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:06.639935Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 7} finished in 0.252960s, errors=0 2024-11-21T17:49:06.639990Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 7 { Tag: 7 DurationMs: 252 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:06.701123Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 5} finished in 0.314342s, errors=0 2024-11-21T17:49:06.701200Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 5 { Tag: 5 DurationMs: 314 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:06.768132Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 8} finished in 0.381008s, errors=0 2024-11-21T17:49:06.768204Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 8 { Tag: 8 DurationMs: 381 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:06.838271Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 9} finished in 0.450964s, errors=0 2024-11-21T17:49:06.838327Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 9 { Tag: 9 DurationMs: 450 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:06.921664Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 6} finished in 0.534687s, errors=0 2024-11-21T17:49:06.921742Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 6 { Tag: 6 DurationMs: 534 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:07.009125Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 10} finished in 0.621562s, errors=0 2024-11-21T17:49:07.009188Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 10 { Tag: 10 DurationMs: 621 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:07.101704Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 2} finished in 0.715205s, errors=0 2024-11-21T17:49:07.101786Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 2 { Tag: 2 DurationMs: 715 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:07.206508Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 11} finished in 0.818934s, errors=0 2024-11-21T17:49:07.206578Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 11 { Tag: 11 DurationMs: 818 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:07.314072Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 3} finished in 0.927539s, errors=0 2024-11-21T17:49:07.314143Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 3 { Tag: 3 DurationMs: 927 OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:07.436990Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 4} finished in 1.050217s, errors=0 2024-11-21T17:49:07.437089Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 4 { Tag: 4 DurationMs: 1050 
OperationsOK: 100 OperationsError: 0 } 2024-11-21T17:49:07.437095Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished in 1.051185s, oks# 1000, errors# 0 2024-11-21T17:49:07.437135Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:709:2593] with tag# 3 |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlterChannels [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:50.273152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:50.273177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:50.273182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:50.273187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:50.273193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:50.273197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:50.273207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:50.273287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:50.280481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:50.280498Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:50.282620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:50.282708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:50.282736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T17:48:50.285147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:50.285208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:50.285318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.285492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:50.286082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.286338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:50.286349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.286361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:50.286368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:50.286374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:50.286412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:50.287681Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:50.304579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:50.304668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.304740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:50.304787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:50.304794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.305649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.305674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:50.305732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:48:50.305743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:50.305747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:50.305752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:50.306077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.306086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:50.306090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:50.306360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.306369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.306374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.306381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.306884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:50.307292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:50.307338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:50.307494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.307517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:50.307521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.307572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:50.307577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.307600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:50.307610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, 
pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:50.307881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:50.307888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:50.307920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.307923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:50.307984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.307988Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:50.307996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:50.307999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.308002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:50.308005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.308008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:50.308010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:50.308017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:50.308021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:50.308024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:49:07.498269Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:07.498272Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T17:49:07.498284Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:07.498291Z node 72 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:49:07.498297Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:07.498313Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 2024-11-21T17:49:07.498374Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:07.498646Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:07.498664Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:07.498668Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:07.498889Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T17:49:07.498905Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2024-11-21T17:49:07.498927Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:07.498941Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2024-11-21T17:49:07.498945Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T17:49:07.498949Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T17:49:07.499231Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:07.499247Z node 
72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:07.499250Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:07.499255Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T17:49:07.499275Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:07.499583Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T17:49:07.499599Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T17:49:07.499644Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:07.499656Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 309237647465 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:07.499661Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:49:07.499683Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:49:07.499685Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:07.499691Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:07.499696Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:49:07.499700Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:07.499703Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:49:07.499705Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:49:07.499719Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:07.499723Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2024-11-21T17:49:07.499727Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:49:07.500039Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:07.500046Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:07.500064Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:07.500067Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:49:07.500128Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:07.500134Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:07.500136Z node 72 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:49:07.500139Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:49:07.500141Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:07.500151Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T17:49:07.500586Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T17:49:07.500636Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:49:07.500642Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T17:49:07.500687Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:49:07.500698Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:49:07.500701Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:397:2378] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:49:07.500750Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:07.500769Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 26us result status StatusSuccess 2024-11-21T17:49:07.500843Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:52.608145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:52.608162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:52.608167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:52.608170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:52.608175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:52.608178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:52.608186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:52.608264Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:52.615398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:52.615412Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:52.616904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:52.616961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:52.616982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:52.618922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:52.618975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:52.619042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:52.619191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:52.619683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:52.619849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:52.619855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:52.619863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:52.619867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:52.619870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:52.619895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:52.620814Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:52.629918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:52.629975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:52.630015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:52.630046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:52.630051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:52.630562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:52.630576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:52.630612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:52.630618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:52.630621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:52.630624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:52.630857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:52.630863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:52.630866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:52.631058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:52.631063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:52.631067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:52.631071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:52.631455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:52.631746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:52.631785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:52.631898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:52.631912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:52.631916Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:52.631952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:52.631956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:52.631973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:52.631981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:52.632219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:52.632223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:52.632265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:52.632270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:52.632328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:52.632332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:52.632339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:52.632343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:52.632348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:52.632353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:52.632357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:52.632360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:52.632368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:52.632373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:52.632377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
txid 1003:1 2024-11-21T17:49:07.255244Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:07.255246Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:49:07.255247Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:49:07.255249Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:49:07.255251Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T17:49:07.255253Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T17:49:07.255260Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:49:07.255262Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 5, subscribers: 0 2024-11-21T17:49:07.255264Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:49:07.255266Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T17:49:07.255268Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T17:49:07.255269Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T17:49:07.255271Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 6], 2 2024-11-21T17:49:07.255534Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255542Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255545Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:07.255548Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:49:07.255550Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:07.255773Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255780Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255782Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:07.255786Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:49:07.255789Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:07.255835Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255840Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255841Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:07.255843Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T17:49:07.255845Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:07.255878Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255883Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255885Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:07.255886Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T17:49:07.255888Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:49:07.255920Z node 61 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255925Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.255926Z node 61 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:07.255928Z node 61 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 2 2024-11-21T17:49:07.255930Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:49:07.255934Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:49:07.256203Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 
2024-11-21T17:49:07.256515Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.256525Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.256531Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:07.256537Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:49:07.256576Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:49:07.256581Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:49:07.256665Z node 61 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:49:07.256681Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:07.256685Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [61:388:2369] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:07.256741Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:07.256766Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 33us result status StatusSuccess 2024-11-21T17:49:07.256833Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "z" PathId: 6 IndexTabletId: 72075186233409546 Config { Version: 1 FileSystemId: "Valid/x/y/z" FolderId: "folder" CloudId: "cloud" BlockSize: 4096 BlocksCount: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 1 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T17:49:07.256875Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:07.256891Z node 61 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 17us result status StatusPathDoesNotExist 2024-11-21T17:49:07.256906Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2024-11-21T17:49:07.037560Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:07.050326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:07.052073Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:07.052083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:07.052119Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:07.052588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2024-11-21T17:49:07.052612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:07.052629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:07.052645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:07.052655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:07.052665Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:07.052676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:07.052685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:07.052696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:07.052706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:07.052719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:07.052730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:07.052745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:07.055918Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:07.056841Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:07.056889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2024-11-21T17:49:07.056895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2024-11-21T17:49:07.056932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2024-11-21T17:49:07.056938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T17:49:07.056945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T17:49:07.056953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T17:49:07.056987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 
2024-11-21T17:49:07.056998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2024-11-21T17:49:07.057001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2024-11-21T17:49:07.057013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:07.057021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:07.057025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:07.057027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2024-11-21T17:49:07.057033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:07.057038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:07.057042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:07.057044Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:07.057052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:07.057057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:07.057062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:07.057064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:07.057071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:07.057075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:07.057080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:07.057082Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:07.057090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:07.057095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:07.057097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:07.057102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:07.057107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:07.057109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:07.057117Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=1; 2024-11-21T17:49:07.057121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=1; 2024-11-21T17:49:07.057124Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=1; 2024-11-21T17:49:07.057127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=1; 2024-11-21T17:49:07.057141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:07.057145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:07.057148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:07.057161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:07.057165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switch ... 
rk;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:07.739399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:278:2291];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:07.739405Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:07.739416Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:07.739420Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:07.739427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:07.739434Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:49:07.739440Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:07.739448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:07.739452Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:07.739457Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:07.739462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:49:07.739467Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:07.739519Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:07.739579Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:07.739589Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:07.739591Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:49:07.739593Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:07.739596Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:07.739599Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:07.739602Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T17:49:07.739605Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:07.739608Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:07.739610Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:07.739613Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:07.739616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:440;event=StartTtl;skip=not_ready_tiers; 2024-11-21T17:49:07.739619Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:07.782109Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2024-11-21T17:49:07.782176Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2024-11-21T17:49:07.782249Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "key1" } Columns { Name: "key2" } Columns { Name: "field" } } } ; 2024-11-21T17:49:07.782272Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[key1;key2;field;];};]; 2024-11-21T17:49:07.782413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan 
started;actor_id=[1:331:2336];trace_detailed=; 2024-11-21T17:49:07.782537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2024-11-21T17:49:07.782561Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2024-11-21T17:49:07.782596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:07.782606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:07.782643Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:07.782652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:07.782658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T17:49:07.782663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:331:2336] finished for tablet 9437184 2024-11-21T17:49:07.782679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:331:2336] send ScanData to [1:329:2335] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:07.782741Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:331:2336] and sent to [1:329:2335] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1732211347782386,"name":"_full_task","f":1732211347782386,"d_finished":0,"c":0,"l":1732211347782688,"d":302},"events":[{"name":"bootstrap","f":1732211347782440,"d_finished":172,"c":1,"l":1732211347782612,"d":172},{"a":1732211347782638,"name":"ack","f":1732211347782638,"d_finished":0,"c":0,"l":1732211347782688,"d":50},{"a":1732211347782635,"name":"processing","f":1732211347782635,"d_finished":0,"c":0,"l":1732211347782688,"d":53},{"name":"ProduceResults","f":1732211347782584,"d_finished":41,"c":2,"l":1732211347782661,"d":41},{"a":1732211347782661,"name":"Finish","f":1732211347782661,"d_finished":0,"c":0,"l":1732211347782688,"d":27}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2024-11-21T17:49:07.782761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:07.782289Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T17:49:07.782767Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:07.782775Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T17:49:07.782782Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:331:2336];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2024-11-21T17:49:06.865491Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:06.882585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:06.884812Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:06.884832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:06.884865Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:06.885333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:06.885359Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:06.885380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:06.885391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:06.885404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:06.885414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:06.885423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:06.885434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:06.885444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:06.885453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.885466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:06.885475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:06.889178Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:06.889980Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:06.890025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:06.890033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:06.890055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.890083Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:06.890093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:06.890096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:06.890102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:06.890108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:06.890113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:06.890115Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:06.890126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.890131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:06.890135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:06.890138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:06.890143Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:06.890147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:06.890152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:06.890154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:06.890162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:06.890166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:06.890168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T17:49:06.890175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:06.890179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:06.890182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:06.890202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T17:49:06.890208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T17:49:06.890213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2024-11-21T17:49:06.890220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T17:49:06.890232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:06.890237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:06.890240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:06.890254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:06.890259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.890262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.890269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:06.890274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:06.890276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:06.890288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:06.890292Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:06.890295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:06.890302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... BUG: Scan [1:272:2287] send ScanData to [1:264:2279] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 8400896 rows: 1024 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2024-11-21T17:49:07.611308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.611333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.611338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:07.611342Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:07.612569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:07.612590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.612594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T17:49:07.612600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:219;stage=no data is ready 
yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.612626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T17:49:07.612634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T17:49:07.612639Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=1; 2024-11-21T17:49:07.612643Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=1024;merger=0;interval_id=1; 2024-11-21T17:49:07.612648Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:07.612653Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.612656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=1024;finished=1; 2024-11-21T17:49:07.612662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=1024; 2024-11-21T17:49:07.612669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=8400896;num_rows=1024;batch_columns=key,field; 2024-11-21T17:49:07.612701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] send ScanData to [1:264:2279] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 8400896 rows: 1024 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2024-11-21T17:49:07.612708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.612714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.612718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.613890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:07.613908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.613914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T17:49:07.613919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] finished for tablet 9437184 2024-11-21T17:49:07.613931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] send ScanData to [1:264:2279] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:07.613982Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:272:2287] and sent to [1:264:2279] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack"],"t":0.04},{"events":["l_task_result"],"t":0.046},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.047}],"full":{"a":1732211347566284,"name":"_full_task","f":1732211347566284,"d_finished":0,"c":0,"l":1732211347613935,"d":47651},"events":[{"name":"bootstrap","f":1732211347566428,"d_finished":397,"c":1,"l":1732211347566825,"d":397},{"a":1732211347613883,"name":"ack","f":1732211347607064,"d_finished":4324,"c":2,"l":1732211347612604,"d":4376},{"a":1732211347613879,"name":"processing","f":1732211347567067,"d_finished":24366,"c":11,"l":1732211347612723,"d":24422},{"name":"ProduceResults","f":1732211347566681,"d_finished":4510,"c":15,"l":1732211347613916,"d":4510},{"a":1732211347613916,"name":"Finish","f":1732211347613916,"d_finished":0,"c":0,"l":1732211347613935,"d":19},{"name":"task_result","f":1732211347567069,"d_finished":20016,"c":9,"l":1732211347612723,"d":20016}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;) 2024-11-21T17:49:07.613992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:07.566142Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14766240;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14766240;selected_rows=0; 2024-11-21T17:49:07.613997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:07.614011Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.020153s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.019886s;size=0.014766232;details={columns=1,2,4294967040,4294967041,4294967042;};};]};; 2024-11-21T17:49:07.614015Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> TFileStoreWithReboots::CreateAlterNoVersion [GOOD] >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlterNoVersion [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 
72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:50.912780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:50.912807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:50.912812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:50.912817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:50.912823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:50.912827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:50.912837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:50.912913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:50.921561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:50.921583Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:50.923385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:50.923464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:50.923487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:50.925621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:50.925667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:50.925737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.925905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:50.926348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.926531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:50.926537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.926545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:50.926549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:48:50.926554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:50.926582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:50.927600Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:50.939005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:50.939087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.939141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:50.939176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:50.939182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.939875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.939893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:50.939937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.939944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:50.939947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:50.939951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:50.940326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.940338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:50.940343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:50.940745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.940755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.940761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.940768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.941388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:50.941787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:50.941827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:50.942024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:50.942060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:50.942069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.942121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:50.942127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:50.942154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:50.942163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:50.942510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:50.942516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:50.942551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:50.942555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:50.942622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:50.942627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:50.942636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:50.942639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.942643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:50.942646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:50.942649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:50.942652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:50.942659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:50.942663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:50.942666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... AT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72057594037968897 2024-11-21T17:49:08.139922Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T17:49:08.139984Z node 72 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:49:08.140164Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:08.140170Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T17:49:08.140183Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:08.140188Z node 72 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:49:08.140192Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:08.140204Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 2024-11-21T17:49:08.140291Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:08.140630Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.140659Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.140664Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:08.140951Z node 72 :FLAT_TX_SCHEMESHARD 
DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T17:49:08.140967Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2024-11-21T17:49:08.140992Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:08.141008Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2024-11-21T17:49:08.141012Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2024-11-21T17:49:08.141016Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T17:49:08.141266Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.141282Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.141287Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:08.141303Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T17:49:08.141320Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:08.141564Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T17:49:08.141578Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T17:49:08.141614Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:08.141625Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 309237647465 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:08.141629Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:49:08.141645Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:49:08.141648Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:08.141654Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:08.141658Z node 72 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:49:08.141662Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:08.141665Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:49:08.141667Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:49:08.141678Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:08.141694Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2024-11-21T17:49:08.141696Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:49:08.142060Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:08.142071Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:08.142091Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:08.142095Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:49:08.142169Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:08.142183Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:08.142187Z node 72 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:49:08.142191Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:49:08.142195Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:08.142206Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T17:49:08.142545Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T17:49:08.142592Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:49:08.142598Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T17:49:08.142649Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:49:08.142663Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 
2024-11-21T17:49:08.142667Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:397:2378] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:49:08.142714Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:08.142733Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 24us result status StatusSuccess 2024-11-21T17:49:08.142775Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2024-11-21T17:49:06.850302Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:06.864913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:06.867404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:06.867417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:06.867451Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:06.867934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:06.867958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:06.867981Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:06.867993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:06.868004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:06.868013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:06.868023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:06.868034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:06.868044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:06.868053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.868065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:06.868075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:06.871868Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:06.872738Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:06.872789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:06.872799Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:06.872822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:06.872858Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:06.872861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:06.872867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:06.872877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:06.872880Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:06.872890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:06.872900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:06.872902Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:06.872908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:06.872917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:06.872919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:06.872926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:06.872931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:06.872933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:06.872939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T17:49:06.872943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:06.872946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:06.872966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T17:49:06.872972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2024-11-21T17:49:06.872977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:06.872983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T17:49:06.872996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:06.873001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:06.873003Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:06.873024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:06.873028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.873031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.873038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:06.873043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:06.873045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:06.873057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:06.873061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:06.873064Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:06.873071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 43];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=31;merger=0;interval_id=25; 2024-11-21T17:49:08.370866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T17:49:08.370872Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:08.370874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=31;finished=1; 2024-11-21T17:49:08.370877Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T17:49:08.370911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:08.370931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:08.370936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T17:49:08.370946Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2024-11-21T17:49:08.370954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2024-11-21T17:49:08.370972Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] send ScanData to [1:427:2442] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 1984 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string 2024-11-21T17:49:08.370980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:08.370987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:08.370994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 
2024-11-21T17:49:08.371010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T17:49:08.371016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:08.371021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T17:49:08.371024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] finished for tablet 9437184 2024-11-21T17:49:08.371029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] send ScanData to [1:427:2442] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T17:49:08.371065Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:428:2443] and sent to [1:427:2442] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1732211348369607,"name":"_full_task","f":1732211348369607,"d_finished":0,"c":0,"l":1732211348371033,"d":1426},"events":[{"name":"bootstrap","f":1732211348369631,"d_finished":243,"c":1,"l":1732211348369874,"d":243},{"a":1732211348371009,"name":"ack","f":1732211348370907,"d_finished":89,"c":1,"l":1732211348370996,"d":113},{"a":1732211348371008,"name":"processing","f":1732211348369976,"d_finished":514,"c":10,"l":1732211348370996,"d":539},{"name":"ProduceResults","f":1732211348369770,"d_finished":255,"c":13,"l":1732211348371023,"d":255},{"a":1732211348371023,"name":"Finish","f":1732211348371023,"d_finished":0,"c":0,"l":1732211348371033,"d":10},{"name":"task_result","f":1732211348369979,"d_finished":412,"c":9,"l":1732211348370883,"d":412}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2024-11-21T17:49:08.371087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T17:49:08.369549Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2024-11-21T17:49:08.371090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T17:49:08.371096Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T17:49:08.371101Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] >> YdbTableSplit::RenameTablesAndSplit >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots [GOOD] >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::SplitByLoadWithUpdates 
>> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |79.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> TFileStoreWithReboots::CreateDrop [GOOD] >> YdbTableSplit::SplitByLoadWithReads >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> YdbTableSplit::MergeByNoLoadAfterSplit |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:49.267705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:49.267722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:49.267725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:49.267728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:49.267732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:49.267734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:49.267740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:49.267791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:49.274620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2024-11-21T17:48:49.274637Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:49.276443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:49.276514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:49.276533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:49.278784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:49.278849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:49.278916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:49.279129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:49.279719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:49.279958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:49.279967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:49.279977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:49.279982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:49.279986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:49.280024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:49.281012Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:49.291157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:49.291223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.291267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:49.291301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:49.291306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:48:49.291920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:49.291936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:49.291975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.291982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:49.291985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:49.291988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:49.292285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.292296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:49.292301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:49.292585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.292590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.292594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:49.292599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:49.292965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:49.293266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:49.293296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:49.293420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:49.293434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:49.293438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T17:48:49.293476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:49.293480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:49.293499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:49.293507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:49.293820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:49.293825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:49.293850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:49.293853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:49.293910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:49.293914Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:49.293924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:49.293927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:49.293931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:49.293934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:49.293936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:49.293939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:49.293944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:49.293948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:49.293950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
chemeshard: 72057594046678944 2024-11-21T17:49:09.511756Z node 84 :FLAT_TX_SCHEMESHARD INFO: TDropFileStore::TPropose, operationId: 1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:09.511763Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T17:49:09.511792Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:09.511935Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.511970Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:49:09.512206Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.512269Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:09.512320Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.513696Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T17:49:09.513728Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T17:49:09.513830Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.513849Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 360777255017 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:09.513855Z node 84 :FLAT_TX_SCHEMESHARD INFO: TDropFileStore::TPropose, operationId: 1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:49:09.513873Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:09.513897Z node 84 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:49:09.513901Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:09.513912Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:49:09.513922Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2024-11-21T17:49:09.513928Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:49:09.513935Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:09.513940Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:49:09.513943Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:49:09.513967Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:09.513972Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 0 2024-11-21T17:49:09.513977Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T17:49:09.513980Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:49:09.514123Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:09.514131Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:49:09.514474Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:09.514498Z node 84 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:09.514502Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:09.514526Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:09.514545Z node 84 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.514548Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [84:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T17:49:09.514552Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [84:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:49:09.514656Z node 84 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.514666Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.514670Z node 84 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:49:09.514675Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:49:09.514679Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:09.514720Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:09.514724Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:09.514733Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:49:09.514774Z node 84 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.514782Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.514785Z node 84 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:49:09.514789Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T17:49:09.514792Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:49:09.514799Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T17:49:09.514852Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:49:09.514892Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.515518Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.515550Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:09.515764Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:09.515777Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T17:49:09.515835Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:49:09.515840Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T17:49:09.515883Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:49:09.515897Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:49:09.515900Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [84:403:2384] TestWaitNotification: OK eventTxId 
1002 2024-11-21T17:49:09.515946Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:09.515969Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_3" took 32us result status StatusPathDoesNotExist 2024-11-21T17:49:09.515996Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/FS_3\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/FS_3" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD] Test command err: 2024-11-21T17:47:18.191175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790619343550502:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.193445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:47:18.224460Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013d0/r3tmp/tmpRuvOF7/pdisk_1.dat 2024-11-21T17:47:18.271285Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8395, node 1 2024-11-21T17:47:18.298608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/0013d0/r3tmp/yandexjhJeXd.tmp 2024-11-21T17:47:18.298619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/0013d0/r3tmp/yandexjhJeXd.tmp 2024-11-21T17:47:18.298669Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/0013d0/r3tmp/yandexjhJeXd.tmp 2024-11-21T17:47:18.298698Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:47:18.304117Z INFO: TTestServer started on Port 14857 GrpcPort 8395 TClient is connected to server localhost:14857 2024-11-21T17:47:18.336247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T17:47:18.336295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.338197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:8395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.392351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.401935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:47:18.467068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.487023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619343551258:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.487084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619343551248:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.487095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.487649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.487930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790619343551287:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.487964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.489304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790619343551262:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:47:18.580934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.586642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.588347Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790619343551396:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.589484Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDg5MzA0OTQtZWVjMDU2ZTYtN2E4Y2QwNTgtZGM0ZWMyMzE=, ActorId: [1:7439790619343551245:2304], ActorState: ExecuteState, TraceId: 01jd7xbsjp9mx2ct68zav43r33, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.589946Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:18.602131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790619343551604:2596] 2024-11-21T17:47:23.191091Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790619343550502:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.191144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.729153Z :WriteToTopic_Demo_1 INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.757487Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.783812Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790640818388409:2781] connected; active server actors: 1 2024-11-21T17:47:23.783876Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.783990Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.784581Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.784658Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.786367Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.786970Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.787116Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.787139Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.787143Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.787144Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.787147Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.787151Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.787209Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.787609Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.787617Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.787623Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.787628Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790640818388442:2428], now have 1 active actors on pipe 2024-11-21T17:47:23.832701Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.832719Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790640818388408:2780], now have 1 active actors on pipe 2024-11-21T17:47:23.835047Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439790619343550872 RawX2: 4294969475 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelPr ... 7} (19-19) 2024-11-21T17:49:08.564997Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 8} (20-20) 2024-11-21T17:49:08.565001Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 9} (21-21) 2024-11-21T17:49:08.565008Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] The application data is transferred to the client. Number of messages 6, size 14400000 bytes 2024-11-21T17:49:08.565011Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] The application data is transferred to the client. Number of messages 10, size 1000000 bytes 2024-11-21T17:49:08.565015Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] Returning serverBytesSize = 0 to budget 2024-11-21T17:49:08.565026Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] Returning serverBytesSize = 0 to budget 0 16 2024-11-21T17:49:08.565050Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] Commit offsets [6, 22). 
Partition stream id: 1 2024-11-21T17:49:08.564920Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1000678 } } 2024-11-21T17:49:08.564975Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 got read request: guid# 4f6b7862-bfe1ce5f-8ca96a81-830e52f7 2024-11-21T17:49:08.565275Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 6 end: 22 } } } } 2024-11-21T17:49:08.565391Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 22 prev 0 end 22 by cookie 3 2024-11-21T17:49:08.565581Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T17:49:08.565593Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T17:49:08.568372Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:49:08.568410Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 22 (startOffset 0) session test-consumer_10_1_16395533911146657454_v1 2024-11-21T17:49:08.568450Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T17:49:08.568465Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:49:08.568501Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T17:49:08.568513Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 6 endOffset 22 with cookie 2 2024-11-21T17:49:08.568659Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 6 2024-11-21T17:49:08.568964Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 22 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:49:08.568979Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:49:08.568991Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T17:49:08.569013Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T17:49:08.569022Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 22 endOffset 22 with cookie 3 2024-11-21T17:49:08.569029Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 22 2024-11-21T17:49:08.569366Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 6 } } 2024-11-21T17:49:08.569469Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 22 } } 2024-11-21T17:49:09.530349Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2024-11-21T17:49:09.530370Z :INFO: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:49:09.530947Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 checking auth because of timeout 2024-11-21T17:49:09.530978Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 auth for : test-consumer 2024-11-21T17:49:09.531109Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 Handle describe topics response 2024-11-21T17:49:09.531131Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session 
test-consumer_10_1_16395533911146657454_v1 auth is DEAD 2024-11-21T17:49:09.531149Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 auth ok: topics# 1, initDone# 1 2024-11-21T17:49:10.530005Z :INFO: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] Closing read session. Close timeout: 0.000000s 2024-11-21T17:49:10.530021Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2024-11-21T17:49:10.530028Z :INFO: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:49:10.530047Z :NOTICE: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:49:10.530055Z :DEBUG: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] [] Abort session to cluster 2024-11-21T17:49:10.530278Z :NOTICE: [/Root] [/Root] [c8cc96d-4a3c4b37-ab27b7d6-4c3162e3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:49:10.530646Z :INFO: [/Root] SessionId [test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2024-11-21T17:49:10.530652Z :INFO: [/Root] SessionId [test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0] PartitionId [0] Generation [2] Write session will now close 2024-11-21T17:49:10.530657Z :DEBUG: [/Root] SessionId [test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0] PartitionId [0] Generation [2] Write session: aborting 2024-11-21T17:49:10.530806Z :DEBUG: [/Root] SessionId [test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0] PartitionId [0] Generation [2] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T17:49:10.530982Z :INFO: [/Root] SessionId [test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2024-11-21T17:49:10.531763Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2024-11-21T17:49:10.531771Z :DEBUG: [/Root] SessionId [test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0] PartitionId [0] Generation [2] Write session is aborting and will not restart 2024-11-21T17:49:10.531797Z :DEBUG: [/Root] SessionId [test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0] PartitionId [0] Generation [2] Write session: destroy 2024-11-21T17:49:10.533765Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 grpc read done: success# 0, data# { } 2024-11-21T17:49:10.533779Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 grpc read failed 2024-11-21T17:49:10.533787Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 grpc closed 2024-11-21T17:49:10.533795Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_16395533911146657454_v1 is DEAD 2024-11-21T17:49:10.534001Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0 grpc read done: success: 0 data: 2024-11-21T17:49:10.534009Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0 grpc read failed 2024-11-21T17:49:10.534013Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0 grpc closed 2024-11-21T17:49:10.534015Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|2ce77c59-570fbaa4-d3da86c4-998b051e_0 is DEAD 2024-11-21T17:49:10.534140Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:49:10.534170Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439791090627975378:2531] disconnected; active server actors: 1 2024-11-21T17:49:10.534173Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439791090627975378:2531] client test-consumer disconnected session test-consumer_10_1_16395533911146657454_v1 2024-11-21T17:49:10.534196Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:49:10.534208Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe 
[10:7439791082038040672:2500] destroyed 2024-11-21T17:49:10.534214Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:49:10.534217Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_16395533911146657454_v1 2024-11-21T17:49:10.534220Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439791090627975381:2534] destroyed 2024-11-21T17:49:10.534231Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:49:10.534233Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_16395533911146657454_v1 |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:58.793559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:58.793575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:58.793579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:58.793582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:58.793586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:58.793588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:58.793594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:58.793652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:58.800775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:58.800791Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] 
recipient: [1:15:2062] 2024-11-21T17:48:58.802314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:58.802383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:58.802401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:58.804285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:58.804333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:58.804401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:58.804589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:58.805174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:58.805373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:58.805380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:58.805387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:58.805392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:58.805409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:58.805437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:58.806395Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:58.817980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:58.818040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.818080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:58.818116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:58.818123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.818608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:58.818622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:58.818657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.818663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:58.818666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:58.818669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:58.818894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.818899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:58.818902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:58.819101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.819105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.819111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:58.819115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:58.819521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:58.819791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:58.819817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:58.819927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:58.819942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:58.819946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:58.819987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:58.819991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:58.820007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:58.820015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:58.820300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:58.820307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:58.820328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:58.820331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:58.820375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:58.820379Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:58.820385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:58.820388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:58.820391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:58.820394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:58.820396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:58.820400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:58.820407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:58.820410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:58.820413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.763435Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:10.763439Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:49:10.763446Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T17:49:10.763449Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:49:10.763453Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T17:49:10.763507Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:49:10.763514Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T17:49:10.763524Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:49:10.763528Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:10.763533Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:49:10.763537Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:10.763540Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:10.763544Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:10.763568Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:10.763573Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 4, subscribers: 1 2024-11-21T17:49:10.763577Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2024-11-21T17:49:10.763580Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:49:10.763583Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:49:10.763586Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:49:10.763651Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.763659Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.763663Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 
2024-11-21T17:49:10.763682Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:49:10.763686Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:49:10.763804Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.763812Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.763817Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:10.763821Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:49:10.763824Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:10.764041Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.764050Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.764053Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:10.764056Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:49:10.764060Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:10.764109Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.764115Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.764118Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:10.764121Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:49:10.764125Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:10.764131Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:49:10.764136Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:301:2293] 2024-11-21T17:49:10.764316Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 Leader for TabletID 72057594037968897 is [49:213:2213] sender: [49:339:2058] recipient: [49:15:2062] 2024-11-21T17:49:10.770166Z node 49 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:49:10.770237Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:10.770326Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:49:10.770489Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:10.770499Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:49:10.770512Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:10.770518Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:49:10.770524Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:10.770528Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:10.770534Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:10.770624Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.770740Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.771135Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.771156Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:10.771171Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:10.771176Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [49:302:2294] 2024-11-21T17:49:10.771571Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:10.771603Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 
TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:10.771695Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:10.771734Z node 49 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/x" took 47us result status StatusPathDoesNotExist 2024-11-21T17:49:10.771768Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::ChainedCopyTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:56.521245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:56.521267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:56.521272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:56.521277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:56.521288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:56.521291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2024-11-21T17:47:56.521300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:56.521388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:56.534389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:56.534409Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:56.538214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:56.538352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:56.538386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:56.542454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:56.542554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:56.542674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.543124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:56.543873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.544163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:56.544174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.544187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:56.544195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:56.544201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:56.544275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:56.546629Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:56.565322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:56.565401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.565455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:56.565502Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:56.565509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.566248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.566271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:56.566307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.566316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:56.566320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:56.566324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:56.567104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.567113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:56.567119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:56.567421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.567429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.567434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.567440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.567990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:56.568436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:56.568494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:56.568678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:56.568705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:56.568712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.568769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:56.568775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:56.568798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:56.568810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:56.569295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:56.569309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:56.569371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:56.569377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:56.569448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:56.569454Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:56.569465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:56.569469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.569475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:56.569480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:56.569483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:56.569487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:56.569498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:56.569505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:56.569511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
id#1009:0 progress is 1/1 2024-11-21T17:49:09.231359Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1009 ready parts: 1/1 2024-11-21T17:49:09.231362Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1009, ready parts: 1/1, is published: true 2024-11-21T17:49:09.231365Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1009 ready parts: 1/1 2024-11-21T17:49:09.231367Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1009:0 2024-11-21T17:49:09.231369Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1009:0 2024-11-21T17:49:09.231382Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:49:09.231510Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1009 2024-11-21T17:49:09.231747Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1009 2024-11-21T17:49:09.233912Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 563 RawX2: 695784704476 } TabletId: 72075186233409548 State: 4 2024-11-21T17:49:09.233930Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:09.233972Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 684 RawX2: 695784704586 } TabletId: 72075186233409549 State: 4 2024-11-21T17:49:09.233977Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409549, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:09.234008Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 695784704269 } TabletId: 72075186233409546 State: 4 2024-11-21T17:49:09.234014Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:09.234475Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:09.234677Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:09.234870Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:49:09.235438Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:49:09.235498Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409548 2024-11-21T17:49:09.235606Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:09.235725Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths 
in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:09.235732Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:49:09.235744Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T17:49:09.235809Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:49:09.235856Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409549 2024-11-21T17:49:09.236600Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:49:09.236632Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 Forgetting tablet 72075186233409546 2024-11-21T17:49:09.236705Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.236717Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:09.236772Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 436 RawX2: 695784704360 } TabletId: 72075186233409547 State: 4 2024-11-21T17:49:09.236780Z node 162 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:09.237147Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:49:09.237154Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:49:09.237166Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:09.237180Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:09.237183Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:49:09.237205Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:49:09.237208Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:09.237211Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:09.237452Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 
2024-11-21T17:49:09.237459Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:49:09.237472Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:09.237477Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:49:09.237503Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:09.237549Z node 162 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T17:49:09.237845Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:49:09.237874Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:09.238245Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:09.238272Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:09.238275Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:49:09.238283Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:09.238573Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:49:09.238580Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:49:09.238624Z node 162 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1009, wait until txId: 1009 TestWaitNotification wait txId: 1009 2024-11-21T17:49:09.238669Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1009: send EvNotifyTxCompletion 2024-11-21T17:49:09.238672Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1009 2024-11-21T17:49:09.238710Z node 162 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1009, at schemeshard: 72057594046678944 2024-11-21T17:49:09.238721Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1009: got EvNotifyTxCompletionResult 2024-11-21T17:49:09.238723Z node 162 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1009: satisfy waiter [162:1058:3000] TestWaitNotification: OK eventTxId 1009 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted 
2024-11-21T17:49:09.238772Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:49:09.238780Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:49:09.238783Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:49:09.238787Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:49:09.238792Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T17:49:09.238796Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T17:49:09.238801Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T17:49:09.238805Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T17:49:09.238809Z node 162 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] >> TFileStoreWithReboots::CreateAlter [GOOD] >> TFileStoreWithReboots::SimultaneousCreateDropNfs [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:11.483977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:11.484005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:11.484010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:11.484015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:11.484031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:11.484035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:11.484044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:11.484115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:11.496086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:11.496109Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T17:49:11.505901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:11.506796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:11.506837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:11.517698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:11.518014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:11.518147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.518242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:11.519499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:11.519826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:11.519839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:11.519881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:11.519889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:11.519896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:11.519912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.522698Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:11.551221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:11.551306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.551367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:11.551411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:11.551419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.552081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.552113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:11.552182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.552194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:11.552199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:11.552204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:11.552838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.552853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:11.552858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:11.553242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.553255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.553262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:11.553269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:11.553896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:11.554323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:11.554378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:11.554564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.554587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:11.554595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:11.554661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:11.554668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:11.554696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:11.554708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:11.555075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:11.555083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:11.555124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:11.555129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:11.555221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.555228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:11.555241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:11.555245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:11.555251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:11.555256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:11.555261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:11.555264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:11.555274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:11.555280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:11.555283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:11.555582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:11.555594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:11.555598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:11.555603Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:11.555606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:11.555617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
thTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:11.657284Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:11.657326Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 46us result status StatusSuccess 2024-11-21T17:49:11.657415Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" 
PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:11.657488Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:11.657506Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 19us result status StatusSuccess 2024-11-21T17:49:11.657534Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } 
Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> TVectorIndexTests::CreateTableWithError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlter [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:54.476550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:54.476568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:54.476572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:54.476576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:54.476580Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:54.476584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:54.476592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:54.476645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:54.488173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:54.488192Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:54.490221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:54.490318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:54.490341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:54.492627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:54.492689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:54.492776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.492929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:54.493535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:54.493755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:54.493765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:54.493775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:54.493781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:54.493786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:54.493819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:54.495045Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:54.508460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:54.508526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:48:54.508566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:54.508599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:54.508603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.509257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.509290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:54.509347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.509356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:54.509360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:54.509364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:54.509782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.509790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:54.509793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:54.510068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.510075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.510079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:54.510085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:54.510576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:54.510958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:54.510997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:54.511162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:54.511184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:54.511191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:54.511248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:54.511254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:54.511278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:54.511288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:54.511616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:54.511622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:54.511656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:54.511659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:54.511735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:54.511740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:54.511748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:54.511751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:54.511755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:54.511758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:54.511761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:54.511763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:54.511771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:54.511775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:54.511777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
AT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72057594037968897 2024-11-21T17:49:11.915941Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T17:49:11.916022Z node 72 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-1" IOPS: 0 Throughput: 0 Size: 0 } BindedChannels { StoragePoolName: "pool-2" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:49:11.916077Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:11.916082Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2024-11-21T17:49:11.916096Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:11.916101Z node 72 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:49:11.916106Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2024-11-21T17:49:11.916122Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 2024-11-21T17:49:11.916183Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:11.916493Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.916515Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.916520Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:11.916859Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2024-11-21T17:49:11.916887Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2024-11-21T17:49:11.916924Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:11.916948Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2024-11-21T17:49:11.916953Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TConfigureParts operationId#1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 
2024-11-21T17:49:11.916960Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T17:49:11.917355Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.917383Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.917388Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:11.917397Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T17:49:11.917425Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:11.917832Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T17:49:11.917866Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2024-11-21T17:49:11.917932Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.917949Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 309237647465 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:11.917954Z node 72 :FLAT_TX_SCHEMESHARD INFO: TAlterFileStore::TPropose operationId#1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:49:11.917983Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:49:11.917986Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:11.917996Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:11.918004Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:49:11.918009Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:49:11.918013Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:49:11.918016Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:49:11.918035Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:11.918040Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2024-11-21T17:49:11.918043Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 
72057594046678944, LocalPathId: 3], 3 2024-11-21T17:49:11.918397Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:11.918404Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:11.918429Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:11.918446Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:49:11.918542Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:11.918550Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:49:11.918556Z node 72 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:49:11.918560Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:49:11.918563Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:11.918575Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2024-11-21T17:49:11.918992Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T17:49:11.919049Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:49:11.919055Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T17:49:11.919117Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:49:11.919131Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:49:11.919135Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:397:2378] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:49:11.919192Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:11.919218Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 34us result status StatusSuccess 2024-11-21T17:49:11.919294Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::SimultaneousCreateDropNfs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:51.508333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:51.508354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:51.508358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:51.508362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:51.508365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:51.508368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:51.508374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:51.508436Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:51.515324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:51.515339Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:51.516976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:51.517053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:51.517078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:51.519357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:51.519416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:51.519488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:51.519629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:51.520182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:51.520393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:51.520401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:51.520409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:51.520413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:51.520417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:51.520441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:51.521430Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:51.532856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:51.532942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:51.532990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:51.533032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:51.533039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:51.533635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:51.533653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:51.533701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:51.533709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:51.533711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:51.533714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:51.533966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:51.533972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:51.533975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:51.534198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:51.534204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:51.534207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:51.534213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:51.534572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:51.534882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:51.534914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:51.535037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:51.535055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:51.535059Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:51.535099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:51.535103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:51.535122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:51.535129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:51.535414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:51.535420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:51.535450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:51.535455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:51.535522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:51.535526Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:51.535534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:51.535537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:51.535540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:51.535543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:51.535546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:51.535548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:51.535556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:51.535559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:51.535561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
pdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:11.932766Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:11.932774Z node 84 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:49:11.932780Z node 84 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:49:11.932789Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:11.932822Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:49:11.934200Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:11.934218Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:11.934224Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:11.934528Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:11.934906Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:49:11.934977Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.935066Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:11.935186Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:49:11.935445Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:49:11.935488Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2024-11-21T17:49:11.935716Z node 84 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:49:11.935778Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:11.935786Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:49:11.935816Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409548 2024-11-21T17:49:11.935915Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:49:11.935946Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2024-11-21T17:49:11.936122Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:11.936129Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:11.936157Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:11.936202Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:11.936246Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:11.937790Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:11.937812Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:49:11.937830Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:49:11.937833Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:49:11.937846Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:11.937860Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:49:11.937866Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:49:11.937912Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:11.937918Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:11.937939Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:49:11.938007Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:11.938392Z node 84 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1003 2024-11-21T17:49:11.938478Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:49:11.938486Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2024-11-21T17:49:11.938505Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:49:11.938510Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:49:11.938595Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:49:11.938618Z node 84 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:49:11.938627Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:11.938631Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [84:527:2484] 2024-11-21T17:49:11.938644Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:49:11.938647Z node 84 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [84:527:2484] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T17:49:11.938706Z node 84 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:49:11.938718Z node 84 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:49:11.938726Z node 84 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T17:49:11.938802Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:11.938846Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 65us result status StatusPathDoesNotExist 2024-11-21T17:49:11.938890Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:49:11.938934Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T17:49:11.938956Z node 84 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 24us result status StatusSuccess 2024-11-21T17:49:11.939021Z node 84 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |79.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |79.5%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |79.5%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> TVectorIndexTests::CreateTableWithError [GOOD] |79.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |79.5%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:12.781411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:12.781435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:12.781441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:12.781445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:12.781460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:12.781463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:12.781471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:12.781551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:12.790909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:12.790923Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:12.793456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:12.794184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:12.794223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:12.795837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:12.796067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:12.796185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:12.796290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:12.797299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:12.797552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:12.797562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:12.797593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:12.797600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:12.797607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:12.797620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.800469Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:12.820798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:12.820889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.820956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:12.821007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:12.821016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.821730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:12.821754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:12.821800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.821808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:12.821811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:12.821814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:12.822326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.822357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:12.822364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:12.822960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.822974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.822980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:12.822988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:12.823627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:12.824321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:12.824381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:12.824567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:12.824597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:12.824608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:12.824680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:12.824688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:12.824722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:12.824737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:12.825351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:12.825362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:12.825411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:12.825417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:12.825516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.825524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:12.825536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:12.825539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:12.825544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:12.825548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:12.825552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:12.825554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:12.825565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:12.825569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:12.825572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:12.825867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:12.825884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:12.825888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:12.825893Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:12.825898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:12.825916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:49:12.826815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:49:12.826902Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:49:12.827310Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:49:12.828542Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:49:12.829097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:12.829179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2024-11-21T17:49:12.829204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2024-11-21T17:49:12.829208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2024-11-21T17:49:12.829431Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:49:12.831366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name: __ydb_parent" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:12.831407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2024-11-21T17:49:12.831520Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T17:49:12.832197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:12.832299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2024-11-21T17:49:12.832326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2024-11-21T17:49:12.832331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2024-11-21T17:49:12.833084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:12.833117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> DataShardReadIterator::ShouldReadKeyCellVec >> DataShardReadIteratorSysTables::ShouldRead >> DataShardReadIterator::ShouldHandleReadAck >> TTablesWithReboots::CopyTableAndDropWithReboots2 [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] Test command err: 2024-11-21T17:48:06.274567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790825319328257:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:06.274595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b87/r3tmp/tmpjwnsUm/pdisk_1.dat 2024-11-21T17:48:06.346128Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16452 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:48:06.357772Z node 1 :TX_PROXY DEBUG: actor# [1:7439790825319328481:2100] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:06.357797Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790825319328743:2247] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:06.357858Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790825319328563:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:06.357877Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790825319328563:2137], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:06.357945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:06.358343Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328203:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790825319328748:2248] 2024-11-21T17:48:06.358351Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328206:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790825319328749:2248] 2024-11-21T17:48:06.358369Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790825319328206:2052] Subscribe: subscriber# [1:7439790825319328749:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.358369Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790825319328203:2049] Subscribe: subscriber# [1:7439790825319328748:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.358382Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328209:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790825319328750:2248] 2024-11-21T17:48:06.358389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328749:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825319328206:2052] 2024-11-21T17:48:06.358391Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790825319328209:2055] Subscribe: subscriber# [1:7439790825319328750:2248], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:06.358393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328748:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825319328203:2049] 2024-11-21T17:48:06.358398Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328206:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790825319328749:2248] 2024-11-21T17:48:06.358401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328750:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825319328209:2055] 2024-11-21T17:48:06.358407Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328203:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7439790825319328748:2248] 2024-11-21T17:48:06.358407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825319328746:2248] 2024-11-21T17:48:06.358410Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328209:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790825319328750:2248] 2024-11-21T17:48:06.358414Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825319328745:2248] 2024-11-21T17:48:06.358424Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790825319328744:2248][/dc-1] Set up state: owner# [1:7439790825319328563:2137], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.358473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790825319328747:2248] 2024-11-21T17:48:06.358486Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790825319328744:2248][/dc-1] Path was already updated: owner# [1:7439790825319328563:2137], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:06.358504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328748:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825319328745:2248], cookie# 1 2024-11-21T17:48:06.358508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328749:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825319328746:2248], cookie# 1 2024-11-21T17:48:06.358515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328750:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825319328747:2248], cookie# 1 2024-11-21T17:48:06.358519Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328203:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825319328748:2248], cookie# 1 2024-11-21T17:48:06.358528Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328206:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825319328749:2248], cookie# 1 2024-11-21T17:48:06.358532Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790825319328209:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790825319328750:2248], cookie# 1 2024-11-21T17:48:06.358537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328748:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825319328203:2049], cookie# 1 2024-11-21T17:48:06.358543Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7439790825319328749:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825319328206:2052], cookie# 1 2024-11-21T17:48:06.358546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790825319328750:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825319328209:2055], cookie# 1 2024-11-21T17:48:06.358550Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825319328745:2248], cookie# 1 2024-11-21T17:48:06.358560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:06.358567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825319328746:2248], cookie# 1 2024-11-21T17:48:06.358571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:06.358574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790825319328747:2248], cookie# 1 2024-11-21T17:48:06.358576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790825319328744:2248][/dc-1] Unexpected sync response: sender# [1:7439790825319328747:2248], cookie# 1 2024-11-21T17:48:06.365649Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790825319328563:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:06.365726Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790825319328563:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... :2147], cacheItem# { Subscriber: { Subscriber: [2:7439790847088405836:2616] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:11.407527Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791104786444641:3124], recipient# [2:7439791104786444638:2410], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:11.652207Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439790842793437866:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:11.652271Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439790842793437866:2147], cacheItem# { Subscriber: { Subscriber: [2:7439790842793438428:2542] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:11.652294Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791104786444646:3125], recipient# [2:7439791104786444645:2411], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: 
KindUnknown DomainInfo }] } 2024-11-21T17:49:12.079213Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790847514199789:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.079258Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790847514199789:2100], cacheItem# { Subscriber: { Subscriber: [3:7439790851809167549:2354] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:12.079281Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791109507205672:2530], recipient# [3:7439791109507205671:2481], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.096997Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790847514199789:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.097048Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790847514199789:2100], cacheItem# { Subscriber: { Subscriber: [3:7439790851809167549:2354] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:12.097080Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791109507205674:2531], recipient# [3:7439791109507205673:2482], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.353643Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790847514199789:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.353690Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790847514199789:2100], cacheItem# { Subscriber: { Subscriber: [3:7439790847514199966:2207] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:12.353717Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791109507205676:2532], recipient# [3:7439791109507205675:2483], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.407920Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439790842793437866:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.407964Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439790842793437866:2147], cacheItem# { Subscriber: { Subscriber: [2:7439790847088405836:2616] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:12.407984Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791109081411950:3129], recipient# [2:7439791109081411947:2412], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.653969Z node 2 
:TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439790842793437866:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:12.654023Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439790842793437866:2147], cacheItem# { Subscriber: { Subscriber: [2:7439790842793438428:2542] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:12.654056Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791109081411955:3130], recipient# [2:7439791109081411954:2413], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec >> DataShardReadIterator::ShouldReceiveErrorAfterSplit |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> DataShardReadIteratorBatchMode::RangeFull |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> DataShardReadIterator::ShouldReverseReadMultipleKeys >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2024-11-21T17:48:45.928896Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:45.928925Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:45.931676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Complete 2024-11-21T17:48:45.931696Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:45.952726Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:45.952845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=8504438639750547167, session=0, seqNo=0) 2024-11-21T17:48:45.952877Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:45.963491Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=8504438639750547167, session=1) 2024-11-21T17:48:45.963574Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=16300907868770237566, session=0, seqNo=0) 2024-11-21T17:48:45.963592Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:45.974167Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=16300907868770237566, session=2) 2024-11-21T17:48:45.974307Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:141:2165], cookie=16329620931407777284, name="Sem1", limit=1) 2024-11-21T17:48:45.974341Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:48:45.985125Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:141:2165], cookie=16329620931407777284) 2024-11-21T17:48:45.985215Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2024-11-21T17:48:45.985299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T17:48:45.985328Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2024-11-21T17:48:45.995936Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T17:48:45.995960Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2024-11-21T17:48:45.996086Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=13178032120586551001, name="Sem1") 2024-11-21T17:48:45.996114Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=13178032120586551001) 2024-11-21T17:48:45.996172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:152:2176], cookie=10577193426162674591, name="Sem1") 2024-11-21T17:48:45.996179Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:152:2176], cookie=10577193426162674591) 2024-11-21T17:48:46.290393Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.301224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:46.555143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.565896Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:46.819862Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.830591Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
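Aside (not part of the captured output): the SCHEME_BOARD_SUBSCRIBER records earlier in this log ("Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1", then "Sync is done ... successes# 2", followed by "Unexpected sync response" for the late third reply) show the subscriber treating a sync round as finished once a strict majority of its replica proxies has answered. The snippet below is only an illustrative sketch of that majority bookkeeping; the type and member names are invented for illustration and do not come from the YDB sources.

#include <cstddef>
#include <cstdio>

// Hypothetical helper: tracks replies for one sync cookie fanned out to Size replicas.
struct TSyncQuorum {
    std::size_t Size;          // size# 3 in the trace above
    std::size_t Successes = 0; // TEvSyncVersionResponse replies counted so far
    std::size_t Failures = 0;  // failed or partial replies

    explicit TSyncQuorum(std::size_t size) : Size(size) {}

    std::size_t Half() const { return Size / 2; }                       // half# 1 for size# 3
    bool Done() const { return Successes > Half() || Failures > Half(); } // outcome can no longer change
    bool Succeeded() const { return Successes > Half(); }
};

int main() {
    TSyncQuorum q(3);
    q.Successes++;                                            // "Sync is in progress ... successes# 1"
    std::printf("done=%d\n", q.Done());                       // prints done=0
    q.Successes++;                                            // "Sync is done ... successes# 2"
    std::printf("done=%d ok=%d\n", q.Done(), q.Succeeded());  // prints done=1 ok=1
    // A third reply arriving after this point corresponds to the
    // "Unexpected sync response" record in the log.
    return 0;
}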
2024-11-21T17:48:47.074330Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.085163Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.339119Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.349899Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.603448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.614321Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.848031Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.858622Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.102456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.113127Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.357044Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.367879Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.652619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.663346Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.927548Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.938325Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.192384Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.203146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.457188Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.467986Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.721818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.732710Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.017036Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.027861Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.292089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.302932Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.556934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.567774Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.821665Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.832621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.086635Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.097658Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.373154Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.383977Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.648344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.659460Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2024-11-21T17:48:51.913712Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.924464Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.178480Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.189210Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.443358Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.454242Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.718283Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.729126Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.993222Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.004061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.257640Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.268508Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.522737Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.533671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.787727Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.798584Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.084136Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.094958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.349143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.359988Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.624574Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.635545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.879150Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.889970Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.143764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.154707Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.418515Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.429379Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.682870Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.693639Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.957310Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.968059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:56.221668Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:56.232426Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:56.486457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:56.497379Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
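Aside (not part of the captured output): the long run of TTxSelfCheck::Execute/Complete pairs here is the tablet waking up periodically (roughly every 250 ms in this trace) to look for expired sessions; once a session's timeout elapses, a TTxSessionTimeout deletes it and drops its semaphore owner links, which is visible further down as "Deleting session 1" / "Deleting session 1 / semaphore 1 "Sem1" owner link". A simplified sketch of such an expiry sweep follows; all types and names are hypothetical and are not the Kesus tablet's actual local transactions.

#include <chrono>
#include <cstdint>
#include <map>
#include <set>

using TClock = std::chrono::steady_clock;

// Hypothetical in-memory state, not the Kesus schema.
struct TSession {
    TClock::time_point Deadline;             // last attach time + session timeout
    std::set<std::uint64_t> OwnedSemaphores; // semaphores this session holds units of
};

struct TSemaphore {
    std::uint64_t Limit = 1;
    std::uint64_t Used = 0;
};

struct TSessionSweeper {
    std::map<std::uint64_t, TSession> Sessions;
    std::map<std::uint64_t, TSemaphore> Semaphores;

    // One self-check tick: expire overdue sessions and free what they held.
    void SelfCheck(TClock::time_point now) {
        for (auto it = Sessions.begin(); it != Sessions.end(); ) {
            if (it->second.Deadline > now) {
                ++it;
                continue;
            }
            for (std::uint64_t semId : it->second.OwnedSemaphores) {
                auto sem = Semaphores.find(semId);
                if (sem != Semaphores.end() && sem->second.Used > 0) {
                    --sem->second.Used;   // analogous to dropping the semaphore owner link
                }
            }
            it = Sessions.erase(it);      // analogous to "Deleting session N"
        }
    }
};

int main() {
    TSessionSweeper sweeper;
    TSemaphore sem;
    sem.Limit = 1;
    sem.Used = 1;
    sweeper.Semaphores[1] = sem;
    sweeper.Sessions[1].Deadline = TClock::now() - std::chrono::seconds(1); // already overdue
    sweeper.Sessions[1].OwnedSemaphores.insert(1);
    sweeper.SelfCheck(TClock::now());   // session 1 expires, the semaphore unit is returned
    return sweeper.Sessions.empty() ? 0 : 1;
}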
2024-11-21T17:48:56.782409Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:56.793181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.046846Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.057818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.321772Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.332609Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.586403Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.597187Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:57.851065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:57.861902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:58.146462Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:58. ... 2057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:07.439390Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:07.693656Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:07.704375Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:07.958982Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:07.969768Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:08.265268Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:08.276155Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:08.530713Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:08.542242Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:08.796861Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:08.807780Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.061545Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.072410Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.326051Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.336908Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.614823Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.632733Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.895689Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.908744Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.147277Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.161286Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.426560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.440715Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.700979Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.716710Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.040844Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.051827Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.314817Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.329238Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.600139Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.611437Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.884552Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.895696Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.169963Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.184539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.472639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.489994Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.771486Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.783569Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.047195Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.060705Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.318351Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.339451Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.584327Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.595224Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.849979Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T17:49:13.850011Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:49:13.850022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2024-11-21T17:49:13.864699Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T17:49:13.875060Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:409:2409], cookie=17769661333183632447, name="Sem1") 2024-11-21T17:49:13.875097Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:409:2409], cookie=17769661333183632447) 2024-11-21T17:49:14.040284Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:49:14.040325Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:14.045155Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:14.045214Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:49:14.059026Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:14.059185Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=1036028784044684742, session=0, seqNo=0) 2024-11-21T17:49:14.059230Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new 
session 1 2024-11-21T17:49:14.080830Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=1036028784044684742, session=1) 2024-11-21T17:49:14.080928Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=1157200710504657331, session=0, seqNo=0) 2024-11-21T17:49:14.080962Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:49:14.092184Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=1157200710504657331, session=2) 2024-11-21T17:49:14.092299Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=4565857262190665527, session=0, seqNo=0) 2024-11-21T17:49:14.092336Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2024-11-21T17:49:14.103481Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=4565857262190665527, session=3) 2024-11-21T17:49:14.103652Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=4681890592008201703, name="Sem1", limit=3) 2024-11-21T17:49:14.103696Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:49:14.115328Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=4681890592008201703) 2024-11-21T17:49:14.115454Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=2) 2024-11-21T17:49:14.115510Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T17:49:14.115559Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2024-11-21T17:49:14.115572Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2024-11-21T17:49:14.115591Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=3, semaphore="Sem1" count=1) 2024-11-21T17:49:14.132503Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T17:49:14.132539Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2024-11-21T17:49:14.132546Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T17:49:14.132702Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:151:2175], cookie=3647196254271645101, name="Sem1") 2024-11-21T17:49:14.132729Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:151:2175], cookie=3647196254271645101) 2024-11-21T17:49:14.132795Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=14767208222876579738, name="Sem1") 2024-11-21T17:49:14.132804Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=14767208222876579738) 2024-11-21T17:49:14.132835Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=444, session=1, semaphore="Sem1" count=1) 
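Aside (not part of the captured output): in this part of the trace the test creates "Sem1" with limit=3, the three sessions acquire 2+1+1 units, and the third acquire has to wait until capacity is returned before it is granted "next order #3 session 3". The sketch below is a loose illustration of a counting semaphore with a FIFO waiter queue and increasing order numbers; it deliberately ignores Kesus-specific behaviour such as re-acquiring with a smaller count (which is how capacity actually frees up in the log above), and every name in it is invented rather than taken from the Kesus API.

#include <cstdint>
#include <deque>
#include <optional>

struct TAcquire {
    std::uint64_t Session;
    std::uint64_t Count;   // how many units this session asks for
};

class TCountingSemaphore {
public:
    explicit TCountingSemaphore(std::uint64_t limit) : Limit(limit) {}

    // Returns the order number if granted immediately, otherwise the request stays queued.
    std::optional<std::uint64_t> Acquire(std::uint64_t session, std::uint64_t count) {
        Waiters.push_back({session, count});
        if (Waiters.size() == 1) {
            return TryGrantHead();
        }
        return std::nullopt;   // someone is already waiting ahead of us
    }

    // Frees units and grants queued waiters in FIFO order.
    void Release(std::uint64_t count) {
        Used = count > Used ? 0 : Used - count;
        while (TryGrantHead()) {
        }
    }

private:
    std::optional<std::uint64_t> TryGrantHead() {
        if (Waiters.empty() || Used + Waiters.front().Count > Limit) {
            return std::nullopt;           // not enough capacity: head stays queued
        }
        Used += Waiters.front().Count;
        Waiters.pop_front();
        return ++NextOrder;                // stamped like "next order #N session M"
    }

    std::uint64_t Limit;
    std::uint64_t Used = 0;
    std::uint64_t NextOrder = 0;
    std::deque<TAcquire> Waiters;
};

int main() {
    TCountingSemaphore sem(3);        // "Sem1" limit=3
    auto o1 = sem.Acquire(1, 2);      // granted, order #1
    auto o2 = sem.Acquire(2, 1);      // granted, order #2
    auto o3 = sem.Acquire(3, 1);      // queued: 3 + 1 would exceed the limit
    sem.Release(1);                   // capacity returns; the queued head is granted internally
    // In a real API the deferred grant would be delivered asynchronously; here o3 simply
    // records that the acquire did not succeed immediately.
    return (o1 && o2 && !o3) ? 0 : 1;
}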
2024-11-21T17:49:14.132872Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2024-11-21T17:49:14.145810Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=444) 2024-11-21T17:49:14.146009Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2183], cookie=4372524607506655603, name="Sem1") 2024-11-21T17:49:14.146037Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2183], cookie=4372524607506655603) 2024-11-21T17:49:14.146095Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=12964220800994596496, name="Sem1") 2024-11-21T17:49:14.146102Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=12964220800994596496) 2024-11-21T17:49:14.157542Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:49:14.157582Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:14.157650Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:14.157913Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:49:14.217024Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:14.217099Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T17:49:14.217106Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2024-11-21T17:49:14.217111Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2024-11-21T17:49:14.217223Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:202:2216], cookie=17793444702972606879, name="Sem1") 2024-11-21T17:49:14.217249Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:202:2216], cookie=17793444702972606879) 2024-11-21T17:49:14.217365Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:211:2224], cookie=8623446987699237973, name="Sem1") 2024-11-21T17:49:14.217371Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:211:2224], cookie=8623446987699237973) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] Test command err: 2024-11-21T17:48:42.187465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790978940166183:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.187761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175e/r3tmp/tmpvoOzvn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24306, node 1 2024-11-21T17:48:42.255743Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:42.256017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.256028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2024-11-21T17:48:42.256030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.256059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.291169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.291208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.292512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.307737Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.310701Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15674, port: 15674 2024-11-21T17:48:42.311059Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.323164Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15674. Invalid credentials 2024-11-21T17:48:42.323502Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****gqmg (B2050284) () has now permanent error message 'Could not login via LDAP' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175e/r3tmp/tmpd1PFHM/pdisk_1.dat 2024-11-21T17:48:42.548408Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:42.548522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 25158, node 2 2024-11-21T17:48:42.558088Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.558098Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.558100Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.558137Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.641512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.641550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.642606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.671567Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.673617Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10310, port: 10310 2024-11-21T17:48:42.673648Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.698823Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:42.698958Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:10310 return no entries 2024-11-21T17:48:42.699041Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****KN7A (318DB1BB) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:42.870525Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790978745910456:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:42.870736Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175e/r3tmp/tmpIkniPp/pdisk_1.dat 2024-11-21T17:48:42.877164Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16211, node 3 2024-11-21T17:48:42.889236Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:42.889252Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:42.889254Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:42.889291Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:42.965975Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:42.967856Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:61165, port: 61165 2024-11-21T17:48:42.967885Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:42.972346Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:42.972391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:42.973351Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:42.981698Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.028410Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.028682Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:43.028712Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.076416Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.120391Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:43.120779Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****c51g (811C7ADE) () has now valid token of ldapuser@ldap 2024-11-21T17:48:47.870974Z node 3 :METADATA_PROVIDER 
ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439790978745910456:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:47.871010Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:48:47.873007Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****c51g (811C7ADE) 2024-11-21T17:48:47.873034Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:61165, port: 61165 2024-11-21T17:48:47.873064Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:47.874793Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:47.916389Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:47.916566Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:47.916576Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:47.964381Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:48.008361Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:48.008613Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****c51g (811C7ADE) () has now valid token of ldapuser@ldap 2024-11-21T17:48:51.874795Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****c51g (811C7ADE) 2024-11-21T17:48:51.874825Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:61165, port: 61165 2024-11-21T17:48:51.874848Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:51.888372Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:51.932421Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:51.932643Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:51.932656Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:51.976409Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:52.024378Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:52.024751Z node 3 :TICKET_PARSER DEBUG: Ticket 
eyJh****c51g (811C7ADE) () has now valid token of ldapuser@ldap 2024-11-21T17:48:53.352332Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791027427402649:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:53.352459Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175e/r3tmp/tmpvukQdG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5634, node 4 2024-11-21T17:48:53.365967Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:53.368013Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:53.368020Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:53.368022Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:53.368044Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:53.423633Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:53.424921Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2181, port: 2181 2024-11-21T17:48:53.424944Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:53.452447Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:53.452472Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:53.453526Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:53.461030Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:53.504543Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****uYlw (C9652D59) () has now valid token of ldapuser@ldap 2024-11-21T17:48:58.352630Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439791027427402649:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:58.352663Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:48:58.354672Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****uYlw (C9652D59) 2024-11-21T17:48:58.354711Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2181, port: 2181 2024-11-21T17:48:58.354745Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:58.395705Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:58.436529Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****uYlw (C9652D59) () has now valid token of ldapuser@ldap 2024-11-21T17:49:02.356481Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****uYlw (C9652D59) 2024-11-21T17:49:02.356524Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2181, port: 2181 2024-11-21T17:49:02.356559Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:49:02.371635Z 
node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:49:02.412619Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****uYlw (C9652D59) () has now valid token of ldapuser@ldap 2024-11-21T17:49:03.656570Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791071091788098:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:03.656590Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00175e/r3tmp/tmpMz2piD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8470, node 5 2024-11-21T17:49:03.671531Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:03.673744Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:03.673756Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:03.673759Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:03.673805Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:03.746907Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:49:03.748495Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:27196, port: 27196 2024-11-21T17:49:03.748555Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:49:03.756937Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:03.756965Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:03.758087Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:03.763183Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:49:03.804675Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:49:03.804909Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:49:03.804922Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:49:03.852433Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:49:03.896434Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:49:03.896831Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ttgA (C80D8B3C) () has now valid token of ldapuser@ldap 2024-11-21T17:49:07.658238Z node 
5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****ttgA (C80D8B3C) 2024-11-21T17:49:07.658277Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:27196, port: 27196 2024-11-21T17:49:07.658306Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:49:07.669548Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:49:07.669661Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:27196 return no entries 2024-11-21T17:49:07.669746Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ttgA (C80D8B3C) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:49:08.657171Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439791071091788098:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:08.657215Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:49:10.660580Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****ttgA (C80D8B3C) >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.331420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.331444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.331450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.331454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.331459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.331462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2024-11-21T17:49:00.331470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.331540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.342567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.342589Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.345116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.345876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.345912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.347605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:00.347867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.347965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.348065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.349379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.349622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.349630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.349665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.349672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.349678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.349693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.351696Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.366030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.366077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.366160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.366166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366848Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.366899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.366910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.366914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.367317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.367666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.367683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.368214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.368627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.368662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.368792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.368863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.368868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.368888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.368897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:00.369219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.369225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.369248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.369252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.369294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.369298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.369305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.369307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.369310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.369313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.369316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.369318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.369324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.369327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.369329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.369514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.369522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.369525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.369527Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.369530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.369540Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & r ... canner] Handle TEvExportScan::TEvFeed: self# [1:3450:5416] 2024-11-21T17:49:14.329281Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10967 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DBC0EAE0-683A-4475-A331-D8014901594A amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2024-11-21T17:49:14.329935Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3451:5417], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2024-11-21T17:49:14.329992Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3450:5416] 2024-11-21T17:49:14.330019Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10967 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EE048160-503C-4890-AB8F-EC49D9A5F475 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2024-11-21T17:49:14.330429Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3451:5417], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2024-11-21T17:49:14.330488Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3450:5416] 2024-11-21T17:49:14.330500Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10967 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BB9DD79B-1E61-4168-B7AA-BFBDE88C2A23 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2024-11-21T17:49:14.331118Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3451:5417], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2024-11-21T17:49:14.331131Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3451:5417], success# 1, error# , multipart# 1, uploadId# 1 2024-11-21T17:49:14.332640Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3451:5417], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10967 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
A95E36F6-4507-4B10-ACF3-BD8B68E10702 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2024-11-21T17:49:14.334436Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3451:5417], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2024-11-21T17:49:14.334537Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:14.337040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.337060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:14.337082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.337093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.337105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:14.337109Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.337114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:14.337121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:14.337202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:14.338084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.338218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.338227Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:14.338240Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2024-11-21T17:49:14.338244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:14.338251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:14.338265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:49:14.338272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:14.338277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:14.338284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:14.338329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:14.339098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:14.339111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3436:5403] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.363418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.363442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.363447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.363454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.363460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.363463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.363472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.363554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.372585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.372603Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.374652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.375188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.375219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
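The repeated "PUT /data_00.csv?partNumber=N&uploadId=1" requests and the final "POST /data_00.csv?uploadId=1" seen in the backup export above are the standard S3 multipart-upload sequence (create upload, upload numbered parts, complete with the collected ETags). A minimal sketch of that same sequence against an S3-compatible endpoint follows; it uses boto3 with hypothetical endpoint, bucket and key values and is only an illustration of the protocol, not the YDB backup exporter's actual code:

# Sketch of the S3 multipart-upload flow visible in the log above.
# Endpoint, bucket and key are hypothetical; boto3 must be installed.
import boto3

def iter_chunks(path, size=5 * 1024 * 1024):
    # Yield fixed-size chunks of a local file. Real S3 requires every part
    # except the last to be at least 5 MiB; the S3 mock in the test log
    # accepts the 130-byte parts shown above.
    with open(path, "rb") as f:
        while chunk := f.read(size):
            yield chunk

s3 = boto3.client("s3", endpoint_url="http://localhost:10967")  # hypothetical mock endpoint
bucket, key = "backup-bucket", "data_00.csv"                    # hypothetical names

upload = s3.create_multipart_upload(Bucket=bucket, Key=key)
upload_id = upload["UploadId"]

parts = []
for number, chunk in enumerate(iter_chunks("data_00.csv"), start=1):
    resp = s3.upload_part(Bucket=bucket, Key=key, PartNumber=number,
                          UploadId=upload_id, Body=chunk)
    # The ETag of each part is echoed back when completing the upload,
    # matching the Parts list printed in the TEvS3Upload message above.
    parts.append({"PartNumber": number, "ETag": resp["ETag"]})

s3.complete_multipart_upload(Bucket=bucket, Key=key, UploadId=upload_id,
                             MultipartUpload={"Parts": parts})

Multipart is what the large raw export uses (101 parts in the log above); the compressed MinWriteBatch case further below instead writes data_00.csv.zst with a single PUT.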
2024-11-21T17:49:00.376376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:00.376600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.376682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.376749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.377760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.377996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.378006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.378040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.378047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.378054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.378066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.379199Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.390848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.390909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.390961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.390995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.391000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.391556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.391578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.391623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.391633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.391637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T17:49:00.391642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.391955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.391963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.391969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.392361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.392379Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.392385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.392393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.392966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.393366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.393407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.393538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.393557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.393564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.393607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.393612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.393640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.393652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:00.394013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.394018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.394052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.394055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.394120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.394125Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.394137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.394141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.394145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.394150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.394154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.394158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.394169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.394173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.394177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.394426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.394438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.394441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.394444Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.394448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.394460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
3709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:14.373373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:49:14.373416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:49:14.373705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:14.373770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:14.373781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:49:14.373843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:49:14.373877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:14.518738Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3451:5417], attempt# 0 2024-11-21T17:49:14.522775Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3451:5417], sender# [1:3450:5416] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:18353 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4CECFA67-02E0-47F4-8E28-E83F25794A14 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T17:49:14.524939Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:18353 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: ECE1565B-BB81-4BE4-95AD-6753B0167C5E amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T17:49:14.527038Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:14.527834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:49:14.527848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:14.527943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:14.527952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:18353 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 54F4041D-8540-437B-9381-29EC4930CDD6 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T17:49:14.528210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.528246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:14.528501Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T17:49:14.528550Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3450:5416] 2024-11-21T17:49:14.528760Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T17:49:14.528875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:14.528892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:14.528900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:14.528906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:14.528912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:14.528944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:18353 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E8A77508-DA76-4672-B0B1-056A61214FA3 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 
2024-11-21T17:49:14.530240Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3451:5417], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2024-11-21T17:49:14.530258Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3451:5417], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T17:49:14.530401Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:14.532268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:49:14.556012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.556041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:14.556072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.556085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.556100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:14.556104Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.556109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:14.556116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:14.556193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:14.557161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.557320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.557330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:14.557344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#102:0 progress is 1/1 2024-11-21T17:49:14.557348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:14.557354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:14.557371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:49:14.557379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:14.557383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:14.557388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:14.557447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:14.558489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:14.558502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3436:5403] TestWaitNotification: OK eventTxId 102 >> DataShardReadIteratorSysTables::ShouldRead [GOOD] >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2024-11-21T17:48:38.843950Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:38.843984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:38.847583Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:38.847619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:38.869131Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:38.869324Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=17519430063252082124, session=0, seqNo=0) 2024-11-21T17:48:38.869394Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:38.880258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=17519430063252082124, session=1) 2024-11-21T17:48:38.880437Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=11776856489878698466 2024-11-21T17:48:38.880499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:143:2167], cookie=2307265971849668020) 2024-11-21T17:48:38.880517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:143:2167], cookie=2307265971849668020) 2024-11-21T17:48:39.189528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:39.204659Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:39.463868Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:39.475804Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:39.726528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:39.738751Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2024-11-21T17:48:39.996418Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:40.010361Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:40.293377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:40.304250Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:40.549580Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:40.560770Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:40.807722Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:40.820527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:41.065014Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:41.076027Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:41.320525Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:41.331555Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:41.627356Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:41.638269Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:41.892761Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:41.903760Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:42.158076Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:42.168884Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:42.413514Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:42.428679Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:42.684562Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:42.695547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:42.981425Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:42.992430Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:43.258989Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:43.270005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:43.524278Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:43.535198Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:43.789312Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:43.800200Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:44.054490Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:44.065340Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:44.340497Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:44.351484Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:44.616159Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:44.627188Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2024-11-21T17:48:44.882034Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:44.893173Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:45.147150Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:45.157990Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:45.411733Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:45.422430Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:45.686366Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:45.697094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:45.961137Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:45.971871Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:46.225674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.236553Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:46.490319Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.501038Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:46.754909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:46.765754Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.050624Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.061505Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.315690Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.326467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.590878Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.601525Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.845519Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.856177Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.110294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.121040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.384987Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.395793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.649391Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.660031Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.923990Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.934739Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.188586Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.199371Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.453208Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.464121Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2024-11-21T17:48:49.739051Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.749906Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.004162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.014996Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.279621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.290553Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.544694Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.555545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.809676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.820619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.105392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.116296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.370367Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.381432Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.645691Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.656888Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.911098Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.921863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.175840Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.186746Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.451271Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.462117Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.705765Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.716726Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.960889Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.971701Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.225529Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.236364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.480486Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.491350Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TT ... 
eck::Execute 2024-11-21T17:49:01.710060Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:01.974807Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:01.985591Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:02.240138Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:02.251121Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:02.508497Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:02.519420Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:02.773925Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:02.784909Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:03.039030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:03.049826Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:03.344959Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:03.355717Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:03.610109Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:03.621000Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:03.875310Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:03.886290Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:04.140333Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:04.151248Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:04.416098Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:04.427055Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:04.691626Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:04.702629Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:04.957228Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:04.968086Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:05.222081Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:05.232888Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:05.487067Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:05.497897Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:05.751711Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:05.762541Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:06.058236Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:06.069114Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:06.323678Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:06.334528Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:06.589559Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2024-11-21T17:49:06.600450Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:06.854919Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:06.865914Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:07.120801Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:07.131779Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:07.427408Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:07.438354Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:07.692966Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:07.703831Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:07.958471Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:07.969272Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:08.223537Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:08.234451Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:08.486391Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:08.498802Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:08.776691Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:08.787767Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.042641Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.053479Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.297608Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.308536Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.553833Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.564779Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.813896Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.824951Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.081363Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.092584Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.353013Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.369249Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.604780Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.616548Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.861963Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.876708Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.115574Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.128726Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.505817Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2024-11-21T17:49:11.520719Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.798657Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.810580Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.083497Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.096859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.368985Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.379996Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.626682Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.640742Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.907054Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.920049Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.175305Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.186295Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.454063Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.465171Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.719907Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.730963Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.985647Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.996638Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:14.253821Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T17:49:14.253854Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:49:14.264954Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T17:49:14.275339Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:571:2565], cookie=5152950401027661511) 2024-11-21T17:49:14.275375Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:571:2565], cookie=5152950401027661511) 2024-11-21T17:49:14.480788Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:49:14.480818Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:14.483821Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:14.483872Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:49:14.505367Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:14.505558Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=12345, session=0, seqNo=0) 2024-11-21T17:49:14.505592Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:49:14.516312Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=12345, session=1) 2024-11-21T17:49:14.516481Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:135:2159], cookie=23456, 
session=1, seqNo=0) 2024-11-21T17:49:14.527258Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:135:2159], cookie=23456, session=1) 2024-11-21T17:49:14.732481Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:49:14.732516Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:14.736363Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:14.736421Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:49:14.757943Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:14.758121Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12345, session=0, seqNo=0) 2024-11-21T17:49:14.758168Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:49:14.768972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12345, session=1) 2024-11-21T17:49:14.769162Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=23456, session=1, seqNo=0) 2024-11-21T17:49:14.779921Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=23456, session=1) >> TKesusTest::TestAcquireSemaphore [GOOD] >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:00.331428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:00.331448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.331452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:00.331456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:00.331460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:00.331463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:00.331478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:00.331557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:00.342641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:00.342657Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:00.345116Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:00.345876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:00.345912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:00.347073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:00.347236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:00.347328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.347393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:00.348130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.348339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.348348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.348369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:00.348373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.348378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:00.348386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.349428Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:00.365400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:00.365476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.365523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:00.365578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:00.365587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:00.366397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:49:00.366407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:00.366411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:00.366414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:00.366841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:00.366857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:00.367197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367205Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.367224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.367230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.367761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:00.368179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:00.368222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:00.368392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.368469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:00.368473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:00.368491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.368499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T17:49:00.368868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:00.368894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:00.368936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:00.368940Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:00.368947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:00.368949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.368952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:00.368955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:00.368958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:00.368960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:00.368967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:00.368970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:00.368972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:00.369201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.369213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:00.369216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:00.369219Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:00.369222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:00.369235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
vFeed: self# [1:3450:5416] 2024-11-21T17:49:14.732398Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16599 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8B1F2AE7-6999-46FF-BDDA-E6C335E377C8 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2024-11-21T17:49:14.732963Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3451:5417], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2024-11-21T17:49:14.733014Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3450:5416] 2024-11-21T17:49:14.733051Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16599 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 171DFEA3-B883-4F53-9F4C-D99B829DC56F amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2024-11-21T17:49:14.733560Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3451:5417], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2024-11-21T17:49:14.733620Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3450:5416] 2024-11-21T17:49:14.733635Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3451:5417], sender# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16599 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B62AEF9B-DB92-4CE0-A901-8A6125C18524 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2024-11-21T17:49:14.734175Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3451:5417], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2024-11-21T17:49:14.734189Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3451:5417], success# 1, error# , multipart# 1, uploadId# 1 2024-11-21T17:49:14.736963Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3451:5417], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16599 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
00654D95-0066-41CF-AB75-BD9FC17D1E68 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2024-11-21T17:49:14.738590Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3451:5417], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2024-11-21T17:49:14.738681Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3450:5416], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:49:14.740791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.740809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:14.740834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.740846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T17:49:14.740860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:14.740864Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.740869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:14.740875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:14.740926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:14.741823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.741936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:14.741946Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:14.741958Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:49:14.741962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:14.741967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:14.741986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:49:14.741992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:14.741997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:14.742001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:14.742032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:14.742930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:14.742945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3436:5403] TestWaitNotification: OK eventTxId 102 >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2024-11-21T17:48:46.863037Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.863061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.865638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.865661Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.886559Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.886649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=1108210220902436279, session=0, seqNo=0) 2024-11-21T17:48:46.886676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.897181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=1108210220902436279, session=1) 2024-11-21T17:48:46.897240Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=4328401957069085704, session=0, seqNo=0) 2024-11-21T17:48:46.897276Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:46.907798Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=4328401957069085704, session=2) 2024-11-21T17:48:46.908001Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:46.908076Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:46.908130Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:46.908200Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock2" count=1) 
2024-11-21T17:48:46.908215Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2024-11-21T17:48:46.908273Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2024-11-21T17:48:46.908311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=1, semaphore="Lock2" count=1) 2024-11-21T17:48:46.908327Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2024-11-21T17:48:46.918991Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T17:48:46.919012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2024-11-21T17:48:46.919015Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2024-11-21T17:48:46.919111Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=10656162396099077192, name="Lock1") 2024-11-21T17:48:46.919133Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=10656162396099077192) 2024-11-21T17:48:46.919175Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=15598267770119962008, name="Lock2") 2024-11-21T17:48:46.919179Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=15598267770119962008) 2024-11-21T17:48:46.921143Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.921170Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.921224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.921331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.963097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.963127Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:46.963132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2024-11-21T17:48:46.963135Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2024-11-21T17:48:46.963218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:189:2203], cookie=8950575592274511072, name="Lock1") 2024-11-21T17:48:46.963233Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:189:2203], cookie=8950575592274511072) 2024-11-21T17:48:46.963296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:197:2210], cookie=9706402249805381861, name="Lock2") 2024-11-21T17:48:46.963300Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:197:2210], cookie=9706402249805381861) 2024-11-21T17:48:47.277868Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.288719Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.532158Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.542828Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.786018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.796743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.039898Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.050538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.313894Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.324612Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.567840Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.578569Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.811653Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.822533Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.065890Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.076778Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.340878Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.351748Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.646636Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.657570Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.921313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.932099Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.195842Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.206692Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.471194Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.482133Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.746702Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.757612Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.052625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.063567Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.318058Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.328938Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.583386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.594259Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.848896Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.859911Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.113833Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.124643Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.409364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.420115Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.673650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.684464Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.937796Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.948611Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.201857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.212600Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.466070Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.476834Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.750479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.761294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.015127Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.025875Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.269226Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.280068Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.533696Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.544451Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.797968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.808743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.082953Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:363:2365], cookie=15961522770806084471, name="Lock1") 2024-11-21T17:48:55.082978Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:363:2365], cookie=15961522770806084471) 2024-11-21T17:48:55.083025Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:366:2368], cookie=16533305595006897785, name="Lock2") 2024-11-21T17:48:55.083030Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:366:2368], cookie=16533305595006897785) 2024-11-21T17:48:55.134030Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.144660Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.397887Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.408585Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.661920Z node 1 :KESUS_TABLET DEBUG: [720575 ... 
7:49:09.438635Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.704960Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.716319Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:09.978573Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:09.989541Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.246002Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.257048Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.532877Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.544595Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:10.838622Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:10.851836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.119914Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.131770Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.388780Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.399718Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.668804Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.681719Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:11.946600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:11.965196Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.251252Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.267754Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.532364Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.552531Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:12.828402Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:12.844644Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.111130Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.122806Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.412193Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.428528Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.694084Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.705014Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.950839Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.962206Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:14.219305Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:14.231455Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:14.502866Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2024-11-21T17:49:14.513920Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:14.769110Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:14.779950Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:15.028446Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T17:49:15.028473Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T17:49:15.028481Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2024-11-21T17:49:15.028504Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2024-11-21T17:49:15.028512Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2024-11-21T17:49:15.028517Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2024-11-21T17:49:15.039509Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T17:49:15.039725Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:329:2342], cookie=17037554721830590198, name="Lock1") 2024-11-21T17:49:15.039745Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:329:2342], cookie=17037554721830590198) 2024-11-21T17:49:15.039815Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:332:2345], cookie=1399110957348465919, name="Lock2") 2024-11-21T17:49:15.039823Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:332:2345], cookie=1399110957348465919) 2024-11-21T17:49:15.039882Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:335:2348], cookie=16916144631321746830) 2024-11-21T17:49:15.039890Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:335:2348], cookie=16916144631321746830) 2024-11-21T17:49:15.045578Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:49:15.045614Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:15.045677Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:15.045829Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:49:15.100579Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:15.100630Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2024-11-21T17:49:15.100639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2024-11-21T17:49:15.100749Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:375:2378], cookie=14926652658865739899) 2024-11-21T17:49:15.100769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:375:2378], cookie=14926652658865739899) 2024-11-21T17:49:15.100889Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:382:2384], cookie=4748025956631383826, name="Lock1") 2024-11-21T17:49:15.100901Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:382:2384], cookie=4748025956631383826) 2024-11-21T17:49:15.100961Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:385:2387], cookie=12360035889676294915, name="Lock2") 2024-11-21T17:49:15.100967Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:385:2387], cookie=12360035889676294915) 2024-11-21T17:49:15.213985Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:49:15.214017Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:15.219061Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:15.219099Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:49:15.230976Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:15.231111Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=7675503883842179407, session=0, seqNo=0) 2024-11-21T17:49:15.231152Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:49:15.252258Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=7675503883842179407, session=1) 2024-11-21T17:49:15.252350Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=6015436614446022050, session=0, seqNo=0) 2024-11-21T17:49:15.252386Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:49:15.263983Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=6015436614446022050, session=2) 2024-11-21T17:49:15.264066Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2024-11-21T17:49:15.274968Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T17:49:15.275132Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=12062250928439620880, name="Sem1", limit=1) 2024-11-21T17:49:15.275192Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:49:15.286022Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=12062250928439620880) 2024-11-21T17:49:15.286140Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=1, semaphore="Sem1" count=100500) 2024-11-21T17:49:15.299233Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T17:49:15.299345Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=1, semaphore="Sem1" count=1) 2024-11-21T17:49:15.299393Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T17:49:15.299443Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=2, semaphore="Sem1" count=1) 2024-11-21T17:49:15.310267Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2024-11-21T17:49:15.310304Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T17:49:15.310409Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=12032891549791094321, name="Sem1") 2024-11-21T17:49:15.310427Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=12032891549791094321) 2024-11-21T17:49:15.310483Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=18143628609845920110, name="Sem1") 2024-11-21T17:49:15.310489Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=18143628609845920110) 2024-11-21T17:49:15.310524Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:159:2183], cookie=11160261468375168114, name="Sem1", force=0) 2024-11-21T17:49:15.321330Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:159:2183], cookie=11160261468375168114) 2024-11-21T17:49:15.321469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:164:2188], cookie=14554824879820389071, name="Sem1", force=1) 2024-11-21T17:49:15.321497Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2024-11-21T17:49:15.332328Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:164:2188], cookie=14554824879820389071) >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |79.6%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] Test command err: 2024-11-21T17:48:43.021205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790985800267522:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.021240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001729/r3tmp/tmp3AshZ4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26915, node 1 2024-11-21T17:48:43.074059Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:43.078069Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.078082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.078084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.078115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.097695Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.099510Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20353, port: 20353 2024-11-21T17:48:43.099804Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.119630Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.121861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.121886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.122928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.164447Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:43.212776Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Oubg (13970192) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.472184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790983166618860:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.472373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001729/r3tmp/tmpsio59i/pdisk_1.dat 2024-11-21T17:48:43.482095Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23520, node 2 2024-11-21T17:48:43.493113Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.493127Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.493128Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.493157Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.530099Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.531603Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:6530, port: 6530 2024-11-21T17:48:43.531651Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.541493Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:43.574467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.574496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.575530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:43.584549Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****t5Ug (1C550DF4) () has now valid token of ldapuser@ldap 2024-11-21T17:48:43.821282Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439790983562048776:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:43.821339Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001729/r3tmp/tmpfEMJTw/pdisk_1.dat 2024-11-21T17:48:43.829348Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15060, node 3 2024-11-21T17:48:43.839854Z 
node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:43.839871Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:43.839872Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:43.839906Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:43.901792Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:43.903234Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:12835, port: 12835 2024-11-21T17:48:43.903261Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:43.921153Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2024-11-21T17:48:43.921178Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:12835. Bad search filter 2024-11-21T17:48:43.921301Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****8rfg (F2923072) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:48:43.923666Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:43.923692Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:43.924741Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:44.134460Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439790987803806888:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:44.134485Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001729/r3tmp/tmpOqCjzx/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3572, node 4 2024-11-21T17:48:44.149019Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:44.151597Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:44.151607Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:44.151608Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:44.151638Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:44.193524Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:44.195769Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8026, port: 8026 2024-11-21T17:48:44.195804Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:44.218957Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:44.234967Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:44.234997Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:44.236059Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:44.264479Z node 4 :LDAP_AUTH_PROVIDER DEBUG: 
search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:44.308427Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:44.308599Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:44.308614Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:44.352450Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:44.396469Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:44.396927Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****h3QQ (C75A6FB5) () has now valid token of ldapuser@ldap 2024-11-21T17:48:48.136323Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****h3QQ (C75A6FB5) 2024-11-21T17:48:48.136411Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8026, port: 8026 2024-11-21T17:48:48.136435Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:48.142884Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:48.184472Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:48.228623Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:48.228851Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:48.228866Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:48.276460Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:48.324404Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:48.324766Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****h3QQ (C75A6FB5) () has now valid token of ldapuser@ldap 2024-11-21T17:48:49.134664Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439790987803806888:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:49.134700Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:48:53.138461Z node 4 :TICKET_PARSER DEBUG: 
Refreshing ticket eyJh****h3QQ (C75A6FB5) 2024-11-21T17:48:53.138495Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8026, port: 8026 2024-11-21T17:48:53.138519Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:53.156245Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:53.200428Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:53.248379Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:48:53.248544Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:48:53.248558Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:53.292433Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:53.336396Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:48:53.336708Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****h3QQ (C75A6FB5) () has now valid token of ldapuser@ldap 2024-11-21T17:48:54.626951Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791033433172250:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:54.626968Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001729/r3tmp/tmp5OZYnn/pdisk_1.dat 2024-11-21T17:48:54.637313Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26616, node 5 2024-11-21T17:48:54.642870Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:54.642880Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:54.642881Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:54.642903Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:48:54.695473Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:48:54.697162Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21983, port: 21983 2024-11-21T17:48:54.697194Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:54.717356Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:54.727353Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:54.727385Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:54.728501Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:48:54.764396Z 
node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:54.808659Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****KduQ (9B884FEA) () has now valid token of ldapuser@ldap 2024-11-21T17:48:58.628694Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****KduQ (9B884FEA) 2024-11-21T17:48:58.628739Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21983, port: 21983 2024-11-21T17:48:58.628768Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:48:58.643186Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:48:58.688384Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:48:58.732611Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****KduQ (9B884FEA) () has now valid token of ldapuser@ldap 2024-11-21T17:48:59.627101Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439791033433172250:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:59.627136Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:49:02.630461Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****KduQ (9B884FEA) 2024-11-21T17:49:02.630494Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21983, port: 21983 2024-11-21T17:49:02.630509Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:49:02.638439Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:49:02.680508Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:49:02.724709Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****KduQ (9B884FEA) () has now valid token of ldapuser@ldap 2024-11-21T17:49:04.926153Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439791073607838795:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:04.926169Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001729/r3tmp/tmppd8WKD/pdisk_1.dat 2024-11-21T17:49:04.934183Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63353, node 6 2024-11-21T17:49:04.944483Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:04.944496Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:04.944498Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:04.944536Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:05.021431Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T17:49:05.023469Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28906, port: 28906 2024-11-21T17:49:05.023492Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:49:05.026295Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2024-11-21T17:49:05.026323Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:05.027361Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:05.043132Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:49:05.084447Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:49:05.128673Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2024-11-21T17:49:05.128854Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2024-11-21T17:49:05.128865Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:49:05.172426Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:49:05.220395Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2024-11-21T17:49:05.220782Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****OwPw (FF35215E) () has now valid token of ldapuser@ldap 2024-11-21T17:49:08.928072Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****OwPw (FF35215E) 2024-11-21T17:49:08.928119Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28906, port: 28906 2024-11-21T17:49:08.928141Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T17:49:08.945403Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:49:08.988475Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T17:49:08.988652Z node 6 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:28906 return no entries 2024-11-21T17:49:08.988887Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****OwPw (FF35215E) () has now permanent error message 'Could not login via LDAP' 2024-11-21T17:49:09.926604Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439791073607838795:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:09.926650Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:49:11.929435Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****OwPw (FF35215E) >> DataShardReadIterator::ShouldReadRangeCellVec |79.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow >> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted >> DataShardReadIteratorBatchMode::RangeFull [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive |79.6%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture |79.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |79.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |79.6%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest >> TTablesWithReboots::CopyTableAndDropWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:48:02.107853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:02.107871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:02.107874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:02.107878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:02.107886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:02.107888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:02.107894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:02.107955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:02.116703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:02.116724Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:48:02.118704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:02.118778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:02.118805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:02.121042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:02.121123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:02.121217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:02.121394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:02.122081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:02.122372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:02.122383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:02.122396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:02.122403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:02.122409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:02.122449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:48:02.123757Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:48:02.139115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:02.139195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.139255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:02.139302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:02.139311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.139921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:02.139944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:02.139975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.139982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:02.139985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:02.139989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:02.140332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.140342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:02.140345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:02.140627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.140635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.140640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:02.140644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:02.141069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:02.141407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:02.141450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for 
TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:02.141613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:02.141634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:02.141640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:02.141691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:02.141697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:02.141719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:02.141730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:02.142124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:02.142131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:02.142168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:02.142174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:02.142249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:02.142255Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:02.142265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:02.142269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:02.142275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:02.142280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:02.142285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:02.142289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:02.142299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:02.142304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:02.142309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
111Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:13.569136Z node 235 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:49:13.569140Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:49:13.569145Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T17:49:13.569156Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [235:398:2373] message: TxId: 1004 2024-11-21T17:49:13.569159Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:49:13.569162Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:49:13.569165Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:49:13.569181Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:13.569243Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.569248Z node 235 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:49:13.569255Z node 235 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.569258Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:49:13.569262Z node 235 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T17:49:13.569290Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:13.569295Z node 235 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:13.569298Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [235:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T17:49:13.569302Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [235:202:2205], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T17:49:13.569420Z node 235 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:13.569428Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:13.569431Z node 235 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:13.569437Z node 235 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:49:13.569441Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:49:13.569490Z node 235 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:13.569497Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:13.569500Z node 235 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:13.569503Z node 235 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:49:13.569506Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:49:13.569512Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:49:13.570001Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:49:13.570011Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [235:533:2497] 2024-11-21T17:49:13.570561Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.570571Z node 235 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:13.570616Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:13.570635Z node 235 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:49:13.570639Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:13.570643Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T17:49:13.570654Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [235:398:2373] message: TxId: 1005 2024-11-21T17:49:13.570658Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:13.570661Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:49:13.570665Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:49:13.570679Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:13.570837Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:13.570855Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:13.571608Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:49:13.571622Z node 235 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [235:533:2497] 2024-11-21T17:49:13.571749Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 1009317316874 } TabletId: 72075186233409546 State: 4 2024-11-21T17:49:13.571763Z node 235 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:13.572656Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:13.572698Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 430 RawX2: 1009317316962 } TabletId: 72075186233409547 State: 4 2024-11-21T17:49:13.572705Z node 235 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:13.572830Z node 235 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:49:13.586103Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:13.586233Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:13.586476Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:13.586491Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:13.586508Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 Forgetting tablet 72075186233409546 2024-11-21T17:49:13.588044Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:13.588215Z node 235 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T17:49:13.588757Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:49:13.588829Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:13.589299Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:13.589312Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:49:13.589409Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:13.589430Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:13.589436Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:49:13.589453Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:13.590084Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:49:13.590103Z node 235 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:49:13.590221Z node 235 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:49:13.590322Z node 235 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:49:13.590336Z node 235 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> AnalyzeDatashard::AnalyzeTwoTables >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldNotReadAfterCancel |79.7%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2024-11-21T17:49:06.875818Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:06.893419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:06.895338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:06.895350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:06.895382Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:06.895875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:06.895900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:06.895924Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:06.895935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:06.895945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:06.895955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:06.895964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:06.895974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:06.895985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:06.895994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.896008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:06.896019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:06.900649Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:06.901587Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:06.901650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:06.901661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:06.901690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.901724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:06.901736Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:06.901740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:06.901747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:06.901755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:06.901761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:06.901765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:06.901798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.901805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:06.901811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:06.901814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:06.901822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:06.901827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:06.901835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:06.901838Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:06.901849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:06.901855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:06.901858Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:06.901868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T17:49:06.901874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:06.901878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:06.901903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T17:49:06.901911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T17:49:06.901918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:06.901929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2024-11-21T17:49:06.901945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:06.901952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:06.901955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:06.901977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:06.901983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.901987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.901998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:06.902003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:06.902007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:06.902023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:06.902030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:06.902033Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:06.902044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... lumn_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=330; 2024-11-21T17:49:16.967110Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1108; 2024-11-21T17:49:16.967250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=133; 2024-11-21T17:49:16.967432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=81; 2024-11-21T17:49:16.967446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=188; 2024-11-21T17:49:16.967462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=9; 2024-11-21T17:49:16.967476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:16.967481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=14; 2024-11-21T17:49:16.967492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=5; 2024-11-21T17:49:16.967501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T17:49:16.967776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=266; 2024-11-21T17:49:16.968017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=225; 2024-11-21T17:49:16.968069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2024-11-21T17:49:16.968100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=24; 2024-11-21T17:49:16.968106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T17:49:16.968111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T17:49:16.968116Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:16.968127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=7; 2024-11-21T17:49:16.968132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T17:49:16.968145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2024-11-21T17:49:16.968151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T17:49:16.968162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2024-11-21T17:49:16.968166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3073; 2024-11-21T17:49:16.968197Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=33;blobs=66;rows=1126788;bytes=87164812;raw_bytes=110830786; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:16.968222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:16.968252Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:16.968268Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:49:16.968290Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:16.968298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:16.968315Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:16.968320Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:16.968330Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:16.968344Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=8; 2024-11-21T17:49:16.968355Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T17:49:16.968360Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:16.968370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:16.968376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:16.968390Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:16.968406Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:16.968657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:16.968679Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1981:3968];tablet_id=9437184;parent=[1:1948:3942];fline=manager.h:99;event=ask_data;request=request_id=106;1={portions_count=35};; 2024-11-21T17:49:16.969043Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1981:3968];tablet_id=9437184;parent=[1:1948:3942];fline=manager.h:99;event=ask_data;request=request_id=108;1={portions_count=2};; 2024-11-21T17:49:16.969275Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:16.969312Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:16.969316Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:49:16.969319Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:16.969325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:16.969334Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:16.969342Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=8; 2024-11-21T17:49:16.969350Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T17:49:16.969358Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:16.969366Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:16.969370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:16.969375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:16.969386Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:16.971688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2024-11-21T17:49:16.971826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2024-11-21T17:49:16.972006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:16.972009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> TPDiskTest::DeviceHaltTooLong [GOOD] >> TPDiskTest::ChangePDiskKey >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> 
DataShardReadIteratorBatchMode::RangeToNonInclusive >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> KqpWorkloadService::TestQueueSizeSimple >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD] >> DataShardReadIterator::ShouldReadRangeReverse >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> TPDiskTest::ChangePDiskKey [GOOD] >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |79.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> IncrementalBackup::SimpleBackup >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] |79.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] >> DataShardReadIteratorSysTables::ShouldNotAllowArrow >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] >> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::SelectingColumns >> DataShardReadIterator::ShouldReadRangeReverse [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2024-11-21T17:49:03.783871Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:03.798223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:03.800399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 
2024-11-21T17:49:03.800416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:03.800456Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:03.800928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:03.800954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:03.800978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:03.800989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:03.801000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:03.801010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:03.801024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:03.801040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:03.801054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:03.801068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.801086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:03.801096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:03.803956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:03.805082Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:03.805157Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:03.805165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:03.805188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.805217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:03.805225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:03.805228Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:03.805234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:03.805240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:03.805244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:03.805247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:03.805258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.805262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:03.805266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:03.805268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:03.805274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:03.805277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:03.805282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:03.805285Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:03.805292Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:03.805296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:03.805299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:03.805305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:03.805310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:03.805312Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:03.805334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2024-11-21T17:49:03.805340Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T17:49:03.805347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:03.805353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T17:49:03.805365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:03.805370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:03.805372Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:03.805388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:03.805392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.805395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.805402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:03.805407Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:03.805409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:03.805421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:03.805425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:03.805428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:03.805435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... n_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=544; 2024-11-21T17:49:19.233299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8259; 2024-11-21T17:49:19.233486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=176; 2024-11-21T17:49:19.233707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=85; 2024-11-21T17:49:19.233720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=225; 2024-11-21T17:49:19.233738Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=12; 2024-11-21T17:49:19.233752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:19.233759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=16; 2024-11-21T17:49:19.233776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=9; 2024-11-21T17:49:19.233785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=4; 2024-11-21T17:49:19.234071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=280; 2024-11-21T17:49:19.234308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=227; 2024-11-21T17:49:19.234354Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2024-11-21T17:49:19.234389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28; 2024-11-21T17:49:19.234395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T17:49:19.234405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T17:49:19.234412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:19.234425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2024-11-21T17:49:19.234432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2024-11-21T17:49:19.234446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=10; 2024-11-21T17:49:19.234452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T17:49:19.234462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2024-11-21T17:49:19.234466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=10464; 2024-11-21T17:49:19.234523Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=35;blobs=70;rows=1239297;bytes=95895024;raw_bytes=116845255; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:19.234552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:19.234559Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:19.234573Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:49:19.234595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:19.234601Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:19.234618Z node 1 :TX_COLUMNSHARD 
DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:19.234623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:19.234634Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:19.234651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:19.234662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:19.234668Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:19.234681Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.234685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.234701Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:19.234721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:19.234949Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:19.234967Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4061:5688];tablet_id=9437184;parent=[1:4006:5640];fline=manager.h:99;event=ask_data;request=request_id=232;1={portions_count=37};; 2024-11-21T17:49:19.235098Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4061:5688];tablet_id=9437184;parent=[1:4006:5640];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=2};; 2024-11-21T17:49:19.235182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:19.235312Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:19.235318Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:49:19.235321Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:19.235326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:19.235334Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:19.235343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:19.235351Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:19.235356Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:19.235363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.235368Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.235373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:19.235386Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:19.235549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2024-11-21T17:49:19.235783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2024-11-21T17:49:19.236098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:19.236106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2024-11-21T17:49:03.516394Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:03.531706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:03.533632Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:03.533644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:03.533677Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:03.534166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:03.534192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:03.534222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:03.534238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:03.534254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:03.534264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:03.534274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:03.534285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:03.534296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:03.534305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.534318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:03.534329Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:03.537455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:03.538231Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:03.538283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:03.538291Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:03.538311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.538339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:03.538349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:03.538352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:03.538357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:03.538364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:03.538381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:03.538384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:03.538394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.538399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:03.538403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:03.538405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:03.538411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:03.538415Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:03.538420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:03.538422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:03.538430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:03.538434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:03.538436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:03.538442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:03.538446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:03.538448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:03.538469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T17:49:03.538475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T17:49:03.538480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:03.538487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T17:49:03.538500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:03.538505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:03.538507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:03.538522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:03.538527Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.538529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.538537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:03.538542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:03.538544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:03.538556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:03.538560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:03.538562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:03.538570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... lumn_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=515; 2024-11-21T17:49:19.147384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1325; 2024-11-21T17:49:19.147526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=135; 2024-11-21T17:49:19.147698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=72; 2024-11-21T17:49:19.147709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=177; 2024-11-21T17:49:19.147724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=8; 2024-11-21T17:49:19.147738Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:19.147745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=16; 2024-11-21T17:49:19.147757Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=7; 2024-11-21T17:49:19.147765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=2; 2024-11-21T17:49:19.148025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=254; 2024-11-21T17:49:19.148274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=241; 2024-11-21T17:49:19.148321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=39; 2024-11-21T17:49:19.148353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2024-11-21T17:49:19.148359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T17:49:19.148366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2024-11-21T17:49:19.148372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:19.148384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2024-11-21T17:49:19.148390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T17:49:19.148403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=7; 2024-11-21T17:49:19.148408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T17:49:19.148417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2024-11-21T17:49:19.148422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3328; 2024-11-21T17:49:19.148453Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=35;blobs=70;rows=1239297;bytes=98153860;raw_bytes=121802583; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:19.148477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:19.148483Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:19.148495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:49:19.148513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:19.148519Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:19.148535Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:19.148541Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:19.148550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:19.148565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:19.148575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:19.148580Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:19.148592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.148597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.148610Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:19.148627Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:19.148825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:19.148840Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4061:5688];tablet_id=9437184;parent=[1:4006:5640];fline=manager.h:99;event=ask_data;request=request_id=232;1={portions_count=37};; 2024-11-21T17:49:19.148979Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4061:5688];tablet_id=9437184;parent=[1:4006:5640];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=2};; 2024-11-21T17:49:19.149050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:19.149166Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:19.149171Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:49:19.149174Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:19.149179Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:19.149186Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:19.149193Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:19.149199Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:19.149203Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:19.149209Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.149213Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:19.149217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:19.149229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:19.152914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2024-11-21T17:49:19.153173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2024-11-21T17:49:19.153487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:19.153498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> TCdcStreamWithRebootsTests::InitialScan[PipeResets] >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> 
DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn >> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD] >> DataShardReadIterator::ShouldStopWhenNodeDisconnected >> TCdcStreamWithRebootsTests::InitialScan[PipeResets] [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2024-11-21T17:49:03.747137Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:03.761498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:03.763235Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:03.763247Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:03.763283Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:03.763733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:03.763758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:03.763782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:03.763793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:03.763803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:03.763813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:03.763823Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:03.763833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:03.763844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:03.763856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.763869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:03.763880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:03.767259Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:03.768350Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:03.768409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:03.768417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:03.768439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.768469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:03.768478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:03.768481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:03.768487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:03.768494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:03.768499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:03.768501Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:03.768513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.768517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:03.768522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:03.768524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:03.768530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:03.768534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:03.768539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:03.768542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:03.768549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:03.768553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:03.768555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:03.768561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:03.768566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:03.768568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:03.768590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T17:49:03.768597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T17:49:03.768603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:03.768610Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2024-11-21T17:49:03.768622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:03.768627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:03.768630Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:03.768645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:03.768649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.768651Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.768659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:03.768663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:03.768665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:03.768676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:03.768681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:03.768683Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:03.768691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
mn_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=710; 2024-11-21T17:49:20.174761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1740; 2024-11-21T17:49:20.174894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=125; 2024-11-21T17:49:20.175079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=80; 2024-11-21T17:49:20.175094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=191; 2024-11-21T17:49:20.175110Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=9; 2024-11-21T17:49:20.175124Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:20.175130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=15; 2024-11-21T17:49:20.175143Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=8; 2024-11-21T17:49:20.175150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T17:49:20.175475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=317; 2024-11-21T17:49:20.175716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=229; 2024-11-21T17:49:20.175763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2024-11-21T17:49:20.175795Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2024-11-21T17:49:20.175801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T17:49:20.175806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T17:49:20.175812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:20.175823Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=7; 2024-11-21T17:49:20.175829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T17:49:20.175842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2024-11-21T17:49:20.175847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T17:49:20.175857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2024-11-21T17:49:20.175863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3787; 2024-11-21T17:49:20.175895Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=3;blobs=6;rows=75200;bytes=7465172;raw_bytes=7453400; inactive portions=51;blobs=102;rows=1251798;bytes=102220660;raw_bytes=124064310; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:20.175919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:20.175925Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:20.175935Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:49:20.175953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:20.175958Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:20.175975Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:20.175980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:20.175989Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:20.176005Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:20.176014Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:20.176020Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:20.176029Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:20.176034Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:20.176046Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:20.176062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:20.176258Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:20.176277Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4212:5840];tablet_id=9437184;parent=[1:4157:5792];fline=manager.h:99;event=ask_data;request=request_id=262;1={portions_count=54};; 2024-11-21T17:49:20.176473Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4212:5840];tablet_id=9437184;parent=[1:4157:5792];fline=manager.h:99;event=ask_data;request=request_id=264;1={portions_count=3};; 2024-11-21T17:49:20.176681Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:20.177991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:20.178003Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:49:20.178007Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:20.178013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:20.178023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:20.178033Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:20.178042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:20.178048Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:20.178056Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:20.178060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 
2024-11-21T17:49:20.178066Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:20.178080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:20.178185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=54;path_id=1; 2024-11-21T17:49:20.178466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=54;path_id=1; 2024-11-21T17:49:20.178858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:20.178868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2024-11-21T17:48:46.922891Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:46.922912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:46.925424Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:46.925450Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:46.946686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:46.946847Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=17642024256222591872, session=0, seqNo=0) 2024-11-21T17:48:46.946880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:46.957527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=17642024256222591872, session=1) 2024-11-21T17:48:46.957689Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2024-11-21T17:48:46.957730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:46.957755Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:46.968361Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T17:48:46.968424Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:46.979172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2024-11-21T17:48:46.979317Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=11896013040355375422, name="Lock1") 2024-11-21T17:48:46.979348Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=11896013040355375422) 2024-11-21T17:48:47.164377Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:48:47.164406Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:48:47.166807Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:48:47.166862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T17:48:47.177871Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:48:47.178006Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=4864920381804270667, session=0, seqNo=0) 2024-11-21T17:48:47.178032Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:48:47.198853Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=4864920381804270667, session=1) 2024-11-21T17:48:47.198920Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=7829213087293130528, session=0, seqNo=0) 2024-11-21T17:48:47.198944Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:48:47.209588Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=7829213087293130528, session=2) 2024-11-21T17:48:47.209734Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T17:48:47.209760Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T17:48:47.209771Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T17:48:47.220557Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2024-11-21T17:48:47.220646Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2024-11-21T17:48:47.220675Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2024-11-21T17:48:47.220687Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2024-11-21T17:48:47.231475Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=112) 2024-11-21T17:48:47.231573Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:47.231633Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2024-11-21T17:48:47.242347Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222) 2024-11-21T17:48:47.242385Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=223) 2024-11-21T17:48:47.242454Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=333, session=2, semaphore="Lock1" count=1) 2024-11-21T17:48:47.242519Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2024-11-21T17:48:47.253372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=333) 2024-11-21T17:48:47.253401Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=334) 2024-11-21T17:48:47.548097Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.558692Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:47.802305Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:47.813032Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.066726Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.077415Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.320944Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.331671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.595585Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.606333Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:48.839992Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:48.850774Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.094224Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.105114Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.348520Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.359190Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.602904Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.613746Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:49.888908Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:49.899911Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.164144Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.175149Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.428881Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.439700Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.693581Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.704417Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:50.958254Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:50.969077Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.254087Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.265030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.529107Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.539894Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:51.794768Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:51.805685Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.060268Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.071191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.325191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.336132Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.610970Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.621749Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:52.885587Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:52.896355Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.149939Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.160754Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.414408Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.425292Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.679534Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.690438Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:53.954468Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:53.965224Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.219016Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.229754Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.493527Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.504328Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:54.758030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:54.768702Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.022103Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:48:55.032859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:48:55.276634Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2024-11-21T17:48:55.276663Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2024-11-21T17:48:55.287362Z node 2 :KESUS_TABLET DEBUG: [ ... 
11-21T17:49:13.410109Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.687100Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.699215Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:13.955451Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:13.966443Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:14.222511Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:14.237264Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:14.498660Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:14.509934Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:14.765063Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:14.776110Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:15.067519Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:15.080817Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:15.351915Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:15.362976Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:15.619406Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:15.632895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:15.886194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:15.901098Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:16.175265Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:16.186625Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:16.473547Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:16.486694Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:16.745731Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:16.761509Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:17.009728Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:17.024673Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:17.282639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:17.293703Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:17.550086Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:17.562355Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:17.843595Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:17.859425Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:18.114890Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:18.125823Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:18.403437Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2024-11-21T17:49:18.417471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:18.653024Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:18.664265Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:18.913460Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:18.929120Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:19.216826Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:19.228638Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:19.492174Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:19.505258Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:19.762597Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:19.773646Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:20.032658Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:20.044741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:20.302801Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T17:49:20.314310Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T17:49:20.672722Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2024-11-21T17:49:20.672757Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2024-11-21T17:49:20.686364Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2024-11-21T17:49:20.708584Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:534:2530], cookie=12194067013904993491) 2024-11-21T17:49:20.708627Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:534:2530], cookie=12194067013904993491) 2024-11-21T17:49:20.708702Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:537:2533], cookie=15661326769249014215) 2024-11-21T17:49:20.708709Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:537:2533], cookie=15661326769249014215) 2024-11-21T17:49:20.708770Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:540:2536], cookie=11033775266949876694, name="Lock1") 2024-11-21T17:49:20.708781Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:540:2536], cookie=11033775266949876694) 2024-11-21T17:49:20.708836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:543:2539], cookie=10785006145871933426, name="Lock1") 2024-11-21T17:49:20.708841Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:543:2539], cookie=10785006145871933426) 2024-11-21T17:49:20.898016Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T17:49:20.898053Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T17:49:20.902805Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T17:49:20.902858Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2024-11-21T17:49:20.920638Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T17:49:20.920803Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=11801990734093415924, session=0, seqNo=0) 2024-11-21T17:49:20.920840Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T17:49:20.942898Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=11801990734093415924, session=1) 2024-11-21T17:49:20.943014Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=7058008977647366524, session=0, seqNo=0) 2024-11-21T17:49:20.943061Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T17:49:20.954653Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=7058008977647366524, session=2) 2024-11-21T17:49:20.954767Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=11254814160651989726, session=0, seqNo=0) 2024-11-21T17:49:20.954802Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2024-11-21T17:49:20.965742Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=11254814160651989726, session=3) 2024-11-21T17:49:20.965905Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=3957035180182772214, name="Sem1", limit=3) 2024-11-21T17:49:20.965943Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T17:49:20.976909Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=3957035180182772214) 2024-11-21T17:49:20.977031Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=2) 2024-11-21T17:49:20.977079Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T17:49:20.977120Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=2, semaphore="Sem1" count=2) 2024-11-21T17:49:20.977165Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=3, semaphore="Sem1" count=1) 2024-11-21T17:49:20.988520Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T17:49:20.988560Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2024-11-21T17:49:20.988567Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T17:49:20.988727Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:152:2176], cookie=13088950257630353484, name="Sem1") 2024-11-21T17:49:20.988756Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:152:2176], cookie=13088950257630353484) 2024-11-21T17:49:20.988823Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=8224807639244965784, name="Sem1") 2024-11-21T17:49:20.988832Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], 
cookie=8224807639244965784) 2024-11-21T17:49:20.988865Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:130:2156], cookie=444, name="Sem1") 2024-11-21T17:49:20.988895Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2024-11-21T17:49:20.988907Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2024-11-21T17:49:20.988917Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2024-11-21T17:49:20.999963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:130:2156], cookie=444) 2024-11-21T17:49:21.000155Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2184], cookie=217962151013335607, name="Sem1") 2024-11-21T17:49:21.000179Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2184], cookie=217962151013335607) 2024-11-21T17:49:21.000257Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2187], cookie=13541391433506116565, name="Sem1") 2024-11-21T17:49:21.000266Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2187], cookie=13541391433506116565) |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously [GOOD] >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::InitialScan[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:20.434869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:20.434890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:20.434896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:20.434901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:20.434916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:20.434921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:20.434931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:20.435016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:20.448316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:20.448334Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:20.450538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:20.450634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:20.450677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:20.457334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:20.457431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:20.457565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:20.457743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:20.467361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:20.467674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:20.467685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:20.467698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:20.467705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:20.467711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:20.467758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:20.469280Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:20.486184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:20.486269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:20.486328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:20.486375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:20.486382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:20.489286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:20.489323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:20.489379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:20.489392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:20.489397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:20.489403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:20.489893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:20.489905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:20.489910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:20.490259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:20.490268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:20.490274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:20.490282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:20.490905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:20.491285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:20.491334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:20.491515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:20.491540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:20.491547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:20.491605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:20.491614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:20.491643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:20.491657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:20.492053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:20.492064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:20.492104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:20.492109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:20.492191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:20.492196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:20.492208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:20.492213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:20.492220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:20.492225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:20.492251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:20.492255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:20.492266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:20.492272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:20.492276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
yToNotify, TxId: 281474976715657, ready parts: 1/3, is published: false 2024-11-21T17:49:21.077613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T17:49:21.077620Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72057594046678944 2024-11-21T17:49:21.077687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046678944 2024-11-21T17:49:21.077692Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:49:21.077701Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2024-11-21T17:49:21.077704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2024-11-21T17:49:21.077707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2024-11-21T17:49:21.077752Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T17:49:21.077763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T17:49:21.077767Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2024-11-21T17:49:21.077772Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T17:49:21.077778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:49:21.077865Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T17:49:21.077872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976715657 2024-11-21T17:49:21.077875Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976715657 2024-11-21T17:49:21.077879Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976715657, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:49:21.077882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T17:49:21.077889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true FAKE_COORDINATOR: Erasing txId 281474976715657 2024-11-21T17:49:21.078320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 
5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 200 } } 2024-11-21T17:49:21.078330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2024-11-21T17:49:21.078347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 200 } } 2024-11-21T17:49:21.078362Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 281474976715657 Step: 5000005 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 200 } } 2024-11-21T17:49:21.078761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 8589936908 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T17:49:21.078775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409546, partId: 1 2024-11-21T17:49:21.078792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 8589936908 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T17:49:21.078798Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:49:21.078805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 8589936908 } Origin: 72075186233409546 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T17:49:21.078815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:21.078818Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T17:49:21.078823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:21.078828Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2024-11-21T17:49:21.079101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2024-11-21T17:49:21.079500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976715657 2024-11-21T17:49:21.079531Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T17:49:21.079851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T17:49:21.079913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72057594046678944 2024-11-21T17:49:21.079920Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976715657:1 ProgressState 2024-11-21T17:49:21.079932Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2024-11-21T17:49:21.079936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T17:49:21.079946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2024-11-21T17:49:21.079949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T17:49:21.079954Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:49:21.079956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T17:49:21.079964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:21.079968Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2024-11-21T17:49:21.079971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2024-11-21T17:49:21.079984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:49:21.079988Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2024-11-21T17:49:21.079991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2024-11-21T17:49:21.079994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:21.080593Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:21.080639Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 56us result status StatusSuccess 2024-11-21T17:49:21.080765Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 2 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: 
true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 2 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 ScanProgress { ShardsTotal: 1 ShardsCompleted: 1 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD] >> IncrementalBackup::SimpleBackup [GOOD] |79.7%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> TSchemeShardCheckProposeSize::CopyTable >> DataShardReadIterator::ShouldReadMultipleKeys >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> TSchemeShardTest::Boot |79.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] Test command err: 2024-11-21T17:42:23.093729Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:42:23.094875Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 684318820337267614 MagicNextLogChunkReference: 11103513898647218056 MagicLogChunk: 3873115236404548078 MagicDataChunk: 6415498467966539896 MagicSysLogChunk: 3099404382979826036 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732210943059239 (2024-11-21T17:42:23.059239Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:42:23.096336Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 
NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2024-11-21T17:42:23.102040Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2024-11-21T17:42:23.102228Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:42:23.102432Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.102582Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 0 endOfSplice# 0},] PDiskId# 1 2024-11-21T17:42:23.102659Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 2024-11-21T17:42:23.102785Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2024-11-21T17:42:23.103194Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 4 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 3 PDiskId# 1 2024-11-21T17:42:23.103397Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 5 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 4 PDiskId# 1 2024-11-21T17:42:23.103600Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 6 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 5 PDiskId# 1 2024-11-21T17:42:23.103793Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 7 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 6 PDiskId# 1 2024-11-21T17:42:23.104065Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 8 vDiskId# [5:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 7 PDiskId# 1 2024-11-21T17:42:23.106307Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 9 vDiskId# [6:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 8 PDiskId# 1 2024-11-21T17:42:23.106571Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 10 vDiskId# [7:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 9 PDiskId# 1 2024-11-21T17:42:23.106788Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 11 vDiskId# [8:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 10 PDiskId# 1 2024-11-21T17:42:23.106981Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 12 vDiskId# [9:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 11 PDiskId# 1 
2024-11-21T17:42:23.107270Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 13 vDiskId# [a:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 12 PDiskId# 1 2024-11-21T17:42:23.107719Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 14 vDiskId# [b:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 13 PDiskId# 1 2024-11-21T17:42:23.108022Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 15 vDiskId# [c:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 14 PDiskId# 1 2024-11-21T17:42:23.108360Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 16 vDiskId# [d:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 15 PDiskId# 1 2024-11-21T17:42:23.108776Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 17 vDiskId# [e:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 16 PDiskId# 1 2024-11-21T17:42:23.109091Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 18 vDiskId# [f:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 17 PDiskId# 1 2024-11-21T17:42:23.109398Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 19 vDiskId# [10:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 18 PDiskId# 1 2024-11-21T17:42:23.109840Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 20 vDiskId# [11:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 19 PDiskId# 1 2024-11-21T17:42:23.110289Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 21 vDiskId# [12:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 20 PDiskId# 1 2024-11-21T17:42:23.110640Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 22 vDiskId# [13:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 21 PDiskId# 1 2024-11-21T17:42:23.110991Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 23 vDiskId# [14:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 22 PDiskId# 1 2024-11-21T17:42:23.111364Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 24 vDiskId# [15:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 23 PDiskId# 1 2024-11-21T17:42:23.111703Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 25 vDiskId# [16:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 24 PDiskId# 1 2024-11-21T17:42:23.112124Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 26 vDiskId# [17:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 25 PDiskId# 1 2024-11-21T17:42:23.112437Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 27 vDiskId# [18:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 26 PDiskId# 1 2024-11-21T17:42:23.112750Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 28 vDiskId# [19:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 27 PDiskId# 1 2024-11-21T17:42:23.113094Z node 1 :BS_PDISK NOTICE: 
{BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 29 vDiskId# [1a:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 28 PDiskId# 1 2024-11-21T17:42:23.113423Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 30 vDiskId# [1b:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 29 PDiskId# 1 2024-11-21T17:42:23.113899Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 31 vDiskId# [1c:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 30 PDiskId# 1 2024-11-21T17:42:23.114209Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 32 vDiskId# [1d:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 31 PDiskId# 1 2024-11-21T17:42:23.117028Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 33 vDiskId# [1e:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 32 PDiskId# 1 2024-11-21T17:42:23.117358Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 34 vDiskId# [1f:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 33 PDiskId# 1 2024-11-21T17:42:23.117759Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 35 vDiskId# [20:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 34 PDiskId# 1 2024-11-21T17:42:23.118170Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 36 vDiskId# [21:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 35 PDiskId# 1 2024-11-21T17:42:23.119360Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 37 vDiskId# [22:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 36 PDiskId# 1 2024-11-21T17:42:23.119835Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 38 vDiskId# [23:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 37 PDiskId# 1 2024-11-21T17:42:23.120630Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 39 vDiskId# [24:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 38 PDiskId# 1 2024-11-21T17:42:23.120978Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 40 vDiskId# [25:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 39 PDiskId# 1 2024-11-21T17:42:23.121470Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 41 vDiskId# [26:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 40 PDiskId# 1 2024-11-21T17:42:23.121733Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 42 vDiskId# [27:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 41 PDiskId# 1 2024-11-21T17:42:23.122071Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 43 vDiskId# [28:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 42 PDiskId# 1 2024-11-21T17:42:23.122560Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 44 vDiskId# [29:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 43 PDiskId# 1 2024-11-21T17:42:23.123169Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created 
ownerId# 45 vDiskId# [2a:_:0:0:0] FirstNonceToKeep# 1066528 CutLogId# [0:0:0] ownerRound# 44 PDiskId# 1 2024-11-21T17:42:23.123585Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1925} New owner is created ownerId# 46 vDiskId# [2b:_:0:0:0] FirstNonceToKeep# 1 ... wn OwnerInfo# {{OwnerId: 3 VDiskId: [16d5a:_:0:0:0] ChunkWrites: 69 ChunkReads: 70 LogWrites: 71 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1255956 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 104}} Owned chunkIds: {2}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {3..982} PDiskId# 1 2024-11-21T17:49:18.495305Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560090926 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495375Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560091694 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495390Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560092462 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495401Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560093230 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495410Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560093998 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495418Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560094766 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495427Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560095534 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495437Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560096302 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495446Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560097070 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495454Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560097838 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495463Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560098606 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495470Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560099374 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495477Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560100142 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495485Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560100910 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495506Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560101678 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495514Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560102446 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495526Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560103214 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495534Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560103982 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495541Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560104750 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495553Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560105518 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495560Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560106286 reason# One of ChunkReadPart failed due to unknown 
reason 2024-11-21T17:49:18.495568Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560107054 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495575Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560107822 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495581Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560108590 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495589Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560109358 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495596Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560110126 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495603Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560110894 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495609Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560111662 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495616Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560112430 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495625Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560113198 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495632Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560113966 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495639Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560114734 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495646Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560115502 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495654Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560116270 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495661Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560117038 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495668Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560117806 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495675Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560118574 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495684Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560119342 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495691Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560120110 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495697Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560120878 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495705Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560121646 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495712Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560122414 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495720Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560123182 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495727Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560123950 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495734Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560124718 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495741Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560125486 reason# One of ChunkReadPart failed due to unknown reason 
2024-11-21T17:49:18.495747Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560126254 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495754Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560127022 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495761Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560127790 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495768Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560128558 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495774Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560129326 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495783Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560130094 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495790Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560130862 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495797Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560131630 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495805Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560132398 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495811Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560133166 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495818Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560133934 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495825Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560134702 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495834Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560135470 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495841Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560136238 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.495849Z node 1631 :BS_PDISK WARN: PDiskId# 1 ReqId# 2560137006 reason# One of ChunkReadPart failed due to unknown reason 2024-11-21T17:49:18.498379Z node 1631 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2557} OnDriveStartup Path# "" PDiskId# 1 2024-11-21T17:49:18.498714Z node 1631 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:534} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16792808611145314282 MagicNextLogChunkReference: 6287920100580638749 MagicLogChunk: 11152223312760592784 MagicDataChunk: 2710327520665470697 MagicSysLogChunk: 11615401720854526321 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1732211358255865 (2024-11-21T17:49:18.255865Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2024-11-21T17:49:18.501487Z node 1631 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:251} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1939426 NonceLog# 1255956 NonceData# 2076807} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [16d5a:4294967295:0:0:0]} PDiskId# 1 2024-11-21T17:49:18.504021Z node 1631 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:699} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 
IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2024-11-21T17:49:18.504103Z node 1631 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:798} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 19 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 77824} PDiskId# 1 2024-11-21T17:49:18.504127Z node 1631 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1142} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 77824} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2024-11-21T17:49:18.504802Z node 1631 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo startup LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-175 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:49:18.505527Z node 1631 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:157} PrintLogChunksInfo before log cut LogChunks# [{chunkIdx# 1 users# 1 endOfSplice# 0 {owner# 3 lsn# 1-175 firstLsnToKeep# 0},},] PDiskId# 1 2024-11-21T17:49:18.505621Z node 1631 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1506} PDisk have successfully started PDiskId# 1 all chunk reads are received all chunk writes are received all log writes are received >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardCheckProposeSize::CopyTables >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> TSchemeShardTest::Boot [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> TSchemeShardTest::ConsistentCopyTable >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> TSchemeShardTest::AlterTableFollowers >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> TSchemeShardTest::ConsistentCopyTableAwait >> TSchemeShardTest::AlterTableFollowers [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::MkRmDir >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> 
DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD] |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit |79.7%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite >> ResourcePoolsDdl::TestCreateResourcePool >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> RetryPolicy::TWriteSession_SeqNoShift >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> TSchemeShardTest::DependentOps >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::PathName >> TSchemeShardTest::CopyTableTwiceSimultaneously >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> TSchemeShardTest::DependentOps [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> DataShardReadIterator::ShouldReadRangeOneByOne >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> 
TSchemeShardTest::AlterTableSplitSchema [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::PathName_SetLocale >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TSchemeShardTest::PathName_SetLocale [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::AlterTableSettings [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> TSchemeShardTest::ModifyACL >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::CopyIndexedTable >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] |79.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |79.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |79.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |79.8%| 
[LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |79.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithResolvedTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:19.067643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:19.067666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:19.067671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:19.067676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:19.067692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:19.067697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:19.067706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:19.067787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:19.079882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:19.079905Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:19.082605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:19.082711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:19.082746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:19.108667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:19.108794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:19.108908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2024-11-21T17:49:19.115448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:19.128512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:19.128893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:19.128901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:19.128914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:19.128921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:19.128928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:19.128973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:19.137843Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:19.167626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:19.167726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.167795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:19.167845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:19.167852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.172779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:19.172830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:19.172908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.172921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:19.172926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T17:49:19.172931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:19.180719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.180751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:19.180758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:19.188740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.188779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.188787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:19.188799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.189499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:19.196808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:19.196912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:19.197172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:19.197219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:19.197228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:19.197307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:19.197316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:19.197357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:19.197371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:19.200434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:19.200469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:19.200545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:19.200552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:19.200661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.200671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:19.200690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:19.200695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.200704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:19.200711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.200717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:19.200721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:19.200751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:19.200758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:19.200761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ssState at tablet: 72057594046678944 2024-11-21T17:49:29.143638Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.143648Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.143655Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:29.143659Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T17:49:29.143664Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:49:29.143801Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 205 } } 2024-11-21T17:49:29.143807Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T17:49:29.143818Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 205 } } 2024-11-21T17:49:29.143849Z node 34 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 205 } } 2024-11-21T17:49:29.143952Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.143961Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.143965Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:29.143969Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:49:29.143973Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:49:29.144063Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 146028890378 } Origin: 72075186233409546 State: 2 TxId: 1003 
Step: 0 Generation: 2 2024-11-21T17:49:29.144069Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T17:49:29.144080Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 146028890378 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:49:29.144086Z node 34 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:49:29.144093Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 146028890378 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:49:29.144105Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:29.144109Z node 34 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:49:29.144112Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:29.144116Z node 34 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T17:49:29.144208Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.144222Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.144250Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:29.144256Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T17:49:29.144260Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T17:49:29.144270Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T17:49:29.145531Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.146308Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:49:29.146355Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.146372Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:49:29.146469Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 
2024-11-21T17:49:29.146477Z node 34 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T17:49:29.146488Z node 34 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T17:49:29.146492Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T17:49:29.146498Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T17:49:29.146509Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [34:396:2370] message: TxId: 1003 2024-11-21T17:49:29.146515Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T17:49:29.146522Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:29.146527Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:29.146537Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:29.146542Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:49:29.146545Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:49:29.146574Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:29.146580Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:49:29.146583Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:49:29.146592Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:49:29.146696Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:29.147751Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:29.147761Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [34:599:2530] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:29.148449Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:29.148493Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 52us result status StatusSuccess 2024-11-21T17:49:29.148561Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 1000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> TSchemeShardTest::BlockStoreVolumeLimits >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::CopyTable >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> TSchemeShardTest::CopyTableAndConcurrentChanges >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardTest::CopyTableAndConcurrentSplit >> TSchemeShardTest::ParallelModifying >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TSchemeShardTest::ConsistentCopyTablesForBackup >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TSchemeShardTest::CopyLockedTableForBackup >> 
TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TSchemeShardTest::ConfigColumnFamily >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackup [GOOD] Test command err: 2024-11-21T17:49:18.831412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:18.831988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:18.832021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0018a8/r3tmp/tmpEX8tp7/pdisk_1.dat 2024-11-21T17:49:18.987636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T17:49:18.987748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.987812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:49:18.987856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:18.987877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.988138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:18.988156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:18.988212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.991440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:18.991498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:18.991506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:18.991819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.991827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:18.991832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:18.991888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.991893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.991899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:49:18.991906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T17:49:18.992645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:18.992768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:18.992811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:49:18.993062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:18.993070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:49:18.993074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:19.008739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:49:19.008781Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:19.057266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:19.057305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:19.068751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:19.198010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:19.198081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:49:19.198110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:49:19.198180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:19.198191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T17:49:19.198235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:49:19.198247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:49:19.198543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:19.198555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:49:19.198600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:19.198606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, 
path id: 1 2024-11-21T17:49:19.198682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:19.198693Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T17:49:19.198708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:19.198713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.198719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:19.198725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.198729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:19.198733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:19.198744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:49:19.198750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:49:19.198754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:49:19.199255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T17:49:19.199283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T17:49:19.199288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T17:49:19.199294Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:19.199300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:49:19.199323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T17:49:19.199328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T17:49:19.199534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T17:49:19.200523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:49:19.200594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:19.200609Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Table, opId: 281474976715657:0, schema: Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:19.200712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:49:19.200728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T17:49:19.200735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T17:49:19.200756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] wa ... X_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715664 2024-11-21T17:49:20.895747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715664 ready parts: 4/4 2024-11-21T17:49:20.895752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2024-11-21T17:49:20.895754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:0 2024-11-21T17:49:20.895761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 3 2024-11-21T17:49:20.895764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:1 2024-11-21T17:49:20.895766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:1 2024-11-21T17:49:20.895776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T17:49:20.895778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:2 2024-11-21T17:49:20.895780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:2 2024-11-21T17:49:20.895785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 9] was 3 2024-11-21T17:49:20.895787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:3 2024-11-21T17:49:20.895789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:3 2024-11-21T17:49:20.895794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 8] was 4 2024-11-21T17:49:21.131580Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupImpl TableId: [72057594046644480:9:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: 
KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:49:21.131662Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T17:49:21.131686Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:1064:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T17:49:21.131691Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:1064:2820] Handshake with writer: sender# [1:1066:2820] 2024-11-21T17:49:21.131711Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 1 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 57b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T17:49:21.131746Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 71 },{ Order: 1 BodySize: 71 },{ Order: 2 BodySize: 71 },{ Order: 3 BodySize: 57 },{ Order: 4 BodySize: 57 }] } 2024-11-21T17:49:21.131786Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:9:1][72075186224037891][1:1160:2820] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T17:49:21.131797Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037891 } 2024-11-21T17:49:21.131817Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:9:1][72075186224037891][1:1160:2820] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 1732211360107667 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 1 Group: 1732211360107667 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 2 Group: 1732211360107667 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 3 Group: 1732211360130543 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 4 Group: 1732211360130543 Step: 2000 TxId: 18446744073709551615 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2024-11-21T17:49:21.131929Z 
node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [1:1161:2893], serverId# [1:1162:2894], sessionId# [0:0:0] 2024-11-21T17:49:21.145203Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:9:1][72075186224037891][1:1160:2820] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T17:49:21.145247Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037891 } 2024-11-21T17:49:21.145265Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 9][1:1066:2820] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0,1,2,3,4] } 2024-11-21T17:49:21.145289Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:1064:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T17:49:21.145301Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:904:2712]][0][1:1065:2820] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T17:49:21.145331Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T17:49:21.145341Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T17:49:21.145380Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T17:49:21.145394Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 3 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T17:49:21.145401Z node 1 :PERSQUEUE DEBUG: waiting read cookie 3 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T17:49:21.373702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xfhjdb8km1vtmaqfay1fj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWUyMjYxOS1mNDM5NjVmLTI3NDRmNzk1LWVjYTE2YTgy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { null_flag_value: NULL_VALUE } }, { items { uint32_value: 5 } items { uint32_value: 200 } }, { items { uint32_value: 6 } items { null_flag_value: NULL_VALUE } } 2024-11-21T17:49:21.387167Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xfhk0fn1j695mqm6cknre, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBjY2U5MzMtOGJjYTY3NzUtNjUxNzU1ZTYtOTczMWNlMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:21.387341Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:21.400625Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:21.400692Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:21.421283Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xfhks8yt815az8bde0ens, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWEzODVlY2ItYzUyNTY4N2UtYmVmYjg2YzAtYWRjODEzMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:49:21.421448Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:21.436693Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:21.436764Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:21.660509Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:49:21.660564Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 3 partition 0 read timeout for __OFFLOAD_ACTOR__ offset 5 2024-11-21T17:49:21.660613Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:49:21.660712Z node 1 :CONTINUOUS_BACKUP DEBUG: [LocalPartitionReader][[1:904:2712]][0][1:1065:2820] Handle NKikimrClient.TResponse Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 5 SizeLag: 0 RealReadOffset: 5 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 18 LastOffset: 4 EndOffset: 5 } } 2024-11-21T17:49:21.660748Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2024-11-21T17:49:21.660757Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/continuousBackupImpl/streamImpl' partition 0 2024-11-21T17:49:21.660790Z node 1 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T17:49:21.660801Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 4 Topic 'Table/continuousBackupImpl/streamImpl' partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 endOffset 5 max time lag 0ms effective offset 5 2024-11-21T17:49:21.660808Z node 1 :PERSQUEUE DEBUG: waiting read cookie 4 partition 0 user __OFFLOAD_ACTOR__ offset 5 count 4294967295 size 1048576 timeout 1000 2024-11-21T17:49:22.018070Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd7xfj5y32exn6xeawk4q2h9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFjMmUyMmUtZjMwOWVmOWQtZTk4YmJkMTUtZjU4ZGI0ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { null_flag_value: NULL_VALUE } }, { items { uint32_value: 5 } items { uint32_value: 200 } }, { items { uint32_value: 6 } items { null_flag_value: NULL_VALUE } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2024-11-21T17:49:06.858234Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:06.875227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:06.878038Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:06.878057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:06.878122Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:06.878889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:06.878932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:06.878971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:06.878988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:06.879006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:06.879022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:06.879039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:06.879057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:06.879074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:06.879090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.879110Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:06.879126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:06.883542Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:06.884614Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:06.884689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:06.884700Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:06.884729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.884767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:06.884781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:06.884787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:06.884796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:06.884805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:06.884812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:06.884817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:06.884834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.884841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:06.884848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:06.884851Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:06.884860Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:06.884867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:06.884875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:06.884879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:06.884890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:06.884897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:06.884900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:06.884909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:06.884916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:06.884920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:06.884948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T17:49:06.884957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T17:49:06.884966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2024-11-21T17:49:06.884976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T17:49:06.884995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:06.885003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:06.885007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:06.885030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T17:49:06.885037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.885041Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.885054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:06.885060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:06.885064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:06.885083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:06.885089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:06.885093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T17:49:06.885106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=867; 2024-11-21T17:49:24.290100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2027; 2024-11-21T17:49:24.290263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=155; 2024-11-21T17:49:24.290470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=89; 2024-11-21T17:49:24.290481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=210; 2024-11-21T17:49:24.290497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=10; 2024-11-21T17:49:24.290512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:24.290519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=17; 2024-11-21T17:49:24.290551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=11; 2024-11-21T17:49:24.290561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=4; 2024-11-21T17:49:24.290879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=311; 2024-11-21T17:49:24.291163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=278; 2024-11-21T17:49:24.291217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=47; 2024-11-21T17:49:24.291258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=36; 2024-11-21T17:49:24.291265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T17:49:24.291270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T17:49:24.291277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:24.291290Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2024-11-21T17:49:24.291297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=2; 2024-11-21T17:49:24.291310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2024-11-21T17:49:24.291317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T17:49:24.291329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2024-11-21T17:49:24.291337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=4251; 2024-11-21T17:49:24.291372Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=3;blobs=6;rows=75200;bytes=7465172;raw_bytes=7453400; inactive portions=51;blobs=102;rows=1251798;bytes=102220660;raw_bytes=124064310; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:24.291396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:24.291404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:24.291417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:49:24.291438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:24.291444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:24.291463Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:24.291469Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:24.291480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:24.291496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:24.291512Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:24.291518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:24.291530Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:24.291536Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:24.291551Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:24.291569Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:24.291729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:24.291744Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4212:5840];tablet_id=9437184;parent=[1:4157:5792];fline=manager.h:99;event=ask_data;request=request_id=262;1={portions_count=54};; 2024-11-21T17:49:24.291947Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4212:5840];tablet_id=9437184;parent=[1:4157:5792];fline=manager.h:99;event=ask_data;request=request_id=264;1={portions_count=3};; 2024-11-21T17:49:24.292180Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:24.292746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:24.292759Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:49:24.292763Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:24.292768Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:24.292779Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:24.292788Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T17:49:24.292796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T17:49:24.292802Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:24.292810Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:24.292814Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 
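The initialize_shard records above also emit per-stage counters of the form "PRECHARGE:&lt;stage&gt;LoadingTime=&lt;n&gt;" / "EXECUTE:&lt;stage&gt;LoadingTime=&lt;n&gt;" for composite_init. A small, hypothetical sketch (again assuming only the log layout shown; the units are whatever the tablet reports) to rank those stages by cost:

```python
# Hypothetical helper, not shipped with YDB: rank the per-stage loading counters
# that the initialize_shard records emit, e.g. "EXECUTE:column_enginesLoadingTime=2027".
# Field names come from the log above; everything else is illustrative.
import re
import sys
from collections import defaultdict

STAGE = re.compile(r"(PRECHARGE|EXECUTE):(\w+?)LoadingTime=(\d+)")

def loading_times(text: str):
    totals = defaultdict(int)
    for phase, stage, value in STAGE.findall(text):
        totals[f"{phase}:{stage}"] += int(value)
    return sorted(totals.items(), key=lambda kv: kv[1], reverse=True)

if __name__ == "__main__":
    for stage, value in loading_times(sys.stdin.read()):
        print(f"{value:>8}  {stage}")
```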
2024-11-21T17:49:24.292820Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:24.292833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:24.292920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=54;path_id=1; 2024-11-21T17:49:24.293285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=54;path_id=1; 2024-11-21T17:49:24.293747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:24.293753Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4157:5792];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::PathErrors >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2024-11-21T17:48:20.265202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790886621773642:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:20.265284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b4f/r3tmp/tmpXsAJ5P/pdisk_1.dat 2024-11-21T17:48:20.309675Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11123 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:48:20.329462Z node 1 :TX_PROXY DEBUG: actor# [1:7439790886621773862:2136] Handle TEvNavigate describe path dc-1 2024-11-21T17:48:20.329481Z node 1 :TX_PROXY DEBUG: Actor# [1:7439790886621774233:2391] HANDLE EvNavigateScheme dc-1 2024-11-21T17:48:20.329528Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439790886621773965:2192], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:48:20.329536Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439790886621773965:2192], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:48:20.329672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:48:20.330139Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773538:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790886621774241:2394] 2024-11-21T17:48:20.330148Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773535:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790886621774240:2394] 2024-11-21T17:48:20.330166Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790886621773535:2050] Subscribe: subscriber# [1:7439790886621774240:2394], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:20.330166Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790886621773538:2053] Subscribe: subscriber# [1:7439790886621774241:2394], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:20.330181Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773541:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439790886621774242:2394] 2024-11-21T17:48:20.330183Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439790886621773541:2056] Subscribe: subscriber# [1:7439790886621774242:2394], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:48:20.330203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774241:2394][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790886621773538:2053] 2024-11-21T17:48:20.330213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774240:2394][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790886621773535:2050] 2024-11-21T17:48:20.330216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774242:2394][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790886621773541:2056] 2024-11-21T17:48:20.330223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790886621774238:2394] 2024-11-21T17:48:20.330229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439790886621774236:2394][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790886621774237:2394] 2024-11-21T17:48:20.330230Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773535:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790886621774240:2394] 2024-11-21T17:48:20.330233Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773541:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790886621774242:2394] 2024-11-21T17:48:20.330239Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439790886621774236:2394][/dc-1] Set up state: owner# [1:7439790886621773965:2192], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:20.330247Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773538:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439790886621774241:2394] 2024-11-21T17:48:20.330278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439790886621774239:2394] 2024-11-21T17:48:20.330292Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439790886621774236:2394][/dc-1] Path was already updated: owner# [1:7439790886621773965:2192], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:48:20.330299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774240:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790886621774237:2394], cookie# 1 2024-11-21T17:48:20.330303Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774241:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790886621774238:2394], cookie# 1 2024-11-21T17:48:20.330307Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774242:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790886621774239:2394], cookie# 1 2024-11-21T17:48:20.330327Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773535:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790886621774240:2394], cookie# 1 2024-11-21T17:48:20.330337Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773538:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790886621774241:2394], cookie# 1 2024-11-21T17:48:20.330341Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439790886621773541:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439790886621774242:2394], cookie# 1 2024-11-21T17:48:20.330355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774240:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790886621773535:2050], cookie# 1 2024-11-21T17:48:20.330364Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774241:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790886621773538:2053], cookie# 1 2024-11-21T17:48:20.330367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439790886621774242:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790886621773541:2056], cookie# 1 2024-11-21T17:48:20.330372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790886621774237:2394], cookie# 1 2024-11-21T17:48:20.330377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:48:20.330385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790886621774238:2394], cookie# 1 2024-11-21T17:48:20.330388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:48:20.330391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439790886621774239:2394], cookie# 1 2024-11-21T17:48:20.330393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439790886621774236:2394][/dc-1] Unexpected sync response: sender# [1:7439790886621774239:2394], cookie# 1 2024-11-21T17:48:20.339462Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439790886621773965:2192], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:48:20.339565Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439790886621773965:2192], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... ad_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:23.989560Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439790889519550785:2101], cacheItem# { Subscriber: { Subscriber: [4:7439790889519550942:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:23.989596Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791155807523750:2431], recipient# [4:7439791155807523749:2425], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.032383Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790883892819339:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.032440Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790883892819339:2149], cacheItem# { Subscriber: { Subscriber: [3:7439790888187787311:2620] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:24.032467Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: 
self# [3:7439791158770728196:3334], recipient# [3:7439791158770728195:2407], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.301285Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439790883892819339:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.301333Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439790883892819339:2149], cacheItem# { Subscriber: { Subscriber: [3:7439790888187787187:2540] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:24.301366Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791158770728201:3335], recipient# [3:7439791158770728200:2408], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } IsActive: /dc-1/USER_0 -- 2 -- 2 2024-11-21T17:49:24.616943Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2024-11-21T17:49:24.617071Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790883892818994:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439790888233295528:2100] 2024-11-21T17:49:24.617085Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790883892818994:2051] Unsubscribe: subscriber# [5:7439790888233295528:2100], path# /dc-1/USER_0 2024-11-21T17:49:24.617095Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790883892818997:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439790888233295529:2100] 2024-11-21T17:49:24.617100Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790883892818997:2054] Unsubscribe: subscriber# [5:7439790888233295529:2100], path# /dc-1/USER_0 2024-11-21T17:49:24.617119Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439790883892819000:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7439790888233295530:2100] 2024-11-21T17:49:24.617125Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439790883892819000:2057] Unsubscribe: subscriber# [5:7439790888233295530:2100], path# /dc-1/USER_0 2024-11-21T17:49:24.617388Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(5, 
(0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:49:24.671629Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7439790888233295526:2101], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.671683Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7439790888233295526:2101], cacheItem# { Subscriber: { Subscriber: [5:7439790892528263261:2366] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:24.671718Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7439791158816236204:2792], recipient# [5:7439791158816236203:2429], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.692500Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439790889519550785:2101], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.692552Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439790889519550785:2101], cacheItem# { Subscriber: { Subscriber: [4:7439790893814518433:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:24.692588Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791160102491048:2432], recipient# [4:7439791160102491047:2426], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 
2024-11-21T17:49:24.989927Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439790889519550785:2101], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:49:24.989983Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439790889519550785:2101], cacheItem# { Subscriber: { Subscriber: [4:7439790889519550942:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:49:24.990013Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791160102491050:2433], recipient# [4:7439791160102491049:2427], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamWithVirtualTimestamps[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:18.921493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:18.921515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:18.921520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:18.921524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:18.921537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:18.921542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:18.921549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:18.921620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:18.931144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:18.931165Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:18.933979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:18.934072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:18.934103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:18.938815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:18.938891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:18.938995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:18.939322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:18.940111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:18.940411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:18.940422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:18.940433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:18.940449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:18.940455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:18.940494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:18.941747Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:18.961200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } 
} } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:18.961271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.961317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:18.961356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:18.961363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.962223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:18.962252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:18.962289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.962299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:18.962303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:18.962308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:18.962910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.962924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:18.962928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:18.963466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.963480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.963486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:18.963492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:18.964036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:18.965309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:18.965381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:18.965638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:18.965674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:18.965683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:18.965739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:18.965748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:18.965779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:18.965794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:18.966425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:18.966438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:18.966481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:18.966497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:18.966584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.966591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:18.966604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:18.966608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:18.966614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:18.966619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:18.966624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:18.966627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:18.966640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:18.966646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:18.966650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
8944 2024-11-21T17:49:27.600180Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.600211Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.600216Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:27.600220Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T17:49:27.600242Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:49:27.600336Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.600345Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.600348Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:27.600351Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:49:27.600354Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:49:27.600470Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 169 } } 2024-11-21T17:49:27.600475Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T17:49:27.600489Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 169 } } 2024-11-21T17:49:27.600500Z node 26 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 169 } } 2024-11-21T17:49:27.600610Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 
2024-11-21T17:49:27.600619Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.600622Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:27.600626Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T17:49:27.600631Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T17:49:27.600639Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T17:49:27.600693Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:49:27.600698Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 1 2024-11-21T17:49:27.600709Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:1, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:49:27.600714Z node 26 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:49:27.600720Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 111669152010 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:49:27.600729Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:1, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:27.600732Z node 26 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:49:27.600736Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:27.600740Z node 26 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:1 129 -> 240 2024-11-21T17:49:27.602192Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.602226Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.602254Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:49:27.602266Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:27.602285Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:1, at schemeshard: 72057594046678944 
2024-11-21T17:49:27.602349Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:1, at schemeshard: 72057594046678944 2024-11-21T17:49:27.602355Z node 26 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:1 ProgressState 2024-11-21T17:49:27.602364Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:1 progress is 3/3 2024-11-21T17:49:27.602368Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T17:49:27.602372Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T17:49:27.602384Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:397:2372] message: TxId: 1003 2024-11-21T17:49:27.602392Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T17:49:27.602397Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:27.602401Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:27.602410Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:27.602414Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:49:27.602417Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:49:27.602428Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:27.602431Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:49:27.602434Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:49:27.602441Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:49:27.603066Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:27.603079Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:596:2528] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:27.603174Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:27.603226Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Stream" took 48us result status StatusSuccess 2024-11-21T17:49:27.603316Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Stream" PathDescription { Self { Name: "Stream" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: "streamImpl" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409548 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 4 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: true AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:23.541190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:23.541216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:23.541221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:23.541226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:23.541240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:23.541245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:23.541253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:23.541333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:23.552868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:23.552890Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:23.555748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:23.556637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:23.556672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:23.558038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:23.558183Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:23.558282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.558382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:23.559201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.559470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:23.559479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.559522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:23.559529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:23.559535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:23.559548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.562811Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:23.575650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:23.575729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.575778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:23.575813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:23.575819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.576473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.576498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:23.576536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.576546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:23.576551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:23.576555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T17:49:23.576984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.576995Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:23.577000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:23.577369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.577380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.577386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.577404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.577975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:23.578408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:23.578464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:23.578675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.578704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:23.578732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.578775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:23.578779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.578803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:23.578812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:23.579197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:23.579203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:23.579237Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.579243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:23.579320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.579326Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:23.579333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:23.579336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.579340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:23.579342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.579345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:23.579347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:23.579355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:23.579359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:23.579362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:23.579605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:23.579614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:23.579617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:23.579620Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:23.579623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:23.579633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Z_full/Table1" took 24us result status StatusSuccess 2024-11-21T17:49:28.650906Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/Table1" PathDescription { Self { Name: "Table1" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 16 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:28.650966Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:28.650999Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" took 35us result status StatusSuccess 2024-11-21T17:49:28.651038Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" PathDescription { Self { Name: "DirA" PathId: 17 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 16 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "DirB" PathId: 18 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } 
Children { Name: "Table2" PathId: 23 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 17 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:28.651092Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:28.651109Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" took 19us result status StatusSuccess 2024-11-21T17:49:28.651152Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 23 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 23 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:28.651223Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:28.651235Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" took 15us result status StatusSuccess 2024-11-21T17:49:28.651268Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 18 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "Table3" PathId: 24 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 18 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 18 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:28.651317Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:28.651332Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" took 16us result status StatusSuccess 2024-11-21T17:49:28.651373Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 24 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 18 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 
NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 24 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:24.612782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:24.612804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:24.612809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:24.612814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:24.612828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:24.612833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:24.612843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:24.612913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:24.623535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:24.623549Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:24.625657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:24.626369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:24.626396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:24.627693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:24.627868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:24.627957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:24.628045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:24.629005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:24.629269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:24.629278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:24.629304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:24.629311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:24.629317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:24.629329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.630306Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:24.642503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:24.642579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.642629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:24.642665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:24.642669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.643343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:24.643368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:24.643410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.643434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:24.643438Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:24.643442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:24.643835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.643845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:24.643850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:24.644155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.644163Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.644167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:24.644191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:24.644802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:24.645529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:24.645579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:24.645746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:24.645779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:24.645813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:24.645861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:24.645867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:24.645892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:24.645903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:24.646307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:24.646315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:24.646351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:24.646356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:24.646431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.646435Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:24.646443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:24.646446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:24.646450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:24.646453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:24.646455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:24.646458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:24.646466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:24.646470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:24.646473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:24.646689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:24.646698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:24.646701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:24.646704Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:24.646707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:24.646718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2024-11-21T17:49:28.744903Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T17:49:28.745323Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:49:28.745351Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:49:28.745356Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:49:28.745359Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:49:28.745364Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T17:49:28.745386Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:28.745685Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:49:28.745705Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T17:49:28.745752Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:28.745771Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 64424511593 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:28.745777Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:49:28.745838Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:49:28.745843Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:49:28.745867Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:28.745874Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:28.745882Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:49:28.746257Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:28.746265Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:49:28.746300Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:28.746314Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:28.746317Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:49:28.746320Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:49:28.746364Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:49:28.746369Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:49:28.746380Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:49:28.746385Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:49:28.746391Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:49:28.746396Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:49:28.746401Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:49:28.746405Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:49:28.746426Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:49:28.746430Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:49:28.746433Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:49:28.746435Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:49:28.746591Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:49:28.746603Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:49:28.746608Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:49:28.746613Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:49:28.746617Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:28.746835Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T17:49:28.746847Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:49:28.746850Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:49:28.746857Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:49:28.746861Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:28.746871Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:49:28.747258Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:49:28.747476Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:49:28.747525Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:49:28.747531Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:49:28.747575Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:49:28.747588Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:49:28.747591Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [15:404:2372] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T17:49:28.748212Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "Topic1" TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 121 } MeteringMode: METERING_MODE_RESERVED_CAPACITY } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:28.748285Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /MyRoot/USER_1/Topic1, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:28.748334Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:402, at schemeshard: 72057594046678944 2024-11-21T17:49:28.748778Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_1/Topic1\', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:402" TxId: 102 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2024-11-21T17:49:28.748805Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_1, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:402, operation: CREATE PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:49:28.748851Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:49:28.748856Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:49:28.748906Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:49:28.748919Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:28.748924Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:411:2379] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_reboots/unittest >> TConsistentOpsWithReboots::DropIndexedTableAndForceDropSimultaneously [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:47:21.704931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:47:21.704947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.704951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:47:21.704953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:47:21.704958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:47:21.704960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:47:21.704966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:47:21.705020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:47:21.713416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:47:21.713436Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.715334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:47:21.715425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:47:21.715450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:47:21.717424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:47:21.717494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:47:21.717585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.717759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.718312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.718492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.718497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.718505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:47:21.718509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.718513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:47:21.718537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:47:21.719442Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:47:21.736332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:47:21.736427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.736482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:47:21.736540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:47:21.736547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.737184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.737207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:47:21.737252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.737259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:47:21.737263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:47:21.737266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:47:21.737651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.737660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:47:21.737675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:47:21.737948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.737954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.737957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.737962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.738346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:47:21.738659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:47:21.738693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:47:21.738819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:47:21.738836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:47:21.738841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.738888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:47:21.738894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:47:21.738918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:47:21.738929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:47:21.739216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:47:21.739222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:47:21.739243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:47:21.739246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:47:21.739292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:47:21.739296Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:47:21.739303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:47:21.739306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.739309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:47:21.739312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:47:21.739314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:47:21.739316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:47:21.739323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:47:21.739327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:47:21.739329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
calPathId: 1] was 2 2024-11-21T17:49:21.800587Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:21.800975Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:21.801337Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T17:49:21.801644Z node 249 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:21.801653Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:21.801702Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:21.801728Z node 249 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:21.801732Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [249:205:2208], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T17:49:21.801737Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [249:205:2208], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T17:49:21.801844Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:49:21.801851Z node 249 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1005:0 ProgressState 2024-11-21T17:49:21.801864Z node 249 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:49:21.801868Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:21.801874Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T17:49:21.801879Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:21.801884Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:49:21.801888Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:49:21.801902Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:21.801908Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T17:49:21.801912Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:49:21.801919Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:49:21.802064Z node 249 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:21.802075Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:21.802079Z node 249 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:21.802084Z node 249 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:49:21.802088Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:21.802256Z node 249 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:21.802266Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:21.802270Z node 249 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:21.802273Z node 249 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:49:21.802277Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:21.802290Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T17:49:21.802326Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:21.802331Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:21.802342Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:21.802932Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:21.802948Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:21.803182Z node 249 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1004 2024-11-21T17:49:21.803240Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:49:21.803248Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 TestWaitNotification wait txId: 1005 2024-11-21T17:49:21.803264Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:49:21.803267Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 
2024-11-21T17:49:21.803357Z node 249 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:49:21.803374Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:49:21.803378Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [249:820:2761] 2024-11-21T17:49:21.803386Z node 249 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:49:21.803401Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:49:21.803405Z node 249 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [249:820:2761] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted wait until 72075186233409554 is deleted wait until 72075186233409555 is deleted 2024-11-21T17:49:21.803489Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:49:21.803501Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:49:21.803508Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:49:21.803514Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:49:21.803522Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T17:49:21.803533Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 2024-11-21T17:49:21.803541Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409552 2024-11-21T17:49:21.803549Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409553 2024-11-21T17:49:21.803555Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409554 2024-11-21T17:49:21.803564Z node 249 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409555 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 Deleted tabletId 72075186233409554 Deleted tabletId 72075186233409555 2024-11-21T17:49:21.803669Z node 249 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:21.803713Z node 249 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 54us result status StatusSuccess 2024-11-21T17:49:21.803804Z node 249 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::ManyDirs >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes |79.8%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite |79.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |79.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> GroupWriteTest::ByTableName >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] |79.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> TReplicationWithRebootsTests::Alter |79.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |79.8%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> KqpProxy::PingNotExistedSession >> KqpProxy::NoLocalSessionExecution >> TReplicationWithRebootsTests::CreateInParallelWithInitialController >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:23.418620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:23.418647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:23.418652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:23.418657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:23.418673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:23.418676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:23.418685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:23.418760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:23.427208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:23.427230Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:23.431183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:23.431942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:23.431985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:23.433628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:23.433818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:23.433927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.434047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:23.435386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.435744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:23.435761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.435805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:23.435814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:23.435821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:23.435839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.437334Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:23.454144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:23.454240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.454313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:23.454362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:23.454370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.456566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.456598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:23.456648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.456657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:23.456660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:23.456664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:23.457210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.457221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:23.457226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:23.457594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.457607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.457613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.457637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.458231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:23.458637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:23.458691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:23.458887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.458918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:23.458938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.458995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:23.459001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:49:23.459033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:23.459045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:23.459452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:23.459461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:23.459511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.459517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:23.459616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.459622Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:23.459636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:23.459640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.459645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:23.459651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.459655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:23.459659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:23.459670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:23.459676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:23.459680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:23.459979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:23.459993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:23.459998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:23.460003Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:23.460008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:23.460023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
shardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } Children { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:30.607535Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:30.607558Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 23us result status StatusSuccess 2024-11-21T17:49:30.607605Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:30.607648Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:30.607661Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 15us result status StatusSuccess 2024-11-21T17:49:30.607697Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:30.607731Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2024-11-21T17:49:30.607740Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 9us result status StatusSuccess 2024-11-21T17:49:30.607765Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:30.607795Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:30.607806Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 11us result status StatusSuccess 2024-11-21T17:49:30.607833Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" 
LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] Test command err: 2024-11-21T17:49:18.112419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791135552723889:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:18.112470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00177c/r3tmp/tmpT2mfAl/pdisk_1.dat 2024-11-21T17:49:18.181126Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30180, node 1 2024-11-21T17:49:18.201629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:18.201643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:18.201645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:18.201687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:18.216503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:18.216535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:18.220683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29011 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:18.268799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:18.277357Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:18.297533Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T17:49:18.572700Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T17:49:18.572741Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791135552724338:2296], Start check tables existence, number paths: 2 2024-11-21T17:49:18.573392Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWFmM2MzNDQtZGI0M2ZiNi1hYTA3N2ZhNC1kZDYwMDY5Yg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWFmM2MzNDQtZGI0M2ZiNi1hYTA3N2ZhNC1kZDYwMDY5Yg== 2024-11-21T17:49:18.574999Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T17:49:18.575003Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T17:49:18.575007Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T17:49:18.575020Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791135552724338:2296], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T17:49:18.575027Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791135552724338:2296], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T17:49:18.575030Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791135552724338:2296], Successfully finished 2024-11-21T17:49:18.575062Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T17:49:18.575068Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWFmM2MzNDQtZGI0M2ZiNi1hYTA3N2ZhNC1kZDYwMDY5Yg==, ActorId: [1:7439791135552724339:2297], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:49:18.581412Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791135552724366:2284], DatabaseId: Root, PoolId: 
sample_pool_id, Start pool creating 2024-11-21T17:49:18.582294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:18.582854Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791135552724366:2284], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T17:49:18.583383Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791135552724366:2284], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T17:49:18.584440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791135552724366:2284], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:18.661788Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791135552724366:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T17:49:18.662772Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791135552724366:2284], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T17:49:18.663727Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T17:49:18.663744Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T17:49:18.663763Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWFmM2MzNDQtZGI0M2ZiNi1hYTA3N2ZhNC1kZDYwMDY5Yg==, ActorId: [1:7439791135552724339:2297], ActorState: ReadyState, TraceId: 01jd7xfey7bq61dvxtdjnh8jw5, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T17:49:18.663778Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791135552724426:2301], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T17:49:18.664280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791135552724426:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:18.664317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:18.714993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.715911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.716306Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YWFmM2MzNDQtZGI0M2ZiNi1hYTA3N2ZhNC1kZDYwMDY5Yg==, ActorId: [1:7439791135552724339:2297], ActorState: ExecuteState, TraceId: 01jd7xfey7bq61dvxtdjnh8jw5, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7439791135552724434:2297] WorkloadServiceCleanup: 0 2024-11-21T17:49:18.716719Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWFmM2MzNDQtZGI0M2ZiNi1hYTA3N2ZhNC1kZDYwMDY5Yg==, ActorId: [1:7439791135552724339:2297], ActorState: CleanupState, TraceId: 01jd7xfey7bq61dvxtdjnh8jw5, EndCleanup, isFinal: 0 2024-11-21T17:49:18.716729Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWFmM2MzNDQtZGI0M2ZiNi1hYTA3N2ZhNC1kZDYwMDY5Yg==, ActorId: [1:7439791135552724339:2297], ActorState: CleanupState, TraceId: 01jd7xfey7bq61dvxtdjnh8jw5, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439791135552723985:2256] 2024-11-21T17:49:18.721088Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQxMDcwYjctNGQ2ODRhMzUtNmE3ZTA5ZDYtNWFhN2I0M2Q=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MWQxMDcwYjctNGQ2ODRhMzUtNmE3ZTA5ZDYtNWFhN2I0M2Q= 2024-11-21T17:49:18.721217Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T17:49:18.721233Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQxMDcwYjctNGQ2ODRhMzUtNmE3ZTA5ZDYtNWFhN2I0M2Q=, ActorId: [1:7439791135552724465:2304], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:49:18.721285Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQxMDcwYjctNGQ2ODRhMzUtNmE3ZTA5ZDYtNWFhN2I0M2Q=, ActorId: [1:7439791135552724465:2304], ActorState: ReadyState, TraceId: 01jd7xff0127c0edc1mpgwrec5, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7439791135552724464:2342] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T17:49:18.721295Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439791135552724465:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MWQxMDcwYjctNGQ2ODRhMzUtNmE3ZTA5ZDYtNWFhN2I0M2Q= 2024-11-21T17:49:18.721310Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791135552724467:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T17:49:18.721339Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439791135552724468:2306], Database: /Root, Start database fetching 2024-11-21T17:49:18.721583Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791135552724467:2305], DatabaseId: /Root, 
PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T17:49:18.721598Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] A ... ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, ExecutePhyTx, tx: 0x0000000000000000 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 1 2024-11-21T17:49:30.656108Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, TExecPhysicalRequest, add DeferredEffect to Transaction, current Transactions.size(): 1 2024-11-21T17:49:30.656111Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, TExecPhysicalRequest, tx has commit locks 2024-11-21T17:49:30.656117Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, Sending to Executer TraceId: 0 8 2024-11-21T17:49:30.656126Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, Created new KQP executer: [7:7439791184911934827:2471] isRollback: 0 2024-11-21T17:49:30.659669Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T17:49:30.659729Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, txInfo Status: Committed Kind: ReadWrite TotalDuration: 5.039 ServerDuration: 5.006 QueriesCount: 2 2024-11-21T17:49:30.659764Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T17:49:30.659780Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:49:30.659787Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, EndCleanup, isFinal: 0 2024-11-21T17:49:30.659798Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftmyfnhbjg93fx04zr0s, Sent query response back to proxy, proxyRequestId: 31, proxyId: [7:7439791176321999099:2250] 2024-11-21T17:49:30.659914Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: 
[TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, TxId: 2024-11-21T17:49:30.659937Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T17:49:30.660062Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ReadyState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, received request, proxyRequestId: 32 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [7:7439791184911934836:2479] database: /Root databaseId: /Root pool id: default 2024-11-21T17:49:30.660072Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ReadyState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, request placed into pool from cache: default 2024-11-21T17:49:30.660083Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ReadyState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, Sending CompileQuery request 2024-11-21T17:49:30.660299Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, ExecutePhyTx, tx: 0x000045017C1F2D18 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T17:49:30.660321Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, Sending to Executer TraceId: 0 8 2024-11-21T17:49:30.660334Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, Created new KQP executer: [7:7439791184911934839:2471] isRollback: 0 2024-11-21T17:49:30.661822Z node 7 :KQP_SESSION DEBUG: 
SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T17:49:30.661842Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, ExecutePhyTx, tx: 0x000045017C1E3998 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T17:49:30.661980Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T17:49:30.662019Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, txInfo Status: Committed Kind: ReadOnly TotalDuration: 1.757 ServerDuration: 1.723 QueriesCount: 2 2024-11-21T17:49:30.662055Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T17:49:30.662068Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:49:30.662076Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, EndCleanup, isFinal: 0 2024-11-21T17:49:30.662087Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ExecuteState, TraceId: 01jd7xftn48z61sbhj6afbcjq5, Sent query response back to proxy, proxyRequestId: 32, proxyId: [7:7439791176321999099:2250] 2024-11-21T17:49:30.662182Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, TxId: 2024-11-21T17:49:30.662203Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, TxId: 2024-11-21T17:49:30.662235Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7439791180616966860:2305], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2024-11-21T17:49:30.662241Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, 
ActorId: [7:7439791184911934803:2471], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:49:30.662245Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:49:30.662247Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:49:30.662250Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:49:30.662261Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWRkZTE5OC0xMTA0YTE4MS1iMTkzYzhmYS01MjAxN2NlYg==, ActorId: [7:7439791184911934803:2471], ActorState: unknown state, Session actor destroyed >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:09.433164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:09.433185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:09.433189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:09.433194Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:09.433208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:09.433211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:09.433219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:09.433296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:09.443013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:09.443031Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:09.445091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:09.445184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:09.445207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:09.447635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:09.447721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:09.447822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.447998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:09.448668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.448949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:09.448959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.448972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:09.448979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:09.448986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:09.449024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:09.450308Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:09.464689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T17:49:09.464756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.464797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:09.464829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:09.464834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.465343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.465362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:09.465399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.465406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:09.465409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:09.465412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:09.465778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.465789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:09.465792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:09.466129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.466136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.466139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.466144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.466600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:09.466961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:09.467002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:09.467167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.467187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:09.467192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.467236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:09.467240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.467260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:09.467268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:09.467691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:09.467705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:09.467732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.467737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:09.467794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.467800Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:09.467809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:09.467813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.467820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:09.467825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.467830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:09.467834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:09.467845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:09.467850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:09.467853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
tToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:30.957334Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:30.957389Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 63us result status StatusSuccess 2024-11-21T17:49:30.957511Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:30.967811Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:49:30.967861Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:714:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T17:49:30.967897Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732211370954088 Step: 5000003 TxId: 
18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732211370954088 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732211370954088 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:49:30.968784Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T17:49:30.968806Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:714:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:09.391013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:09.391029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:09.391033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:09.391035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T17:49:09.391044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:09.391046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:09.391053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:09.391108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:09.400181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:09.400197Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:09.401828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:09.401889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:09.401911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:09.404081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:09.404135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:09.404205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.404367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:09.404849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.405058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:09.405065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.405074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:09.405078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:09.405082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:09.405109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:09.406066Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:09.417601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:09.417662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: 
//MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.417700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:09.417733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:09.417737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.418210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.418231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:09.418273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.418284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:09.418288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:09.418292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:09.418650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.418660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:09.418665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:09.419002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.419012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.419018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.419023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.419534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:09.419897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:09.419934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:09.420092Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.420113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:09.420119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.420167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:09.420173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.420198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:09.420208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:09.420544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:09.420552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:09.420581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.420588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:09.420647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.420652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:09.420661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:09.420664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.420670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:09.420675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.420679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:09.420682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:09.420692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:09.420697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:09.420701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
sult: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:31.452111Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:31.452142Z 
node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 37us result status StatusSuccess 2024-11-21T17:49:31.452256Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2024-11-21T17:49:09.429797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791097376658313:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:09.429855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0006ae/r3tmp/tmpc9b7t1/pdisk_1.dat 2024-11-21T17:49:09.493223Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18534, node 1 2024-11-21T17:49:09.498612Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:49:09.498628Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:49:09.502582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:09.502694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.502759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:49:09.502988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:09.502999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:09.503001Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:09.503031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:09.503166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.503335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.503339Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.503347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:49:09.503360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:09.524632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.525440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.525455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.526037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:09.526118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:09.526128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:49:09.526500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:09.526507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:49:09.526541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:09.526816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.527789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349575, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.527806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:49:09.527892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:49:09.528387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.528421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.528433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:49:09.528448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:49:09.528459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:49:09.528475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:49:09.528916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:49:09.528935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:49:09.528939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:09.528958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:49:09.529231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:09.529252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:09.530691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24710 2024-11-21T17:49:09.704636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791097376659100:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.704666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.724334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.724481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:49:09.724663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.724674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.726103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T17:49:09.726171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.726227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.726255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:49:09.726337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:49:09.726520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:09.726529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:09.726533Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:49:09.726582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:09.726584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:09.726586Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:49:09.728179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:49:09.728202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:49:09.728757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:49:09.781096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:49:09.781109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:49:09.781130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T17:49:09.781559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:49:09.782278Z node ... PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.828579Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746308. Ctx: { TraceId: 01jd7xfsv38pb8jv5hp8bmcfxd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M2OWY5ZTMtZmI5NzBjZTYtZWY2YTE0ZmEtOWUyZDFkZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.828609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746309. Ctx: { TraceId: 01jd7xfsv33jbjck52xm8fppcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZhODJiYjUtN2YxZmM4MDAtNTQ2YjMwZjQtN2U0Zjk4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.828680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746313. Ctx: { TraceId: 01jd7xfsv31sc74pvs92tnjvke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgzMGY1MzgtODAwMDJmOC04MTE0MzBjMi1hNmY3MWE0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.828686Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746312. Ctx: { TraceId: 01jd7xfsv321j6c584d2fqab8f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg0MmVjODUtMzA1MmRkZS1hZjc0MmNmYi1lMTRmNTdjMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.828740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746310. Ctx: { TraceId: 01jd7xfsv33w83r8ya75jysnd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRiMGIzNDItYWI0Mjc4N2QtYjRjNjZlYzctZDdjOTQzNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.828810Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746311. Ctx: { TraceId: 01jd7xfsv4bg13nas4ssd9ck5p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIzYjRiN2YtMTY5Y2ZjODUtMjlhN2NhNi01YmRlMjhkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.829426Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746314. Ctx: { TraceId: 01jd7xfsv3af5nkym9sak06bkg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY3N2M2YmItN2ViY2I2OWUtMTZhMDc0M2ItOGQ5ZGFlNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.830182Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746315. Ctx: { TraceId: 01jd7xfsv502489mtkvdz8gq8r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc4MGUzMjMtZTI4ODUzZjAtNjYwNzUwNjQtMjk2NWU1MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.830183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746316. Ctx: { TraceId: 01jd7xfsv52wcnnntwfs79abrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYxMzllMmYtMWNiZTYwY2EtZTNhYzdkNTYtZTliNTFlMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.831099Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746317. 
Ctx: { TraceId: 01jd7xfsv662dp2g0qhgnetx56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIzYjRiN2YtMTY5Y2ZjODUtMjlhN2NhNi01YmRlMjhkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.831126Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746318. Ctx: { TraceId: 01jd7xfsv6c8q91zdr4e85zmg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdhOWQ2ZDMtZjdkMmExZjctODJiMzJiNGUtN2U5YjZmM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.831205Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746320. Ctx: { TraceId: 01jd7xfsv62rr5hj880nc0gy1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZhODJiYjUtN2YxZmM4MDAtNTQ2YjMwZjQtN2U0Zjk4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.831288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746319. Ctx: { TraceId: 01jd7xfsv60b0qzm8vw06mc1fs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M2OWY5ZTMtZmI5NzBjZTYtZWY2YTE0ZmEtOWUyZDFkZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.831647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746321. Ctx: { TraceId: 01jd7xfsv73rb8xk09w98wm2m9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgzMGY1MzgtODAwMDJmOC04MTE0MzBjMi1hNmY3MWE0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.831703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746322. Ctx: { TraceId: 01jd7xfsv787r7b97gny8d51t5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg0MmVjODUtMzA1MmRkZS1hZjc0MmNmYi1lMTRmNTdjMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.831754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746323. Ctx: { TraceId: 01jd7xfsv7fyz064rhxkxfw77f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRiMGIzNDItYWI0Mjc4N2QtYjRjNjZlYzctZDdjOTQzNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211349827 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:29.835086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746326. Ctx: { TraceId: 01jd7xfsvaaaz8q37f9fj0rbfy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdhOWQ2ZDMtZjdkMmExZjctODJiMzJiNGUtN2U5YjZmM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.835104Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746327. 
Ctx: { TraceId: 01jd7xfsva2yydt6kkpw6pwxy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYxMzllMmYtMWNiZTYwY2EtZTNhYzdkNTYtZTliNTFlMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.835168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746328. Ctx: { TraceId: 01jd7xfsvaaepjt31txp02g1p8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY3N2M2YmItN2ViY2I2OWUtMTZhMDc0M2ItOGQ5ZGFlNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.835472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746324. Ctx: { TraceId: 01jd7xfsva807mptndvws6fdkw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIzYjRiN2YtMTY5Y2ZjODUtMjlhN2NhNi01YmRlMjhkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.835550Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746325. Ctx: { TraceId: 01jd7xfsva8ns0qd52g2rpyzk6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc4MGUzMjMtZTI4ODUzZjAtNjYwNzUwNjQtMjk2NWU1MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.835617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746329. Ctx: { TraceId: 01jd7xfsvaahzmvh11j1d8gydd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZhODJiYjUtN2YxZmM4MDAtNTQ2YjMwZjQtN2U0Zjk4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.835717Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746330. Ctx: { TraceId: 01jd7xfsvaaafsky01b3jbkcfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRiMGIzNDItYWI0Mjc4N2QtYjRjNjZlYzctZDdjOTQzNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.835925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746332. Ctx: { TraceId: 01jd7xfsva3aw8wwzs6j2hmz5r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M2OWY5ZTMtZmI5NzBjZTYtZWY2YTE0ZmEtOWUyZDFkZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.836024Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746331. Ctx: { TraceId: 01jd7xfsva54j0cj5ytt6skdpg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgzMGY1MzgtODAwMDJmOC04MTE0MzBjMi1hNmY3MWE0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.836553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976746333. Ctx: { TraceId: 01jd7xfsva7jef3fg7g7fyfe99, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg0MmVjODUtMzA1MmRkZS1hZjc0MmNmYi1lMTRmNTdjMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211349827 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:30.438362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 32912 rowCount 414 cpuUsage 0 2024-11-21T17:49:30.439273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 40296 rowCount 491 cpuUsage 0 2024-11-21T17:49:30.538609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T17:49:30.538699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 414, DataSize 32912 2024-11-21T17:49:30.538760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 491, DataSize 40296 2024-11-21T17:49:30.539082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> ScriptExecutionsTest::RunCheckLeaseStatus >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> TInterconnectTest::OldFormat ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2024-11-21T17:49:09.422375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791093896665180:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:09.422523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000618/r3tmp/tmpc4wMyo/pdisk_1.dat 2024-11-21T17:49:09.480679Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15947, node 1 2024-11-21T17:49:09.497007Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:09.497023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:09.497024Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:09.497054Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:09.522751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:09.522787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:09.524388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:09.552967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.554028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.554046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.554743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:09.554804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:09.554813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:49:09.555271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:09.555279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:49:09.555351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:09.555680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.556701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349603, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.556723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:49:09.556806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:49:09.557320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.557372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.557390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:49:09.557407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:49:09.557422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:49:09.557441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:49:09.557854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:49:09.557878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:49:09.557886Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:09.557900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:17503 2024-11-21T17:49:09.692901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791093896666106:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.692933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.712710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.712881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:49:09.713102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.713118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.716712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T17:49:09.716798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.716889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.716918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:49:09.717633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:49:09.717685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:09.717690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49:09.717694Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:49:09.717748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:09.717751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49:09.717753Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:49:09.720289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:49:09.720325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:49:09.720988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:49:09.773602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:49:09.773615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:49:09.773636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:49:09.774068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:49:09.774852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349820, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.774866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211349820 2024-11-21T17:49:09.774891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:49:09.775256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.775334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.775356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:49:09.775637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:09.775651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49:09.775656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057 ... 08t0qmvercc1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY1ZWZkZjgtN2ZkYjEzNWMtZDE2ZmU3YjQtNjdkYjY4MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.804208Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745762. Ctx: { TraceId: 01jd7xfstb7nwrx4h758wdqj83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWMwZWJjYTQtMzk1ODNlMDUtZDZjZjYwNjktYWIwZjIyNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.804311Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745764. Ctx: { TraceId: 01jd7xfstb3v4t0hkf7ctas673, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdhZTI3OTEtZDM3NDg1NS1jZTNlNzBiOS1lMDJkMGVjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.804523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745763. Ctx: { TraceId: 01jd7xfstb9zkfwp73bn3g8yjj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRiM2VkNjUtZGVjZWY1YjEtZjMzMmY2NmMtYzc5YmY1MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.804716Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745765. Ctx: { TraceId: 01jd7xfstb42717exhdkhcjsdn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk1MWNhMDUtNDFhN2I3ODktNTAzYjliZjEtNGMwZTViYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.804805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745767. Ctx: { TraceId: 01jd7xfstbcbfzbhzar3he9e2t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxNGVmYWMtZjZjY2UyYjQtNmI3OWU2OTgtNTNlMmVjMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:49:29.804890Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745766. Ctx: { TraceId: 01jd7xfstb3a31b93y528pztx8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjgzYjNlMi0yODg0MTEwZS1hMDdiNmEwNi00ZjU2YmQ5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.805019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745768. Ctx: { TraceId: 01jd7xfstbczkadvnbn2ytynd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ2NGNiYjctODM4YWQyZWQtMTg5ZGU2Y2QtYzA1ODA0ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.805401Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745769. Ctx: { TraceId: 01jd7xfstb36rct0ae6n4y1mft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMzN2VhMTMtMTA5NTI2Zi1iZmNjNTgxYy05Njk5ZWRjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.805725Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745770. Ctx: { TraceId: 01jd7xfstcdks39jsc5njvkvfr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JhOThjZTUtZmZmOGQ2MzgtNTEyN2ZjMTMtNjRjNGMyZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.807848Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745771. Ctx: { TraceId: 01jd7xfstf8jnsn32zna7fz6ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRiM2VkNjUtZGVjZWY1YjEtZjMzMmY2NmMtYzc5YmY1MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.807895Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745773. Ctx: { TraceId: 01jd7xfstf6hxvyf7ncqh0vvgz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk1MWNhMDUtNDFhN2I3ODktNTAzYjliZjEtNGMwZTViYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.807942Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745772. Ctx: { TraceId: 01jd7xfstfdh2ngw03e94vg0x8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdhZTI3OTEtZDM3NDg1NS1jZTNlNzBiOS1lMDJkMGVjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.807992Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745775. Ctx: { TraceId: 01jd7xfstfdnhwsd2tqkdhqw45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ2NGNiYjctODM4YWQyZWQtMTg5ZGU2Y2QtYzA1ODA0ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.808180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745774. Ctx: { TraceId: 01jd7xfstf0yzwcb48gj58xjzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWMwZWJjYTQtMzk1ODNlMDUtZDZjZjYwNjktYWIwZjIyNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.808306Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745776. Ctx: { TraceId: 01jd7xfstfdn5225jmbp7p6yjb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY1ZWZkZjgtN2ZkYjEzNWMtZDE2ZmU3YjQtNjdkYjY4MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2024-11-21T17:49:29.810359Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745778. 
Ctx: { TraceId: 01jd7xfstj7d34440k4fvczmnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JhOThjZTUtZmZmOGQ2MzgtNTEyN2ZjMTMtNjRjNGMyZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.810368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745777. Ctx: { TraceId: 01jd7xfsthfmg0mxq3cfkpzpkf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjgzYjNlMi0yODg0MTEwZS1hMDdiNmEwNi00ZjU2YmQ5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2024-11-21T17:49:29.810440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745779. Ctx: { TraceId: 01jd7xfsthcja2wkg3vpapfm0v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMzN2VhMTMtMTA5NTI2Zi1iZmNjNTgxYy05Njk5ZWRjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211349820 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:29.811066Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745780. Ctx: { TraceId: 01jd7xfstj1anq6zq1j2701mvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxNGVmYWMtZjZjY2UyYjQtNmI3OWU2OTgtNTNlMmVjMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.812446Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745781. Ctx: { TraceId: 01jd7xfstkbynckhavdc7bxc28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRiM2VkNjUtZGVjZWY1YjEtZjMzMmY2NmMtYzc5YmY1MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.812541Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745782. Ctx: { TraceId: 01jd7xfstk04wx4251npje1ppp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ2NGNiYjctODM4YWQyZWQtMTg5ZGU2Y2QtYzA1ODA0ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.812605Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745783. Ctx: { TraceId: 01jd7xfstk9ad2wfncwmfx5qt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdhZTI3OTEtZDM3NDg1NS1jZTNlNzBiOS1lMDJkMGVjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.812660Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745784. Ctx: { TraceId: 01jd7xfstke5nn3bjr2183h7r8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWMwZWJjYTQtMzk1ODNlMDUtZDZjZjYwNjktYWIwZjIyNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:29.812715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976745785. 
Ctx: { TraceId: 01jd7xfstk2xjtz82122d4x2kc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk1MWNhMDUtNDFhN2I3ODktNTAzYjliZjEtNGMwZTViYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211349820 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:29.851024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T17:49:29.851104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 16500, DataSize 1027543 2024-11-21T17:49:29.851503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T17:49:30.192719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 1036279 rowCount 16576 cpuUsage 0 2024-11-21T17:49:30.192746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 868157 rowCount 13504 cpuUsage 0 2024-11-21T17:49:30.292865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T17:49:30.292935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 16576, DataSize 1036279 2024-11-21T17:49:30.292998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 13504, DataSize 868157 2024-11-21T17:49:30.293372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> TestProtocols::TestResolveProtocol >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 |79.9%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |79.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TableCreation::ConcurrentUpdateTable >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> GroupWriteTest::ByTableName [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> TActorTracker::Basic [GOOD] >> TInterconnectTest::TestManyEvents >> TInterconnectTest::TestBlobEvent >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TInterconnectTest::TestSimplePingPong >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 14278589629182679945 2024-11-21T17:49:30.839976Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T17:49:30.842642Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T17:49:30.842654Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T17:49:30.842924Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T17:49:30.851460Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T17:49:30.851912Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T17:49:33.063493Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T17:49:33.063520Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 
72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T17:49:33.063529Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T17:49:33.063533Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T17:49:33.072044Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2024-11-21T17:49:33.072069Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2024-11-21T17:49:31.076393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791188982876205:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:31.076652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:49:31.078941Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791189231817967:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:31.079200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0015d1/r3tmp/tmpsM283I/pdisk_1.dat 2024-11-21T17:49:31.130778Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64412 2024-11-21T17:49:31.177048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:31.177080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:31.178836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:31.207355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:31.207382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:31.216451Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:49:31.216748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:31.386758Z node 2 :KQP_PROXY DEBUG: Updated YQL 
logs priority to current level: 4 2024-11-21T17:49:31.387276Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:31.388139Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NjIxNzYzN2MtZGRjYjIwOC03YmY5MzY0YS1mMjMwNzM2Ng==, workerId: [2:7439791189231818233:2277], database: , longSession: 1, local sessions count: 1 2024-11-21T17:49:31.388165Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:31.388211Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:31.388252Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T17:49:31.388260Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2024-11-21T17:49:31.388266Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:31.388274Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:31.388292Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.388316Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.388325Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.388328Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.388415Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.422417Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:31.422833Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:31.422933Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NjIxNzYzN2MtZGRjYjIwOC03YmY5MzY0YS1mMjMwNzM2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2024-11-21T17:49:31.422945Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7439791188982876878:2441] 2024-11-21T17:49:31.422951Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T17:49:31.422960Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2024-11-21T17:49:31.422993Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:31.423009Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:31.423022Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.423029Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.423042Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.423051Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.423096Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NjIxNzYzN2MtZGRjYjIwOC03YmY5MzY0YS1mMjMwNzM2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7439791189231818233:2277] 2024-11-21T17:49:31.423114Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7439791189231818249:2111] 2024-11-21T17:49:31.423471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791188982876901:2278], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:31.423502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:31.424731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791189231818250:2278], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:31.424753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:31.452512Z node 2 :KQP_PROXY DEBUG: TraceId: "01jd7xfvcz3ypmk50tw016kvsz", Created new session, sessionId: ydb://session/3?node_id=2&id=NmZkNTI1Yi00ZTEyM2Y0My1iNjAzOWE0OC02Y2NhNWY4OQ==, workerId: [2:7439791189231818261:2280], database: , longSession: 0, local sessions count: 2 2024-11-21T17:49:31.452561Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jd7xfvcz3ypmk50tw016kvsz, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NmZkNTI1Yi00ZTEyM2Y0My1iNjAzOWE0OC02Y2NhNWY4OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7439791189231818261:2280] 2024-11-21T17:49:31.452572Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7439791189231818262:2114] 2024-11-21T17:49:31.452751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791189231818263:2281], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:31.452771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:31.452865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791189231818268:2284], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:31.458389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:49:31.463773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791189231818270:2285], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:49:31.549118Z node 2 :KQP_PROXY DEBUG: TraceId: "01jd7xfvcz3ypmk50tw016kvsz", Forwarded response to sender actor, requestId: 4, sender: [2:7439791189231818260:2279], selfId: [2:7439791189231818094:2175], source: [2:7439791189231818261:2280] 2024-11-21T17:49:31.549164Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NmZkNTI1Yi00ZTEyM2Y0My1iNjAzOWE0OC02Y2NhNWY4OQ==, workerId: [2:7439791189231818261:2280], local sessions count: 1 2024-11-21T17:49:31.549998Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7439791188982876424:2256], selfId: [2:7439791189231818094:2175], source: [2:7439791189231818233:2277] 2024-11-21T17:49:31.550116Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7439791188982876804:2395], selfId: [1:7439791188982876424:2256], source: [2:7439791189231818094:2175] 2024-11-21T17:49:32.012671Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791196227895313:2098];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:32.012732Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0015d1/r3tmp/tmpDqsu5c/pdisk_1.dat 2024-11-21T17:49:32.031016Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28823, node 3 2024-11-21T17:49:32.048469Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:32.048481Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:32.048483Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:32.048530Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { ... 
sender: [3:7439791196227896729:2336], selfId: [3:7439791196227895473:2256], source: [3:7439791196227896727:2335] 2024-11-21T17:49:32.644887Z node 3 :TX_PROXY ERROR: Access denied for user@builtin with access DescribeSchema to path Root/.metadata 2024-11-21T17:49:32.652431Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 7, sender: [3:7439791196227896716:2330], selfId: [3:7439791196227895473:2256], source: [3:7439791196227896715:2329] 2024-11-21T17:49:32.652528Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=MmM3YzI5ODYtOTdjNmZkMzYtYjA1OWJhNDUtZmVmYTAxYzY=, TxId: 2024-11-21T17:49:32.652542Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=MmM3YzI5ODYtOTdjNmZkMzYtYjA1OWJhNDUtZmVmYTAxYzY=, TxId: 2024-11-21T17:49:32.652609Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 54a26882-9a3ea438-10cb1258-8b23660a, start saving rows range [0; 1) 2024-11-21T17:49:32.652638Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, Bootstrap. Database: /Root 2024-11-21T17:49:32.652671Z node 3 :KQP_PROXY DEBUG: Request has 18445011862336.898949s seconds to be completed 2024-11-21T17:49:32.653089Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YjUwYjdhZGUtYTgwOGEzMDAtYTAxOTRkMzQtNmRkYjQ1MzE=, workerId: [3:7439791196227896760:2344], database: /Root, longSession: 1, local sessions count: 4 2024-11-21T17:49:32.653114Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:32.653128Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=MmM3YzI5ODYtOTdjNmZkMzYtYjA1OWJhNDUtZmVmYTAxYzY=, workerId: [3:7439791196227896715:2329], local sessions count: 3 2024-11-21T17:49:32.653222Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2024-11-21T17:49:32.653368Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YjUwYjdhZGUtYTgwOGEzMDAtYTAxOTRkMzQtNmRkYjQ1MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 11, targetId: [3:7439791196227896760:2344] 2024-11-21T17:49:32.653375Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [3:7439791196227896762:3014] 2024-11-21T17:49:32.674373Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 11, sender: [3:7439791196227896761:2345], selfId: [3:7439791196227895473:2256], source: [3:7439791196227896760:2344] 2024-11-21T17:49:32.674466Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YjUwYjdhZGUtYTgwOGEzMDAtYTAxOTRkMzQtNmRkYjQ1MzE=, TxId: 2024-11-21T17:49:32.674479Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YjUwYjdhZGUtYTgwOGEzMDAtYTAxOTRkMzQtNmRkYjQ1MzE=, TxId: 2024-11-21T17:49:32.674506Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 54a26882-9a3ea438-10cb1258-8b23660a, result part successfully saved 2024-11-21T17:49:32.674518Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 54a26882-9a3ea438-10cb1258-8b23660a, reply SUCCESS, issues: 2024-11-21T17:49:32.674582Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=YjUwYjdhZGUtYTgwOGEzMDAtYTAxOTRkMzQtNmRkYjQ1MzE=, workerId: [3:7439791196227896760:2344], local sessions count: 2 2024-11-21T17:49:32.674607Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, Bootstrap. Database: /Root 2024-11-21T17:49:32.674625Z node 3 :KQP_PROXY DEBUG: Request has 18445011862336.876992s seconds to be completed 2024-11-21T17:49:32.675025Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YzYyNWEzNTAtYjQyN2YzYTUtNDEyYzhmOGYtNzIyZGExOTA=, workerId: [3:7439791196227896783:2354], database: /Root, longSession: 1, local sessions count: 3 2024-11-21T17:49:32.675050Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:32.675082Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2024-11-21T17:49:32.675168Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzYyNWEzNTAtYjQyN2YzYTUtNDEyYzhmOGYtNzIyZGExOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 13, targetId: [3:7439791196227896783:2354] 2024-11-21T17:49:32.675176Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 13 timeout: 300.000000s actor id: [3:7439791196227896785:3024] 2024-11-21T17:49:32.726577Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 13, sender: [3:7439791196227896784:2355], selfId: [3:7439791196227895473:2256], source: [3:7439791196227896783:2354] 2024-11-21T17:49:32.726747Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YzYyNWEzNTAtYjQyN2YzYTUtNDEyYzhmOGYtNzIyZGExOTA=, TxId: 01jd7xfwnk5k48qdjd6zg4965r 2024-11-21T17:49:32.726864Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2024-11-21T17:49:32.726976Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzYyNWEzNTAtYjQyN2YzYTUtNDEyYzhmOGYtNzIyZGExOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 14, targetId: [3:7439791196227896783:2354] 2024-11-21T17:49:32.726987Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 14 timeout: 300.000000s actor id: [3:7439791196227896818:3036] 2024-11-21T17:49:32.773494Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 14, sender: [3:7439791196227896817:2366], selfId: [3:7439791196227895473:2256], source: [3:7439791196227896783:2354] 2024-11-21T17:49:32.773674Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YzYyNWEzNTAtYjQyN2YzYTUtNDEyYzhmOGYtNzIyZGExOTA=, TxId: 2024-11-21T17:49:32.773700Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 54a26882-9a3ea438-10cb1258-8b23660a, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YzYyNWEzNTAtYjQyN2YzYTUtNDEyYzhmOGYtNzIyZGExOTA=, TxId: 2024-11-21T17:49:32.773705Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 54a26882-9a3ea438-10cb1258-8b23660a. SUCCESS. Issues: 2024-11-21T17:49:32.773825Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=YzYyNWEzNTAtYjQyN2YzYTUtNDEyYzhmOGYtNzIyZGExOTA=, workerId: [3:7439791196227896783:2354], local sessions count: 2 2024-11-21T17:49:32.773865Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjMzMzVkYmEtOTQzYWZjMWItNDlhNGM5ZjUtOWNjMzExOA==, workerId: [3:7439791196227896693:2323], local sessions count: 1 >> TInterconnectTest::TestBlobEvent [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TableCreation::ConcurrentUpdateTable [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2024-11-21T17:49:32.644591Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T17:49:33.067968Z 
node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T17:49:33.482694Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T17:49:33.483227Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2024-11-21T17:49:32.204278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791195209654960:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:32.204298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001542/r3tmp/tmpzgbMPY/pdisk_1.dat 2024-11-21T17:49:32.278632Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3657 TServer::EnableGrpc on GrpcPort 23346, node 1 2024-11-21T17:49:32.291195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:32.291213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:32.291215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:32.291249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2024-11-21T17:49:32.304557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:32.304579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:32.305572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T17:49:32.347572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:32.351146Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:32.510998Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:32.511510Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:32.511791Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T17:49:32.511801Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2024-11-21T17:49:32.511810Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:32.511818Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:32.511840Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.511853Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.511861Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.511865Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.512066Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2024-11-21T17:49:32.512075Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2024-11-21T17:49:32.512089Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2024-11-21T17:49:32.512089Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2024-11-21T17:49:32.512091Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2024-11-21T17:49:32.512096Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2024-11-21T17:49:32.512107Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2024-11-21T17:49:32.512108Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2024-11-21T17:49:32.512111Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2024-11-21T17:49:32.512962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2024-11-21T17:49:32.513349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:32.513613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:32.514616Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2024-11-21T17:49:32.514631Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2024-11-21T17:49:32.514635Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976715659 2024-11-21T17:49:32.514641Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715658 2024-11-21T17:49:32.514662Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2024-11-21T17:49:32.514665Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715660 2024-11-21T17:49:32.572686Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2024-11-21T17:49:32.578756Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715658. Doublechecking... 2024-11-21T17:49:32.584676Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2024-11-21T17:49:32.640438Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2024-11-21T17:49:32.661348Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2024-11-21T17:49:32.663309Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2024-11-21T17:49:32.663500Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: a385ca8a-3411fad3-94350a6f-66ac4b0d, Bootstrap. Database: /dc-1 2024-11-21T17:49:32.663584Z node 1 :KQP_PROXY DEBUG: Request has 18445011862336.888051s seconds to be completed 2024-11-21T17:49:32.664301Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YzU2OGQwNS0xYzA2MjNhNC02NDZlMmRhNS00YWJiZjYwOQ==, workerId: [1:7439791195209655787:2301], database: /dc-1, longSession: 1, local sessions count: 1 2024-11-21T17:49:32.664330Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:32.667687Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: a385ca8a-3411fad3-94350a6f-66ac4b0d, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2024-11-21T17:49:32.667847Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YzU2OGQwNS0xYzA2MjNhNC02NDZlMmRhNS00YWJiZjYwOQ==, 
CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7439791195209655787:2301] 2024-11-21T17:49:32.667863Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7439791195209655790:2443] 2024-11-21T17:49:32.668282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791195209655801:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:32.668489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791195209655789:2303], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:32.668525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:32.669067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2024-11-21T17:49:32.670286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791195209655804:2307], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:49:32.777437Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7439791195209655788:2302], selfId: [1:7439791195209655180:2256], source: [ ... 17:49:33.565842Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715670 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" SchemeShardTabletId: 72057594046644480 } 2024-11-21T17:49:33.565850Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715673 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" SchemeShardTabletId: 72057594046644480 } 2024-11-21T17:49:33.565851Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2024-11-21T17:49:33.565852Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2024-11-21T17:49:33.565876Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715676 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" SchemeShardTabletId: 72057594046644480 } 2024-11-21T17:49:33.565877Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2024-11-21T17:49:33.565880Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715677 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" SchemeShardTabletId: 72057594046644480 } 2024-11-21T17:49:33.565881Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2024-11-21T17:49:33.565892Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715671 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" SchemeShardTabletId: 72057594046644480 } 2024-11-21T17:49:33.565893Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2024-11-21T17:49:33.565905Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715674 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523" SchemeShardTabletId: 72057594046644480 } 2024-11-21T17:49:33.565912Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2024-11-21T17:49:33.568415Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715675. Doublechecking... 
2024-11-21T17:49:33.584127Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 12, sender: [2:7439791196828979663:2352], selfId: [2:7439791196828978767:2227], source: [2:7439791196828979662:2351] 2024-11-21T17:49:33.584208Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4ae8f3b4-5748757f-bd54194d-be351b4, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjM0ODBhOTctNDk5ZWE3ZjktYWE3YjA0YTMtYzkyNWY1MmQ=, TxId: 01jd7xfxge2bjshge4327bbfck 2024-11-21T17:49:33.584344Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4ae8f3b4-5748757f-bd54194d-be351b4, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2024-11-21T17:49:33.584492Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjM0ODBhOTctNDk5ZWE3ZjktYWE3YjA0YTMtYzkyNWY1MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 13, targetId: [2:7439791196828979662:2351] 2024-11-21T17:49:33.584505Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 13 timeout: 300.000000s actor id: [2:7439791196828979796:2682] 2024-11-21T17:49:33.618269Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.623892Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.628433Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.629474Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2024-11-21T17:49:33.633949Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 13, sender: [2:7439791196828979795:2364], selfId: [2:7439791196828978767:2227], source: [2:7439791196828979662:2351] 2024-11-21T17:49:33.634119Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4ae8f3b4-5748757f-bd54194d-be351b4, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjM0ODBhOTctNDk5ZWE3ZjktYWE3YjA0YTMtYzkyNWY1MmQ=, TxId: 2024-11-21T17:49:33.634159Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4ae8f3b4-5748757f-bd54194d-be351b4, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjM0ODBhOTctNDk5ZWE3ZjktYWE3YjA0YTMtYzkyNWY1MmQ=, TxId: 2024-11-21T17:49:33.634169Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 4ae8f3b4-5748757f-bd54194d-be351b4. SUCCESS. Issues: 2024-11-21T17:49:33.634317Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZjM0ODBhOTctNDk5ZWE3ZjktYWE3YjA0YTMtYzkyNWY1MmQ=, workerId: [2:7439791196828979662:2351], local sessions count: 2 2024-11-21T17:49:33.634359Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ODA1MTNmMDgtOTc3ZGQwNGQtNTZlNzA3NGEtNWMyNjI0NmQ=, workerId: [2:7439791196828979504:2317], local sessions count: 1 2024-11-21T17:49:33.639732Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.641208Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.644827Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.650475Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.657673Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T17:49:33.660203Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2024-11-21T17:49:33.662459Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YmQ1YzRmYjMtZTJkZDMxNGEtMmE0OTc2ZDUtN2FiMjFkNWI=, workerId: [2:7439791196828979537:2331], local sessions count: 0 >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2024-11-21T17:49:31.092374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791189190332668:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:31.092540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0015c8/r3tmp/tmpBTbclj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8031, node 1 2024-11-21T17:49:31.149872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:49:31.149885Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:31.154924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:31.154941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:31.154943Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:31.154975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:49:31.192782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:31.192823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:31.203756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:31.225075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:31.226145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:31.226166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:31.226942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:31.227013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:31.227022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:49:31.227814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:31.227826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:49:31.228413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:31.228472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:31.229566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211371275, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:31.229581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:49:31.229664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:49:31.230247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:31.230312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:31.230328Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:49:31.230345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:49:31.230357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:49:31.230374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:49:31.231072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:49:31.231090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:49:31.231094Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:31.231109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:49:31.412130Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:31.412594Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T17:49:31.412842Z node 1 :KQP_PROXY DEBUG: Received ping session request, request_id: 2, sender: [1:7439791189190333548:2283], trace_id: 01jd7xfv7c2hsfm1sq1mn8z1n8 2024-11-21T17:49:31.412962Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 5.000000s actor id: [0:0:0] 2024-11-21T17:49:31.412983Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T17:49:31.412987Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2024-11-21T17:49:31.412994Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:31.413021Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T17:49:31.413041Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.413051Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.413061Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.413064Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:31.413073Z node 1 :KQP_PROXY DEBUG: Session not found, targetId: [2:8678280833929343339:121] requestId: 2 2024-11-21T17:49:31.413471Z node 1 :KQP_PROXY DEBUG: TraceId: "01jd7xfv7c2hsfm1sq1mn8z1n8", Forwarded response to sender actor, requestId: 2, sender: [1:7439791189190333548:2283], selfId: [1:7439791189190332883:2256], source: [1:7439791189190332883:2256] 2024-11-21T17:49:31.945090Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791191080940099:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0015c8/r3tmp/tmpMVJmcx/pdisk_1.dat 2024-11-21T17:49:31.950916Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:49:31.960132Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18373 TServer::EnableGrpc on GrpcPort 5930, node 4 2024-11-21T17:49:31.983958Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:31.983976Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:31.983978Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:31.984020Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T17:49:32.044497Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:32.044551Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:49:32.047676Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:32.048142Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:32.265799Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:32.266187Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:32.268489Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T17:49:32.268494Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.268504Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2024-11-21T17:49:32.268509Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:32.268521Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.268524Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard ... 
hNmFmYw==, workerId: [4:7439791199670875739:2405], database: /dc-1, longSession: 1, local sessions count: 2 2024-11-21T17:49:33.694099Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:33.694127Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2024-11-21T17:49:33.694230Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YWVkZDc3MC1iYmM0MzRjMy1hOTFiOWY0OS1iYjRhNmFmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 20, targetId: [4:7439791199670875739:2405] 2024-11-21T17:49:33.694243Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [4:7439791199670875741:2618] 2024-11-21T17:49:33.696352Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [4:7439791199670875740:2406], selfId: [4:7439791191080940080:2179], source: [4:7439791199670875739:2405] 2024-11-21T17:49:33.696423Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YWVkZDc3MC1iYmM0MzRjMy1hOTFiOWY0OS1iYjRhNmFmYw==, TxId: 01jd7xfxky54d0xwddabj0ypck 2024-11-21T17:49:33.696545Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, 
NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2024-11-21T17:49:33.696671Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YWVkZDc3MC1iYmM0MzRjMy1hOTFiOWY0OS1iYjRhNmFmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 21, targetId: [4:7439791199670875739:2405] 2024-11-21T17:49:33.696683Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 21 timeout: 300.000000s actor id: [4:7439791199670875762:2623] 2024-11-21T17:49:33.700203Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 21, sender: [4:7439791199670875761:2412], selfId: [4:7439791191080940080:2179], source: [4:7439791199670875739:2405] 2024-11-21T17:49:33.700298Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YWVkZDc3MC1iYmM0MzRjMy1hOTFiOWY0OS1iYjRhNmFmYw==, TxId: 2024-11-21T17:49:33.700334Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YWVkZDc3MC1iYmM0MzRjMy1hOTFiOWY0OS1iYjRhNmFmYw==, TxId: 2024-11-21T17:49:33.700345Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: b155ca63-2b6ee8d0-d6575edc-acc60d08. UNAVAILABLE. Issues: {
: Error: Lease expired } 2024-11-21T17:49:33.700409Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=YWVkZDc3MC1iYmM0MzRjMy1hOTFiOWY0OS1iYjRhNmFmYw==, workerId: [4:7439791199670875739:2405], local sessions count: 1 2024-11-21T17:49:33.701679Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: b155ca63-2b6ee8d0-d6575edc-acc60d08, successfully finalized script execution operation 2024-11-21T17:49:33.701698Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: b155ca63-2b6ee8d0-d6575edc-acc60d08, reply success 2024-11-21T17:49:33.702790Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jd7xfxm61j8hjtkmjza7rs2m, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NmMzMjVhNTgtOGU1N2E1ZmYtMTEyNDY2OWEtM2RkMzAyNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [4:7439791195375908198:2330] 2024-11-21T17:49:33.702804Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [4:7439791199670875785:2631] 2024-11-21T17:49:33.764051Z node 4 :KQP_PROXY DEBUG: TraceId: "01jd7xfxm61j8hjtkmjza7rs2m", Forwarded response to sender actor, requestId: 22, sender: [4:7439791199670875784:2417], selfId: [4:7439791191080940080:2179], source: [4:7439791195375908198:2330] 2024-11-21T17:49:33.766121Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, Bootstrap. Database: /dc-1 2024-11-21T17:49:33.766216Z node 4 :KQP_PROXY DEBUG: Request has 18445011862335.785405s seconds to be completed 2024-11-21T17:49:33.766666Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=YmU1NWM0ZS0xMWFkYmViOS1hODE5YWE0ZC1jYTdlMGIyOQ==, workerId: [4:7439791199670875825:2429], database: /dc-1, longSession: 1, local sessions count: 2 2024-11-21T17:49:33.766699Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:33.767019Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2024-11-21T17:49:33.767168Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YmU1NWM0ZS0xMWFkYmViOS1hODE5YWE0ZC1jYTdlMGIyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 24, targetId: [4:7439791199670875825:2429] 2024-11-21T17:49:33.767182Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [4:7439791199670875827:2647] 2024-11-21T17:49:33.793153Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 24, sender: [4:7439791199670875826:2430], selfId: [4:7439791191080940080:2179], source: [4:7439791199670875825:2429] 2024-11-21T17:49:33.793241Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YmU1NWM0ZS0xMWFkYmViOS1hODE5YWE0ZC1jYTdlMGIyOQ==, TxId: 01jd7xfxpz9yq9xfh5n7rabq9b 2024-11-21T17:49:33.793265Z node 4 :KQP_PROXY WARN: [TQueryBase] [TScriptLeaseUpdater] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Get lease info, Finish with BAD_REQUEST, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=YmU1NWM0ZS0xMWFkYmViOS1hODE5YWE0ZC1jYTdlMGIyOQ==, TxId: 01jd7xfxpz9yq9xfh5n7rabq9b 2024-11-21T17:49:33.793270Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Get lease info, Rollback transaction: 01jd7xfxpz9yq9xfh5n7rabq9b 2024-11-21T17:49:33.793329Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YmU1NWM0ZS0xMWFkYmViOS1hODE5YWE0ZC1jYTdlMGIyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 25, targetId: [4:7439791199670875825:2429] 2024-11-21T17:49:33.793336Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 25 timeout: 600.000000s actor id: [4:7439791199670875849:2655] 2024-11-21T17:49:33.793795Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 25, sender: [4:7439791199670875848:2437], selfId: [4:7439791191080940080:2179], source: [4:7439791199670875825:2429] 2024-11-21T17:49:33.793829Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: b155ca63-2b6ee8d0-d6575edc-acc60d08, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2024-11-21T17:49:33.793998Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=YmU1NWM0ZS0xMWFkYmViOS1hODE5YWE0ZC1jYTdlMGIyOQ==, workerId: [4:7439791199670875825:2429], local sessions count: 1 2024-11-21T17:49:33.794447Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NmMzMjVhNTgtOGU1N2E1ZmYtMTEyNDY2OWEtM2RkMzAyNGY=, workerId: [4:7439791195375908198:2330], local sessions count: 0 >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> TInterconnectTest::TestConnectAndDisconnect >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 >> TestProtocols::TestHTTPRequest [GOOD] >> TInterconnectTest::OldNbs [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes 
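In the trace above, TSaveScriptFinalStatusActor finalizes the operation as UNAVAILABLE with the issue "Lease expired" and deletes the lease row; the TScriptLeaseUpdater that bootstraps immediately afterwards then finds no matching lease and finishes with BAD_REQUEST "No such execution". For readability, the YQL statements these actors run are reproduced below, reformatted from the single-line log records above; this is only a cleaned-up excerpt of what the log already shows (the Optional parameters appear in the log without their inner type and are kept that way here), not an independent reference for the `.metadata` tables.

    -- TSaveScriptFinalStatusActor::OnRunQuery
    DECLARE $database AS Text; DECLARE $execution_id AS Text;

    SELECT operation_status, finalization_status, meta, customer_supplied_id,
           user_token, script_sinks, script_secret_names
    FROM `.metadata/script_executions`
    WHERE database = $database AND execution_id = $execution_id
      AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL);

    SELECT lease_generation
    FROM `.metadata/script_execution_leases`
    WHERE database = $database AND execution_id = $execution_id
      AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL);

    -- TSaveScriptFinalStatusActor::FinishScriptExecution
    DECLARE $database AS Text; DECLARE $execution_id AS Text;
    DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32;
    DECLARE $finalization_status AS Int32;
    DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument;
    DECLARE $stats AS JsonDocument;
    DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional;
    DECLARE $ast_compression_method AS Optional;
    DECLARE $operation_ttl AS Interval;
    DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text;
    DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional;
    DECLARE $applicate_script_external_effect_required AS Bool;

    UPDATE `.metadata/script_executions`
    SET operation_status = $operation_status,
        execution_status = $execution_status,
        finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL),
        issues = $issues,
        plan = $plan,
        end_ts = CurrentUtcTimestamp(),
        stats = $stats,
        ast = $ast,
        ast_compressed = $ast_compressed,
        ast_compression_method = $ast_compression_method,
        expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL),
        customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL),
        user_token = IF($applicate_script_external_effect_required, $user_token, NULL),
        script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL),
        script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL)
    WHERE database = $database AND execution_id = $execution_id;

    DELETE FROM `.metadata/script_execution_leases`
    WHERE database = $database AND execution_id = $execution_id;

    -- TScriptLeaseUpdater::OnRunQuery
    DECLARE $database AS Text; DECLARE $execution_id AS Text;

    SELECT lease_deadline
    FROM `.metadata/script_execution_leases`
    WHERE database = $database AND execution_id = $execution_id
      AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL);
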
|79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2024-11-21T17:49:34.178681Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) -> PendingActivation 2024-11-21T17:49:34.178701Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work 2024-11-21T17:49:34.179036Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation 2024-11-21T17:49:34.179044Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work 2024-11-21T17:49:34.179076Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @96 PendingActivation -> PendingNodeInfo 2024-11-21T17:49:34.179363Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host 6:::1:28915 2024-11-21T17:49:34.179397Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @488 PendingNodeInfo -> PendingConnection 2024-11-21T17:49:34.179414Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2024-11-21T17:49:34.179429Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2024-11-21T17:49:34.179573Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2024-11-21T17:49:34.179818Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:33644 2024-11-21T17:49:34.179852Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2024-11-21T17:49:34.180010Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 543793 ProgramStartTime: 1877366976864 Serial: 605755873 ReceiverNodeId: 6 SenderActorId: "[5:605755873:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 543793" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 543793" AcceptUUID: "Cluster for process with id: 543793" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: false HandshakeId: "^\016\254\336l\265\220w\016\260\266R1\014I\020\001x\tK\256\227p\235\005\\r\345\232\006a\245" RequestXxhash: true RequestXdcShuffle: true 2024-11-21T17:49:34.180095Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 543793 ProgramStartTime: 1877366976864 Serial: 605755873 ReceiverNodeId: 6 SenderActorId: "[5:605755873:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 543793" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 543793" AcceptUUID: "Cluster for process with id: 543793" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: false HandshakeId: "^\016\254\336l\265\220w\016\260\266R1\014I\020\001x\tK\256\227p\235\005\\r\345\232\006a\245" RequestXxhash: true RequestXdcShuffle: true 2024-11-21T17:49:34.180104Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T17:49:34.180120Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @96 PendingActivation -> PendingNodeInfo 2024-11-21T17:49:34.180402Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host 5:::1:26716 2024-11-21T17:49:34.180409Z node 6 :INTERCONNECT 
DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2024-11-21T17:49:34.180413Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @488 PendingNodeInfo -> PendingConnection 2024-11-21T17:49:34.180418Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2024-11-21T17:49:34.180422Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2024-11-21T17:49:34.180424Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @217 PendingConnection -> PendingConnection 2024-11-21T17:49:34.180458Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 543793 ProgramStartTime: 1877371791764 Serial: 3060681945 SenderActorId: "[6:3060681945:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 543793" AcceptUUID: "Cluster for process with id: 543793" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: false UseXxhash: true UseXdcShuffle: true } 2024-11-21T17:49:34.180486Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2024-11-21T17:49:34.180598Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 543793 ProgramStartTime: 1877371791764 Serial: 3060681945 SenderActorId: "[6:3060681945:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 543793" AcceptUUID: "Cluster for process with id: 543793" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: false UseXxhash: true UseXdcShuffle: true } 2024-11-21T17:49:34.180607Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T17:49:34.180614Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2024-11-21T17:49:34.180657Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2024-11-21T17:49:34.180662Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2024-11-21T17:49:34.180666Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @347 PendingConnection -> StateWork 2024-11-21T17:49:34.180695Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:22:2048] 2024-11-21T17:49:34.180706Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:3060681945:0] peer: [5:605755873:0] socket: 25 2024-11-21T17:49:34.180713Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS10 traffic start 2024-11-21T17:49:34.180725Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS11 registering socket in PollerActor 2024-11-21T17:49:34.180731Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.180736Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2024-11-21T17:49:34.180743Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.180751Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2024-11-21T17:49:34.180756Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] 
ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2024-11-21T17:49:34.180759Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @347 PendingConnection -> StateWork 2024-11-21T17:49:34.180772Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:24:2048] 2024-11-21T17:49:34.180776Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:605755873:0] peer: [6:3060681945:0] socket: 24 2024-11-21T17:49:34.180780Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start 2024-11-21T17:49:34.180786Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2024-11-21T17:49:34.180790Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T17:49:34.180794Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2024-11-21T17:49:34.180797Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T17:49:34.180803Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2024-11-21T17:49:34.180839Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS01 InputSession created 2024-11-21T17:49:34.180845Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS01 InputSession created 2024-11-21T17:49:34.180854Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T17:49:34.180869Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.180877Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T17:49:34.180882Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.180889Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T17:49:34.180893Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.180897Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T17:49:34.180901Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.180907Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.180910Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.180915Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T17:49:34.180918Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T17:49:34.180926Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T17:49:34.180929Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T17:49:34.180933Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.180936Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.180954Z node 5 :INTERCONNECT_SESSION DEBUG: 
Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2024-11-21T17:49:34.180987Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 1 Confirm# 0 DataSize# 84 InflightDataAmount# 84 2024-11-21T17:49:34.181009Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T17:49:34.181026Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T17:49:34.181032Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2024-11-21T17:49:34.181055Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.181064Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.181067Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.181072Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2024-11-21T17:49:34.181086Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS22 outgoing packet Serial# 1 Confirm# 1 DataSize# 84 InflightDataAmount# 84 2024-11-21T17:49:34.181101Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T17:49:34.181117Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T17:49:34.181122Z node 5 :INTERCONNEC ... EBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.181188Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2024-11-21T17:49:34.181210Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T17:49:34.181215Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# 0 num# 1 err# 2024-11-21T17:49:34.181329Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS09 ReestablishConnection, reason# EndOfStream 2024-11-21T17:49:34.181339Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS07 socket disconnect 25 reason# EndOfStream 2024-11-21T17:49:34.181344Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS25 shutdown socket, reason# EndOfStream 2024-11-21T17:49:34.181372Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS15 start handshake 2024-11-21T17:49:34.181399Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 2 Confirm# 1 DataSize# 84 InflightDataAmount# 84 2024-11-21T17:49:34.181417Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS13 reestablish connection 2024-11-21T17:49:34.181421Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# EPIPE 2024-11-21T17:49:34.181427Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.181438Z node 6 :INTERCONNECT DEBUG: Handshake [6:26:2058] [node 5] ICH01 starting outgoing handshake 2024-11-21T17:49:34.181453Z node 6 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2024-11-21T17:49:34.181538Z node 6 :INTERCONNECT DEBUG: Handshake [6:26:2058] [node 5] ICH05 connected to peer 2024-11-21T17:49:34.181566Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] 
ICIS02 ReceiveData called 2024-11-21T17:49:34.181572Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# 0 num# 1 err# 2024-11-21T17:49:34.181612Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS09 ReestablishConnection, reason# EndOfStream 2024-11-21T17:49:34.181627Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:35494 2024-11-21T17:49:34.181660Z node 5 :INTERCONNECT DEBUG: Handshake [5:28:2058] [node 0] ICH02 starting incoming handshake 2024-11-21T17:49:34.181667Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS07 socket disconnect -1 reason# EndOfStream 2024-11-21T17:49:34.181671Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS15 start handshake 2024-11-21T17:49:34.181702Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2059] [node 6] ICH01 starting outgoing handshake 2024-11-21T17:49:34.181712Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:28:2058]) from: [6:3060681945:0] for: [5:605755873:0] 2024-11-21T17:49:34.181717Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS08 incoming handshake Self# [6:3060681945:0] Peer# [5:605755873:0] Counter# 1 LastInputSerial# 1 2024-11-21T17:49:34.181722Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:28:2058]) is held 2024-11-21T17:49:34.181736Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2024-11-21T17:49:34.181803Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2059] [node 6] ICH05 connected to peer 2024-11-21T17:49:34.181825Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:33660 2024-11-21T17:49:34.181854Z node 6 :INTERCONNECT DEBUG: Handshake [6:31:2059] [node 0] ICH02 starting incoming handshake 2024-11-21T17:49:34.181873Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:31:2059]) from: [5:605755873:0] for: [6:3060681945:0] 2024-11-21T17:49:34.181878Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS08 incoming handshake Self# [5:605755873:0] Peer# [6:3060681945:0] Counter# 1 LastInputSerial# 1 2024-11-21T17:49:34.181881Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2024-11-21T17:49:34.181899Z node 6 :INTERCONNECT INFO: Handshake [6:31:2059] [node 5] ICH04 handshake succeeded 2024-11-21T17:49:34.181950Z node 5 :INTERCONNECT INFO: Handshake [5:29:2059] [node 6] ICH04 handshake succeeded 2024-11-21T17:49:34.181986Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2024-11-21T17:49:34.181992Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:31:2059] poison: false 2024-11-21T17:49:34.181997Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:26:2058] poison: true 2024-11-21T17:49:34.182002Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @347 StateWork -> StateWork 2024-11-21T17:49:34.182008Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS09 handshake done sender: [6:31:2059] self: [6:3060681945:0] peer: [5:605755873:0] socket: 29 2024-11-21T17:49:34.182013Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS10 traffic start 2024-11-21T17:49:34.182022Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS11 registering socket in PollerActor 2024-11-21T17:49:34.182027Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] 
[node 5] ICS23 confirm count: 1 2024-11-21T17:49:34.182031Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2024-11-21T17:49:34.182037Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2024-11-21T17:49:34.182042Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2024-11-21T17:49:34.182046Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T17:49:34.182053Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2024-11-21T17:49:34.182056Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:28:2058] poison: true 2024-11-21T17:49:34.182061Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:29:2059] poison: false 2024-11-21T17:49:34.182064Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @347 StateWork -> StateWork 2024-11-21T17:49:34.182068Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:29:2059] self: [5:605755873:0] peer: [6:3060681945:0] socket: 28 2024-11-21T17:49:34.182072Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start 2024-11-21T17:49:34.182079Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2024-11-21T17:49:34.182083Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.182086Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2024-11-21T17:49:34.182103Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.182235Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS01 InputSession created 2024-11-21T17:49:34.182242Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS01 InputSession created 2024-11-21T17:49:34.182343Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T17:49:34.182353Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.182359Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T17:49:34.182365Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2024-11-21T17:49:34.182378Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.182382Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.182443Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T17:49:34.182447Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.182449Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T17:49:34.182452Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.182456Z node 6 
:INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T17:49:34.182459Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T17:49:34.182462Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.182464Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.182470Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2024-11-21T17:49:34.182477Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.182479Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T17:49:34.182485Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84 2024-11-21T17:49:34.182498Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T17:49:34.182502Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T17:49:34.182504Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T17:49:34.182516Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T17:49:34.182519Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2024-11-21T17:49:34.182527Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T17:49:34.182532Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2024-11-21T17:49:34.182534Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2024-11-21T17:49:34.182537Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2024-11-21T17:49:34.182540Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2024-11-21T17:49:34.182552Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS01 socket: 28 reason# 2024-11-21T17:49:34.182556Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:24:2048] VirtualId# [5:605755873:0] 2024-11-21T17:49:34.182560Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation 2024-11-21T17:49:34.182562Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# 2024-11-21T17:49:34.182580Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! 
NotYetConfirmed size: 0, Queue size: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2024-11-21T17:49:06.850470Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:06.864909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:06.866678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:06.866691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:06.866724Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:06.867231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:06.867258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:06.867284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:06.867296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:06.867306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:06.867317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:06.867326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:06.867337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:06.867347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:06.867358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.867371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:06.867384Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:06.871412Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:06.871433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T17:49:06.872562Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:06.872648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:06.872657Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:06.872691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:06.872747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:06.872750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:06.872756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:06.872767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:06.872770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:06.872780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:06.872789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:06.872792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:06.872798Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:06.872801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:06.872807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:06.872809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:06.872817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:06.872822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:06.872826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:06.872831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:06.872836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:06.872838Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:06.872860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=6; 2024-11-21T17:49:06.872867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2024-11-21T17:49:06.872873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:06.872880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2024-11-21T17:49:06.872894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:06.872899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:06.872901Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:06.872917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T17:49:06.872922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.872924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:06.872933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:06.872938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:06.872940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:06.872953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:06.872957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:06.872959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;f ... ta.cpp:29;EXECUTE:finishLoadingTime=957; 2024-11-21T17:49:34.486999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2594; 2024-11-21T17:49:34.487037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=30; 2024-11-21T17:49:34.487127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11; 2024-11-21T17:49:34.487134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=92; 2024-11-21T17:49:34.487150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=11; 2024-11-21T17:49:34.487167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:34.487174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=20; 2024-11-21T17:49:34.487187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=8; 2024-11-21T17:49:34.487196Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=3; 2024-11-21T17:49:34.487277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=75; 2024-11-21T17:49:34.487356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=73; 2024-11-21T17:49:34.487367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=6; 2024-11-21T17:49:34.487374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=4; 2024-11-21T17:49:34.487378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2024-11-21T17:49:34.487382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T17:49:34.487386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:34.487394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=5; 2024-11-21T17:49:34.487398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2024-11-21T17:49:34.487409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=6; 2024-11-21T17:49:34.487413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2024-11-21T17:49:34.487421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2024-11-21T17:49:34.487424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3739; 2024-11-21T17:49:34.487447Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40196124;raw_bytes=67627660; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64332088;raw_bytes=108739216; inactive portions=44;blobs=88;rows=1246652;bytes=70696640;raw_bytes=119259832; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:34.487466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1677:3653];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:34.487471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:34.487479Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:1677:3653];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:49:34.487502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1677:3653];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:34.487506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:34.487521Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:34.487525Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:34.487533Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:34.487562Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T17:49:34.487571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T17:49:34.487575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:34.487582Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:34.487586Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:34.487604Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:34.487618Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:34.487881Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:34.487901Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1710:3679];tablet_id=9437184;parent=[1:1677:3653];fline=manager.h:99;event=ask_data;request=request_id=281;1={portions_count=91};; 2024-11-21T17:49:34.488144Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1710:3679];tablet_id=9437184;parent=[1:1677:3653];fline=manager.h:99;event=ask_data;request=request_id=283;1={portions_count=47};; 2024-11-21T17:49:34.488653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:34.488720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:34.488726Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T17:49:34.488729Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:34.488735Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:34.488746Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:34.488780Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T17:49:34.488789Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T17:49:34.488794Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:34.488801Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:34.488806Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:34.488811Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:34.488823Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:34.489042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2024-11-21T17:49:34.489567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2024-11-21T17:49:34.490435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:34.490446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1677:3653];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead >> 
Secret::ValidationQueryService |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels |79.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] Test command err: 2024-11-21T17:49:32.206191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791193594249118:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:32.206285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001534/r3tmp/tmpPtEUse/pdisk_1.dat 2024-11-21T17:49:32.266113Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32501 TServer::EnableGrpc on GrpcPort 7291, node 1 2024-11-21T17:49:32.294174Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:32.294189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:32.294191Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2024-11-21T17:49:32.294223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T17:49:32.338242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:32.338267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:32.339336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:32.340664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:32.538085Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:32.538564Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:32.538877Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T17:49:32.538886Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2024-11-21T17:49:32.538891Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:49:32.538901Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T17:49:32.538924Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.538941Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.538952Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.538955Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:32.539174Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2024-11-21T17:49:32.539185Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2024-11-21T17:49:32.539191Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2024-11-21T17:49:32.539195Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2024-11-21T17:49:32.539199Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. 
Creating table 2024-11-21T17:49:32.539207Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2024-11-21T17:49:32.539216Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2024-11-21T17:49:32.539217Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2024-11-21T17:49:32.539219Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2024-11-21T17:49:32.540040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T17:49:32.540371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:32.540582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:32.541578Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2024-11-21T17:49:32.541582Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2024-11-21T17:49:32.541590Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715659 2024-11-21T17:49:32.541604Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2024-11-21T17:49:32.541604Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976715658 2024-11-21T17:49:32.541606Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715660 2024-11-21T17:49:32.602066Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715658. Doublechecking... 2024-11-21T17:49:32.608703Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2024-11-21T17:49:32.612666Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2024-11-21T17:49:32.664372Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2024-11-21T17:49:32.679048Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2024-11-21T17:49:32.692527Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2024-11-21T17:49:32.692687Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ad472654-3e39a404-9329393b-e4d0e930, Bootstrap. 
Database: /dc-1 2024-11-21T17:49:32.692800Z node 1 :KQP_PROXY DEBUG: Request has 18445011862336.858821s seconds to be completed 2024-11-21T17:49:32.693458Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NDVlYzNiODUtYTAzOWNmMGEtMjYxNTFkZC04M2U2YTExMA==, workerId: [1:7439791193594249816:2301], database: /dc-1, longSession: 1, local sessions count: 1 2024-11-21T17:49:32.693494Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:32.699006Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ad472654-3e39a404-9329393b-e4d0e930, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2024-11-21T17:49:32.699173Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDVlYzNiODUtYTAzOWNmMGEtMjYxNTFkZC04M2U2YTExMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7439791193594249816:2301] 2024-11-21T17:49:32.699187Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7439791193594249819:2443] 2024-11-21T17:49:32.699539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791193594249818:2303], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:32.699555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:32.699605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791193594249830:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:32.700484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2024-11-21T17:49:32.702257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791193594249833:2307], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:49:32.811766Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7439791193594249817:2302], selfId: [1:7439791193594249198:2256], source: [1:7439791193594249816:2301] 2024-11-21T17:49:32.811858Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ad472654-3e39a404-9329393b- ... mestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2024-11-21T17:49:34.713395Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZDFhN2QxYTQtZGZiNDZkOTYtMzRhNTJjODUtYTcyM2VlOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 16, targetId: [2:7439791204798835249:2358] 2024-11-21T17:49:34.713409Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 16 timeout: 300.000000s actor id: [2:7439791204798835305:2561] 2024-11-21T17:49:34.747929Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 16, sender: [2:7439791204798835304:2375], selfId: [2:7439791204798834351:2172], source: [2:7439791204798835249:2358] 2024-11-21T17:49:34.748031Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: bb02dec5-5784874f-6444b8d7-2856b478, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDFhN2QxYTQtZGZiNDZkOTYtMzRhNTJjODUtYTcyM2VlOTc=, TxId: 2024-11-21T17:49:34.748051Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: bb02dec5-5784874f-6444b8d7-2856b478, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDFhN2QxYTQtZGZiNDZkOTYtMzRhNTJjODUtYTcyM2VlOTc=, TxId: 2024-11-21T17:49:34.748054Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: bb02dec5-5784874f-6444b8d7-2856b478. SUCCESS. Issues: 2024-11-21T17:49:34.748179Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Njk5MzkwNWMtNTRjOWQ3OTgtNTVjNTc1MzktNDA3Y2U4MWE=, workerId: [2:7439791204798835140:2317], local sessions count: 2 2024-11-21T17:49:34.748188Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZDFhN2QxYTQtZGZiNDZkOTYtMzRhNTJjODUtYTcyM2VlOTc=, workerId: [2:7439791204798835249:2358], local sessions count: 1 2024-11-21T17:49:35.526487Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:49:35.620349Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, Bootstrap. 
Database: /dc-1 2024-11-21T17:49:35.620434Z node 2 :KQP_PROXY DEBUG: Request has 18445011862333.931192s seconds to be completed 2024-11-21T17:49:35.620878Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZDlmMzU0YWQtNTc2Yzg1NjUtNmZhNmM3M2QtYTUyY2FmMjI=, workerId: [2:7439791209093802672:2388], database: /dc-1, longSession: 1, local sessions count: 2 2024-11-21T17:49:35.620910Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:35.621297Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2024-11-21T17:49:35.621446Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZDlmMzU0YWQtNTc2Yzg1NjUtNmZhNmM3M2QtYTUyY2FmMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 18, targetId: [2:7439791209093802672:2388] 2024-11-21T17:49:35.621458Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 18 timeout: 300.000000s actor id: [2:7439791209093802674:2603] 2024-11-21T17:49:35.706734Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 18, sender: [2:7439791209093802673:2389], selfId: [2:7439791204798834351:2172], source: [2:7439791209093802672:2388] 2024-11-21T17:49:35.706974Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDlmMzU0YWQtNTc2Yzg1NjUtNmZhNmM3M2QtYTUyY2FmMjI=, TxId: 01jd7xfzjmarma77yx65b15wtk 2024-11-21T17:49:35.707015Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, State: Get lease info, RunDataQuery: -- TScriptLeaseUpdater::OnGetLeaseInfo DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $lease_duration AS Interval; UPDATE `.metadata/script_execution_leases` SET lease_deadline=(CurrentUtcTimestamp() + $lease_duration) WHERE database = $database AND execution_id = $execution_id; 2024-11-21T17:49:35.707338Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZDlmMzU0YWQtNTc2Yzg1NjUtNmZhNmM3M2QtYTUyY2FmMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 19, targetId: [2:7439791209093802672:2388] 2024-11-21T17:49:35.707350Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [2:7439791209093802697:2612] 2024-11-21T17:49:35.767153Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 19, sender: [2:7439791209093802696:2396], selfId: [2:7439791204798834351:2172], source: [2:7439791209093802672:2388] 2024-11-21T17:49:35.767263Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDlmMzU0YWQtNTc2Yzg1NjUtNmZhNmM3M2QtYTUyY2FmMjI=, TxId: 2024-11-21T17:49:35.767283Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, State: Update lease, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDlmMzU0YWQtNTc2Yzg1NjUtNmZhNmM3M2QtYTUyY2FmMjI=, TxId: 2024-11-21T17:49:35.767418Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZDlmMzU0YWQtNTc2Yzg1NjUtNmZhNmM3M2QtYTUyY2FmMjI=, workerId: [2:7439791209093802672:2388], local sessions count: 1 2024-11-21T17:49:35.768811Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jd7xfzmr623h37wrxq7f1gmb, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTBjN2FkODEtYTcyY2U4ZTEtYTUxNmZjNmMtNTQ2NGJkMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 20, targetId: [2:7439791204798835159:2327] 2024-11-21T17:49:35.768836Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [2:7439791209093802721:2620] 2024-11-21T17:49:35.824655Z node 2 :KQP_PROXY DEBUG: TraceId: "01jd7xfzmr623h37wrxq7f1gmb", Forwarded response to sender actor, requestId: 20, sender: [2:7439791209093802720:2403], selfId: [2:7439791204798834351:2172], source: [2:7439791204798835159:2327] 2024-11-21T17:49:35.826096Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, Bootstrap. Start TCheckLeaseStatusQueryActor 2024-11-21T17:49:35.826122Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, Bootstrap. 
Database: /dc-1 2024-11-21T17:49:35.826195Z node 2 :KQP_PROXY DEBUG: Request has 18445011862333.725425s seconds to be completed 2024-11-21T17:49:35.826608Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YzRjYjgzMmQtNzZiNzhlYmQtY2IzOTAyODEtODlhYzJlODQ=, workerId: [2:7439791209093802761:2415], database: /dc-1, longSession: 1, local sessions count: 2 2024-11-21T17:49:35.826641Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T17:49:35.826821Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, RunDataQuery: -- TCheckLeaseStatusQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, execution_status, finalization_status, issues, run_script_actor_id FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2024-11-21T17:49:35.826938Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YzRjYjgzMmQtNzZiNzhlYmQtY2IzOTAyODEtODlhYzJlODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [2:7439791209093802761:2415] 2024-11-21T17:49:35.826943Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [2:7439791209093802763:2636] 2024-11-21T17:49:35.878917Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 22, sender: [2:7439791209093802762:2416], selfId: [2:7439791204798834351:2172], source: [2:7439791209093802761:2415] 2024-11-21T17:49:35.878994Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzRjYjgzMmQtNzZiNzhlYmQtY2IzOTAyODEtODlhYzJlODQ=, TxId: 2024-11-21T17:49:35.879028Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzRjYjgzMmQtNzZiNzhlYmQtY2IzOTAyODEtODlhYzJlODQ=, TxId: 2024-11-21T17:49:35.879048Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 4efa6bfd-9b1a7d00-dc7fbe43-2d07bf60, reply success 2024-11-21T17:49:35.879196Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YzRjYjgzMmQtNzZiNzhlYmQtY2IzOTAyODEtODlhYzJlODQ=, workerId: [2:7439791209093802761:2415], local sessions count: 1 2024-11-21T17:49:35.879770Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTBjN2FkODEtYTcyY2U4ZTEtYTUxNmZjNmMtNTQ2NGJkMjM=, workerId: [2:7439791204798835159:2327], local sessions count: 0 |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> 
ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |80.0%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:23.080894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:23.080922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:23.080927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:23.080932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:23.080950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:23.080954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:23.080963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:23.081041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:23.092142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:23.092166Z node 1 
:IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:23.097299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:23.098127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:23.098170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:23.102108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:23.104598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:23.104757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.104965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:23.107483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.107887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:23.107903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.107947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:23.107956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:23.107964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:23.107984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.112658Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:23.133987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:23.134085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.134160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:23.134215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:23.134224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.135982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.136030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:23.136095Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.136106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:23.136111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:23.136117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:23.140961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.140988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:23.140996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:23.141588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.141599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.141605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.141627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.142331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:23.142824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:23.142888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:23.143066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:23.143115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:23.143134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.143183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:23.143189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:23.143218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:23.143230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:23.143706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:23.143715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:23.143756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:23.143760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:23.143828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:23.143834Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:23.143842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:23.143845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.143849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:23.143853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:23.143856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:23.143858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:23.143867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:23.143871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:23.143875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:23.144225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:23.144257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:23.144262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:23.144267Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:23.144273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:23.144289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
oseLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 59 } } 2024-11-21T17:49:37.358833Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:37.358880Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 59 } } 2024-11-21T17:49:37.358893Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 59 } } 2024-11-21T17:49:37.358896Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:49:37.358924Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.358928Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2024-11-21T17:49:37.359555Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.359605Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.359613Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:37.359629Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T17:49:37.359665Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:37.359989Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:49:37.360019Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:49:37.360312Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:37.360339Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 51539609707 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:37.360347Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:49:37.360446Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:49:37.360486Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:49:37.361850Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:37.361857Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:37.361950Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:37.361961Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:49:37.362194Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.362206Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:49:37.362360Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:37.362374Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:49:37.362382Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:49:37.362387Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:49:37.362395Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:37.362415Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:49:37.362601Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 307 } } 2024-11-21T17:49:37.362613Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:37.362646Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 307 } } 2024-11-21T17:49:37.362658Z node 12 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 307 } } 2024-11-21T17:49:37.362842Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:49:37.362849Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:37.362863Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:49:37.362869Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:49:37.362875Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:49:37.362887Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:37.362891Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.362895Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:37.362901Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:49:37.363346Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:49:37.363589Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.363607Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.363622Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:49:37.363627Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:49:37.363639Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:49:37.363642Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:37.363646Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:49:37.363658Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:340:2315] message: TxId: 102 2024-11-21T17:49:37.363663Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:49:37.363666Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:49:37.363669Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:49:37.363695Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:37.364059Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:49:37.364068Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:389:2363] TestWaitNotification: OK eventTxId 102 |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Simple >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] Test command err: 2024-11-21T17:49:14.583783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:14.584294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:14.584322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ca9/r3tmp/tmpUneWw4/pdisk_1.dat 2024-11-21T17:49:14.687783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.705247Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:14.747608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:14.747632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:14.758121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:14.861993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.876757Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:14.876987Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:14.877074Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:14.877135Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:14.887154Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:14.887337Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:14.887363Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:14.887548Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:14.887557Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:14.887565Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:14.887619Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:14.891239Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:14.891307Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:14.891329Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:14.891334Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:14.891337Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:14.891341Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.891455Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.891461Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.891588Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:14.891608Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:14.891619Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.891622Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.891627Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:14.891631Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.891636Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:14.891641Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.891646Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:14.891648Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:14.891651Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:14.891654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:14.891670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:14.891672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:14.891691Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:14.891739Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:14.891746Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:14.891760Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:14.891766Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:14.891769Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:14.891772Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:14.891775Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.891813Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:14.891817Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:14.891820Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:14.891829Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.891840Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:14.891842Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:14.891844Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:14.891846Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.891850Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:14.892048Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:14.892055Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:14.902878Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:14.902906Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.902914Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.902942Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:14.902961Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:15.083596Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.083617Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.083624Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:15.083642Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:15.083648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:15.083673Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:15.083681Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:15.083686Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:15.083691Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:15.084431Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:15.084449Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:15.084551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.084557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.084564Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:15.084572Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:15.084577Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:15.084586Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... leId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\002\000\004\000\000\000\002\000\000\000\004\000\000\000\000\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "index" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 3 Name: "value" Type: 1 } MaxValueSizeBytes: 4 } } 2024-11-21T17:49:38.075095Z node 13 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Int32 : 2, Int32 : 0) 2024-11-21T17:49:38.075104Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 2, Int32 : 0) table: [72057594046644480:2:1] 2024-11-21T17:49:38.075120Z node 13 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:49:38.075125Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:49:38.075181Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:49:38.075202Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:38.075207Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:49:38.075213Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:38.075218Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:38.075229Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2024-11-21T17:49:38.075253Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T17:49:38.075259Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:38.075262Z node 13 :TX_DATASHARD TRACE: Advance execution 
plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:38.075266Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:49:38.075269Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:49:38.075278Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2024-11-21T17:49:38.075293Z node 13 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715663 not found 2024-11-21T17:49:38.075298Z node 13 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715663 expected 2:5 found 0:0 2024-11-21T17:49:38.075314Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2024-11-21T17:49:38.075330Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:38.075334Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:49:38.075337Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:38.075341Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:38.075381Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:38.075385Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:38.075389Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:38.075392Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:38.075405Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:49:38.075408Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:38.075412Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T17:49:38.075544Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:38.075550Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:38.075557Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2024-11-21T17:49:38.075575Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:38.075709Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YTBjOTdjMWItMTkzOGU3ZWUtYzY0OTRmYmEtOTRjZmFiZQ==, ActorId: [13:912:2730], ActorState: ExecuteState, TraceId: 01jd7xg1w6ce243tyw62pvwgn1, Create QueryResponse for error on request, msg: 2024-11-21T17:49:38.075883Z node 13 
:KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xg1w6ce243tyw62pvwgn1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTBjOTdjMWItMTkzOGU3ZWUtYzY0OTRmYmEtOTRjZmFiZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:38.075951Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:964:2730], Recipient [13:837:2672]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 964 RawX2: 55834577578 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\002 \005)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2024-11-21T17:49:38.075958Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:38.075983Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:837:2672], Recipient [13:837:2672]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:49:38.075988Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:49:38.075997Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:38.076021Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:49:38.076033Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:49:38.076039Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T17:49:38.076043Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:49:38.076047Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:38.076052Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:38.076058Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/18446744073709551615 ImmediateWriteEdgeReplied# v3501/18446744073709551615 2024-11-21T17:49:38.076065Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2024-11-21T17:49:38.076070Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T17:49:38.076073Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:38.076076Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:49:38.076080Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:49:38.076091Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2024-11-21T17:49:38.076103Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 
Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2024-11-21T17:49:38.076118Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T17:49:38.076126Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T17:49:38.076130Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:49:38.076133Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:38.076136Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:38.076142Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:49:38.076156Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2024-11-21T17:49:38.076160Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:38.076163Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:38.076167Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:38.076174Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T17:49:38.076177Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:38.076182Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2024-11-21T17:49:38.076192Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:38.076196Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:38.076201Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:38.076408Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:54:2101], Recipient [13:837:2672]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 13 Status: STATUS_NOT_FOUND >> TxUsage::WriteToTopic_Demo_44 [GOOD] |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:38.478445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:38.478468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:38.478473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:38.478478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:38.478490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:38.478494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:38.478502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:38.478574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:38.488375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:38.488396Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:38.491571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:38.492406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:38.492446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:38.494118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:38.494303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:38.494394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:38.494476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:38.495977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:38.496298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:38.496311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:38.496359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:38.496368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:38.496375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:38.496390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.497849Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:38.513293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:38.513386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.513451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:38.513498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:38.513508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.514405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:38.514455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:38.514509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.514520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:38.514524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:38.514530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:38.515036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.515048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:38.515053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:38.515430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.515443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.515449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:38.515456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:38.515973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:38.516381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:38.516433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:38.516617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:38.516635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:38.516647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:38.516695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:38.516703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:38.516730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:38.516742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:38.517154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:38.517161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:38.517198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:38.517204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:38.517286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.517293Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:38.517304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:38.517309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:38.517316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:38.517321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:38.517327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:38.517331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:38.517342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:38.517348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:38.517352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:38.517632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:38.517644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:38.517647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:38.517651Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:38.517655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:38.517664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... calPathId: 1] was 2 2024-11-21T17:49:38.525247Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:49:38.525262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:49:38.525269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:49:38.525273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:49:38.525296Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2024-11-21T17:49:38.525301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:38.525309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2024-11-21T17:49:38.526059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.526073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId#100:0 at tablet72057594046678944 2024-11-21T17:49:38.526080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2024-11-21T17:49:38.526129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:49:38.526209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:49:38.527041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.527058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:38.527067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T17:49:38.527100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { 
AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:38.527567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:49:38.527601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:49:38.527674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:38.527695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:38.527702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2024-11-21T17:49:38.527729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:49:38.527755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:38.527765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:49:38.528221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:38.528252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:38.528288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:49:38.528308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:38.528314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:49:38.528319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T17:49:38.528391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:49:38.528398Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T17:49:38.528410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:49:38.528414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:49:38.528421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:49:38.528426Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:49:38.528431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:49:38.528435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:49:38.528447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:38.528453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T17:49:38.528457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:49:38.528461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T17:49:38.528578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:49:38.528592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:49:38.528597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:49:38.528602Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:49:38.528607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:38.528724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:49:38.528735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:49:38.528739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:49:38.528744Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:49:38.528749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:49:38.528760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T17:49:38.529806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:49:38.529868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T17:49:38.529930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 
2024-11-21T17:49:38.529942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T17:49:38.530036Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:49:38.530053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:49:38.530058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:306:2298] TestWaitNotification: OK eventTxId 100 2024-11-21T17:49:38.530134Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:38.530167Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 44us result status StatusSuccess 2024-11-21T17:49:38.530248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> Secret::SimpleQueryService >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite >> DataShardReadIterator::ShouldReadFromHeadWithConflict [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus |80.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:09.345899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:09.345921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:09.345926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:09.345930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:09.345944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:09.345947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:09.345956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:09.346030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:09.356322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:09.356344Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:09.358631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:09.358729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:49:09.358762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:09.361153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:09.361236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:09.361339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.361529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:09.362193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.362473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:09.362487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.362499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:09.362507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:09.362513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:09.362551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:09.363857Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:09.380081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:09.380163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.380272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:09.380318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:09.380326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.381109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.381132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T17:49:09.381181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.381191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:09.381195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:09.381199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:09.381576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.381584Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:09.381588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:09.381847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.381855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.381861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.381867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.382427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:09.382805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:09.382850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:09.383016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:09.383040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:09.383046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.383103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:09.383109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:09.383140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:09.383150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:09.383510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:09.383518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:09.383553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:09.383558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:09.383629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:09.383634Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:09.383645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:09.383649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.383655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:09.383660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:09.383665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:09.383668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:09.383678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:09.383683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:09.383686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
schemeshard: 72057594046678944 2024-11-21T17:49:38.909383Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 91us result status StatusSuccess 2024-11-21T17:49:38.909660Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 
4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:38.910759Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:38.910816Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 66us result status StatusSuccess 2024-11-21T17:49:38.910963Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateDropSolomon |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_44 [GOOD] Test command err: 2024-11-21T17:47:18.219163Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790621073522492:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:18.219306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00141c/r3tmp/tmpmNMNvb/pdisk_1.dat 2024-11-21T17:47:18.266018Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:47:18.292342Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16306, node 1 2024-11-21T17:47:18.308167Z INFO: TTestServer started on Port 7581 GrpcPort 16306 2024-11-21T17:47:18.308395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00141c/r3tmp/yandexBJH1BR.tmp 2024-11-21T17:47:18.308409Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00141c/r3tmp/yandexBJH1BR.tmp 2024-11-21T17:47:18.308459Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/6fwh/00141c/r3tmp/yandexBJH1BR.tmp 2024-11-21T17:47:18.308491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7581 2024-11-21T17:47:18.321291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:47:18.321315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:47:18.322744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:16306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:47:18.391552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.395120Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:47:18.402309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:47:18.526368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790621073523247:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.526387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790621073523252:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.526394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.527107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:47:18.527507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790621073523290:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.527544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:47:18.528734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790621073523261:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:47:18.581000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.628391Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790621073523398:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:47:18.628495Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWE2NGZhYjItNWU0ZTY4MWItNjQ3YjI1ZGMtYzE5MjZh, ActorId: [1:7439790621073523244:2304], ActorState: ExecuteState, TraceId: 01jd7xbskx3e9fn5qds36f5ks3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:47:18.629024Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:47:18.635287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:47:18.694381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439790621073523603:2594] 2024-11-21T17:47:23.219394Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439790621073522492:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:47:23.219430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:47:23.794519Z :WriteToTopic_Two_WriteSession INFO: TTopicSdkTestSetup started 2024-11-21T17:47:23.805224Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:47:23.814354Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:47:23.814549Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:47:23.814603Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:47:23.814640Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:47:23.814649Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:47:23.814651Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:47:23.814654Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:47:23.814671Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:47:23.814683Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:47:23.814686Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:47:23.860033Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439790642548360399:2773] connected; active server actors: 1 2024-11-21T17:47:23.860096Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:47:23.860109Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:47:23.860119Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439790642548360398:2772], now have 1 active actors on pipe 2024-11-21T17:47:23.860258Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:47:23.860310Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T17:47:23.860437Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T17:47:23.860533Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T17:47:23.862725Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439790621073522901 RawX2: 4294969490 } TxId: 281474976715673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles ... 
Size = 0 to budget 2024-11-21T17:49:37.167999Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 0 2024-11-21T17:49:37.168011Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 8 2024-11-21T17:49:37.168031Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 96 (startOffset 0) session test-consumer_9_1_3166771204393440486_v1 4 2024-11-21T17:49:37.168040Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 8 } 2024-11-21T17:49:37.168047Z :DEBUG: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] [] Commit offsets [96, 100). Partition stream id: 1 2024-11-21T17:49:37.168050Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 93 endOffset 100 with cookie 8 2024-11-21T17:49:37.168059Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:49:37.168072Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 replying for commits: assignId# 1, from# 8, to# 8, offset# 93 2024-11-21T17:49:37.168277Z :DEBUG: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 93 } } 2024-11-21T17:49:37.168318Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 96 end: 100 } } } } 2024-11-21T17:49:37.168380Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 100 prev 93 end 100 by cookie 10 2024-11-21T17:49:37.168383Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:49:37.168393Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 9 2024-11-21T17:49:37.168404Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 9 } 2024-11-21T17:49:37.168409Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 96 endOffset 100 with cookie 9 2024-11-21T17:49:37.168412Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 replying for commits: assignId# 1, from# 9, to# 9, offset# 96 2024-11-21T17:49:37.168540Z :DEBUG: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 96 } } 2024-11-21T17:49:37.168539Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest 
topic: 'topic_A' requestId: 2024-11-21T17:49:37.168552Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T17:49:37.168582Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 100 (startOffset 0) session test-consumer_9_1_3166771204393440486_v1 2024-11-21T17:49:37.168609Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:49:37.168837Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 100 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:49:37.168848Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:49:37.168857Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 10 2024-11-21T17:49:37.168872Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 10 } 2024-11-21T17:49:37.168880Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 100 endOffset 100 with cookie 10 2024-11-21T17:49:37.168887Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 replying for commits: assignId# 1, from# 10, to# 10, offset# 100 2024-11-21T17:49:37.169063Z :DEBUG: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 100 } } 2024-11-21T17:49:37.833479Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 checking auth because of timeout 2024-11-21T17:49:37.833543Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 auth for : test-consumer 2024-11-21T17:49:37.833900Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 Handle describe topics response 2024-11-21T17:49:37.833937Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 auth is DEAD 2024-11-21T17:49:37.833962Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 auth ok: topics# 1, initDone# 1 2024-11-21T17:49:37.862162Z :INFO: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 61080 BytesRead: 100000000 MessagesRead: 100 BytesReadCompressed: 100000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:49:38.829948Z :INFO: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] Closing read session. 
Close timeout: 0.000000s 2024-11-21T17:49:38.829996Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:99:100 2024-11-21T17:49:38.830011Z :INFO: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 62048 BytesRead: 100000000 MessagesRead: 100 BytesReadCompressed: 100000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:49:38.830041Z :NOTICE: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:49:38.830052Z :DEBUG: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] [] Abort session to cluster 2024-11-21T17:49:38.830413Z :NOTICE: [/Root] [/Root] [cc23af7f-e166912a-d101fcd4-74ea356f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:49:38.830523Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 grpc read done: success# 0, data# { } 2024-11-21T17:49:38.830542Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 grpc read failed 2024-11-21T17:49:38.830551Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 grpc closed 2024-11-21T17:49:38.830567Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_3166771204393440486_v1 is DEAD 2024-11-21T17:49:38.830677Z :INFO: [/Root] SessionId [test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T17:49:38.830682Z :INFO: [/Root] SessionId [test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:49:38.830692Z :DEBUG: [/Root] SessionId [test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:49:38.830790Z :INFO: [/Root] SessionId [test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:49:38.830796Z :DEBUG: [/Root] SessionId [test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:49:38.830775Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:49:38.830791Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_9_1_3166771204393440486_v1 2024-11-21T17:49:38.830803Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790954077439918:2504] destroyed 2024-11-21T17:49:38.830823Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_9_1_3166771204393440486_v1 2024-11-21T17:49:38.830884Z node 9 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [9:7439790954077439915:2501] disconnected; active server actors: 1 2024-11-21T17:49:38.830899Z node 9 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [9:7439790954077439915:2501] client test-consumer disconnected session test-consumer_9_1_3166771204393440486_v1 2024-11-21T17:49:38.831179Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0 grpc read done: success: 0 data: 2024-11-21T17:49:38.831190Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0 grpc read failed 2024-11-21T17:49:38.831196Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0 grpc closed 2024-11-21T17:49:38.831199Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|4ca4f539-74050131-4d5eae06-b3132dd9_0 is DEAD 2024-11-21T17:49:38.831399Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:49:38.831416Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:49:38.831453Z node 9 :PERSQUEUE DEBUG: [PQ: 
72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:49:38.831467Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790954077439763:2477] destroyed 2024-11-21T17:49:38.831475Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:49:38.831478Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439790954077439766:2477] destroyed 2024-11-21T17:49:38.831491Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2024-11-21T17:49:03.028412Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T17:49:03.043281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:49:03.045856Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T17:49:03.045876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:49:03.045924Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T17:49:03.046492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:49:03.046523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:49:03.046549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:49:03.046561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:49:03.046572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:49:03.046581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:49:03.046591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:49:03.046602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:49:03.046614Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:49:03.046625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.046638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:49:03.046648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:49:03.049824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T17:49:03.049841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T17:49:03.050696Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T17:49:03.050751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:49:03.050757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:49:03.050779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.050809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:49:03.050818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:49:03.050821Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:49:03.050826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:49:03.050833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:49:03.050837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:49:03.050840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:49:03.050850Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:49:03.050854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:49:03.050858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:49:03.050860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T17:49:03.050866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T17:49:03.050869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:49:03.050874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:49:03.050876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T17:49:03.050883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:49:03.050888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:49:03.050891Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T17:49:03.050896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:49:03.050901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:49:03.050903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T17:49:03.050928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2024-11-21T17:49:03.050935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2024-11-21T17:49:03.050941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2024-11-21T17:49:03.050949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2024-11-21T17:49:03.050963Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:49:03.050967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:49:03.050969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T17:49:03.050983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:49:03.050988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.050990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T17:49:03.050997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:49:03.051001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:49:03.051004Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T17:49:03.051015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:49:03.051020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:49:03.051022Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;f ... 
mon_data.cpp:29;EXECUTE:finishLoadingTime=4302; 2024-11-21T17:49:39.100518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7276; 2024-11-21T17:49:39.100589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=58; 2024-11-21T17:49:39.100770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=23; 2024-11-21T17:49:39.100787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=190; 2024-11-21T17:49:39.100809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=14; 2024-11-21T17:49:39.100827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T17:49:39.100835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=21; 2024-11-21T17:49:39.100851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=10; 2024-11-21T17:49:39.100861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=4; 2024-11-21T17:49:39.100995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=127; 2024-11-21T17:49:39.101186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=184; 2024-11-21T17:49:39.101204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=11; 2024-11-21T17:49:39.101216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=6; 2024-11-21T17:49:39.101223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2024-11-21T17:49:39.101230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2024-11-21T17:49:39.101238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2024-11-21T17:49:39.101254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=10; 
2024-11-21T17:49:39.101261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=2; 2024-11-21T17:49:39.101278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2024-11-21T17:49:39.101285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2024-11-21T17:49:39.101296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2024-11-21T17:49:39.101302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=8723; 2024-11-21T17:49:39.101338Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=62;blobs=124;rows=1845000;bytes=117739448;raw_bytes=191860690; inactive portions=123;blobs=246;rows=3201219;bytes=206224980;raw_bytes=330057248; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T17:49:39.101372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2288:4257];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T17:49:39.101380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T17:49:39.101394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T17:49:39.101438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2288:4257];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T17:49:39.101446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T17:49:39.101470Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=202797666304;kff=0.3; 2024-11-21T17:49:39.101476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:39.101487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:39.101552Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T17:49:39.101565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T17:49:39.101572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:39.101584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:39.101589Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:39.101629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:39.101653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:39.102008Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:39.102030Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2323:4285];tablet_id=9437184;parent=[1:2288:4257];fline=manager.h:99;event=ask_data;request=request_id=411;1={portions_count=185};; 2024-11-21T17:49:39.103111Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2323:4285];tablet_id=9437184;parent=[1:2288:4257];fline=manager.h:99;event=ask_data;request=request_id=413;1={portions_count=62};; 2024-11-21T17:49:39.103199Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T17:49:39.103244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T17:49:39.103249Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T17:49:39.103254Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T17:49:39.103260Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T17:49:39.103271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:49:39.103322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T17:49:39.103331Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T17:49:39.103337Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T17:49:39.103346Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:39.103352Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T17:49:39.103358Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T17:49:39.103373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T17:49:39.103533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=185;path_id=1; 2024-11-21T17:49:39.104651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=185;path_id=1; 2024-11-21T17:49:39.108402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T17:49:39.108423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2288:4257];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:08.384258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:08.384287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:08.384292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:08.384297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:08.384313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:08.384316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2024-11-21T17:49:08.384325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:08.384423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:08.395661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:08.395690Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:08.398302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:08.398435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:08.398469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:08.401067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:08.401162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:08.401284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:08.401457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:08.402054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:08.402377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:08.402387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:08.402399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:08.402422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:08.402428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:08.402473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:08.403812Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:08.425532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:08.425612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.425670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 
0 2024-11-21T17:49:08.425709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:08.425715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.426280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:08.426299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:08.426343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.426350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:08.426354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:08.426357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:08.426642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.426649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:08.426652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:08.426850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.426855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.426859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:08.426864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:08.427265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:08.427549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:08.427591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:08.427735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:08.427753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:08.427758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:08.427805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:08.427809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:08.427833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:08.427842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:08.428102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:08.428108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:08.428142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:08.428146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:08.428204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.428208Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:08.428215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:08.428217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:08.428222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:08.428243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:08.428249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:08.428253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:08.428262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:08.428267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:08.428271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
emeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.213057Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:39.213177Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T17:49:39.213184Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:39.213244Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:49:39.213265Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2024-11-21T17:49:39.213268Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2024-11-21T17:49:39.213274Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2024-11-21T17:49:39.213371Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.213379Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.213382Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:39.213585Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.213595Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.213598Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:39.213602Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:49:39.213605Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:49:39.213616Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T17:49:39.213790Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:49:39.213797Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:39.213833Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:39.213849Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1003:0 progress is 3/3 2024-11-21T17:49:39.213852Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T17:49:39.213855Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T17:49:39.213858Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T17:49:39.213863Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:39.213866Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:39.213883Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:39.213886Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:49:39.213889Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:49:39.213892Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:39.213895Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:49:39.213897Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:49:39.213902Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:49:39.213993Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.214309Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.214324Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.214328Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.214338Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.215089Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:39.215475Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 489626274069 } TabletId: 72075186233409546 State: 4 2024-11-21T17:49:39.215492Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:39.215955Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:39.216022Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:49:39.216063Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2024-11-21T17:49:39.216109Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:49:39.216191Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:39.216196Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:49:39.216207Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:39.216211Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:49:39.216215Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:39.216925Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:49:39.216938Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2024-11-21T17:49:39.216966Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:49:39.217013Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:49:39.217018Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:49:39.217122Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:49:39.217148Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:39.217152Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:615:2544] 2024-11-21T17:49:39.218252Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 489626274070 } TabletId: 72075186233409547 State: 4 2024-11-21T17:49:39.218268Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:49:39.218572Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:39.218629Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2024-11-21T17:49:39.218719Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:39.218765Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:49:39.219218Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:39.219227Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:39.219239Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:39.219705Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:39.219716Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2024-11-21T17:49:39.219785Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:49:39.219850Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:49:39.219860Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> Secret::DeactivatedQueryService >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] Test command err: 2024-11-21T17:49:14.097586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:14.098041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:14.098061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ccc/r3tmp/tmpSUxcGG/pdisk_1.dat 2024-11-21T17:49:14.200954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.219487Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:14.264903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:14.264941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:14.276383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:14.380952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.396017Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:14.396269Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:14.396352Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:14.396421Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:14.404263Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:14.404419Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:14.404444Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:14.404609Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:14.404617Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:14.404625Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:14.404669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:14.408475Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:14.408539Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:14.408561Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:14.408566Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:14.408571Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:14.408576Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.408679Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.408687Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.408811Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:14.408829Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:14.408842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.408847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.408854Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:14.408862Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.408868Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:14.408876Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.408881Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:14.408885Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:14.408890Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:14.408896Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:14.408917Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:14.408921Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:14.408943Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:14.408999Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:14.409010Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:14.409027Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:14.409035Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:14.409040Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:14.409046Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:14.409051Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.409093Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:14.409098Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:14.409101Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:14.409105Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.409115Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:14.409119Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:14.409123Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:14.409127Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.409132Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:14.409360Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:14.409368Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:14.419680Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:14.419707Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.419714Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.419740Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:14.419757Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:14.593628Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.593655Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.593663Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:14.593685Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:14.593692Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:14.593743Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.593755Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:14.593760Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:14.593766Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:14.594618Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:14.594641Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.594787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.594794Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.594801Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.594808Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:14.594814Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.594824Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 2024-11-21T17:49:40.113977Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:40.113979Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:40.113985Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:40.113987Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:40.113989Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T17:49:40.113991Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:49:40.113993Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2024-11-21T17:49:40.114012Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:40.114014Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:40.114016Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:40.114019Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:49:40.114020Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2024-11-21T17:49:40.114022Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:40.114030Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T17:49:40.114049Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T17:49:40.114052Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 3} after executionsCount# 2 
2024-11-21T17:49:40.114054Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:40.114060Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} finished in read 2024-11-21T17:49:40.114064Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:49:40.114066Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:40.114067Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:40.114069Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:40.114072Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:49:40.114074Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:40.114076Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T17:49:40.114078Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:40.114081Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:40.114084Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:40.114086Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:40.114149Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZWIwOWI2MWEtNDJjZThlZTctNmNhZjFjYjItNTk5OWEwNjA=, workerId: [15:1120:2893], local sessions count: 0 2024-11-21T17:49:40.114250Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T17:49:40.114264Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:40.114272Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:40.114281Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T17:49:40.114283Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:49:40.114286Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:40.114290Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:40.114303Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2024-11-21T17:49:40.114308Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T17:49:40.114311Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2024-11-21T17:49:40.114316Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:40.114319Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:40.114326Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T17:49:40.114341Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T17:49:40.114343Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 4} after executionsCount# 1 2024-11-21T17:49:40.114347Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:40.114355Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} finished in read 2024-11-21T17:49:40.114358Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T17:49:40.114360Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:40.114362Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:40.114364Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:40.114367Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T17:49:40.114369Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:40.114371Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2024-11-21T17:49:40.114374Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:49:40.114413Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T17:49:40.114417Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:40.114420Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:40.114424Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T17:49:40.114426Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:49:40.114428Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:40.114430Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:40.114432Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
2024-11-21T17:49:40.114435Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T17:49:40.114436Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:40.114438Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:40.114440Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:40.114446Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T17:49:40.114455Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2024-11-21T17:49:40.114457Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 5} after executionsCount# 1 2024-11-21T17:49:40.114460Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:40.114465Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} finished in read 2024-11-21T17:49:40.114468Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T17:49:40.114470Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:40.114472Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:40.114474Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:40.114477Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T17:49:40.114478Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:40.114480Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2024-11-21T17:49:40.114482Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2024-11-21T17:49:13.923691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:13.924332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:13.924382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ccd/r3tmp/tmpxaenXT/pdisk_1.dat 2024-11-21T17:49:14.031454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.049208Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:14.091507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:14.091534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:14.101993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:14.208593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.223440Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:14.223652Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:14.223737Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:14.223790Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:14.231533Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:14.231696Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:14.231717Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:14.231880Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:14.231889Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:14.231896Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:14.231940Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:14.235742Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:14.235824Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:14.235849Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:14.235855Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:14.235860Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:14.235866Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.236014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.236022Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.236165Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:14.236189Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:14.236201Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.236206Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.236213Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:14.236220Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.236267Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:14.236275Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.236280Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:14.236284Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:14.236289Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:14.236294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:14.236315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:14.236319Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:14.236346Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:14.236411Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:14.236421Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:14.236441Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:14.236449Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:14.236454Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:14.236459Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:14.236462Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.236509Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:14.236512Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:14.236515Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:14.236519Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.236530Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:14.236533Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:14.236537Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:14.236540Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.236545Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:14.236794Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:14.236803Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:14.247089Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:14.247116Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.247122Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.247145Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:14.247164Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:14.421062Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.421081Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.421090Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:14.421108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:14.421114Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:14.421139Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.421148Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:14.421153Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:14.421158Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:14.421854Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:14.421866Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.421965Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.421971Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.421977Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.421984Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:14.421989Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.421996Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:40.083061Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:40.083065Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:40.083093Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:842:2676], Recipient [13:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:40.083097Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:40.083102Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:49:40.083105Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:40.083108Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:40.083112Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2024-11-21T17:49:40.083115Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2024-11-21T17:49:40.083119Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T17:49:40.083123Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2024-11-21T17:49:40.083126Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T17:49:40.083129Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2024-11-21T17:49:40.083147Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2024-11-21T17:49:40.083150Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T17:49:40.083153Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T17:49:40.083156Z node 13 :TX_DATASHARD TRACE: Add 
[3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:40.083159Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T17:49:40.083163Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2024-11-21T17:49:40.083167Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2024-11-21T17:49:40.083170Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2024-11-21T17:49:40.083174Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T17:49:40.083177Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:40.083180Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T17:49:40.083183Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T17:49:40.083215Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T17:49:40.083219Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T17:49:40.083223Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T17:49:40.083226Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T17:49:40.083230Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T17:49:40.083233Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T17:49:40.083236Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T17:49:40.083239Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:40.083267Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2024-11-21T17:49:40.083270Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T17:49:40.083274Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:49:40.083277Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:49:40.083281Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T17:49:40.083284Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:49:40.083287Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2024-11-21T17:49:40.083290Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:40.083293Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:40.083296Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T17:49:40.083298Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T17:49:40.093632Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715665} 2024-11-21T17:49:40.093654Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T17:49:40.093669Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:40.093676Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:49:40.093698Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1030:2831], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:40.093712Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:40.093771Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715665} 2024-11-21T17:49:40.093777Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T17:49:40.093785Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:49:40.093790Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:40.093798Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1030:2831], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:40.093808Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:49:40.094215Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:557:2484], Recipient [13:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T17:49:40.094243Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:40.094257Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:40.094278Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:49:40.094283Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:49:40.094287Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:40.094291Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:40.094300Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2024-11-21T17:49:40.094305Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
2024-11-21T17:49:40.094308Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:40.094311Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:40.094314Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:40.094328Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2024-11-21T17:49:40.094397Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2024-11-21T17:49:40.094405Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:557:2484], 1} after executionsCount# 1 2024-11-21T17:49:40.094414Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:557:2484], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:40.094451Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:557:2484], 1} finished in read 2024-11-21T17:49:40.094461Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:49:40.094464Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:40.094467Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:40.094470Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:40.094483Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:49:40.094486Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:40.094490Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T17:49:40.094494Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:49:40.094515Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 >> TReplicationWithRebootsTests::Alter [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 1504853082841793394 Reassign# 5 -- VSlotId { NodeId: 6 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 5 VDiskMetrics { SatisfactionRank: 0 
VSlotId { NodeId: 6 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green } Status: "READY" Ready: true Put# [1:1:1:0:0:3:0] Put# [1:1:2:0:0:47:0] Put# [1:1:3:0:0:27:0] 2024-11-21T17:46:37.952659Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T17:46:37.953055Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15700337866898848295] 2024-11-21T17:46:37.954036Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: THullOsirisActor: RESURRECT: id# [1:1:1:0:0:3:1] 2024-11-21T17:46:37.954049Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: THullOsirisActor: RESURRECT: id# [1:1:2:0:0:47:2] 2024-11-21T17:46:37.954056Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: THullOsirisActor: RESURRECT: id# [1:1:3:0:0:27:3] 2024-11-21T17:46:37.954102Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:5:0]: THullOsirisActor: FINISH: BlobsResurrected# 3 PartsResurrected# 3 Put# [1:1:4:0:0:6:0] Put# [1:1:5:0:0:85:0] Put# [1:1:6:0:0:55:0] Put# [1:1:7:0:0:93:0] Put# [1:1:8:0:0:25:0] Put# [1:1:9:0:0:13:0] Put# [1:1:10:0:0:47:0] Put# [1:1:11:0:0:82:0] Put# [1:1:12:0:0:34:0] Put# [1:1:13:0:0:77:0] Put# [1:1:14:0:0:85:0] Put# [1:1:15:0:0:35:0] Put# [1:1:16:0:0:18:0] Put# [1:1:17:0:0:58:0] Put# [1:1:18:0:0:36:0] Put# [1:1:19:0:0:82:0] Put# [1:1:20:0:0:39:0] Put# [1:1:21:0:0:87:0] Put# [1:1:22:0:0:79:0] Put# [1:1:23:0:0:74:0] Put# [1:1:24:0:0:87:0] Put# [1:1:25:0:0:5:0] Put# [1:1:26:0:0:70:0] Put# [1:1:27:0:0:48:0] Put# [1:1:28:0:0:17:0] Put# [1:1:29:0:0:1:0] Put# [1:1:30:0:0:91:0] Put# [1:1:31:0:0:2:0] Put# [1:1:32:0:0:53:0] Put# [1:1:33:0:0:61:0] Put# [1:1:34:0:0:33:0] Put# [1:1:35:0:0:40:0] Put# [1:1:36:0:0:17:0] Put# [1:1:37:0:0:14:0] Put# [1:1:38:0:0:96:0] Put# [1:1:39:0:0:56:0] Put# [1:1:40:0:0:11:0] Put# [1:1:41:0:0:65:0] Put# [1:1:42:0:0:88:0] Put# [1:1:43:0:0:94:0] Put# [1:1:44:0:0:95:0] Put# [1:1:45:0:0:98:0] Put# [1:1:46:0:0:39:0] Put# [1:1:47:0:0:89:0] Put# [1:1:48:0:0:58:0] Put# [1:1:49:0:0:81:0] Put# [1:1:50:0:0:45:0] Put# [1:1:51:0:0:79:0] Put# [1:1:52:0:0:89:0] Put# [1:1:53:0:0:86:0] Put# [1:1:54:0:0:76:0] Put# [1:1:55:0:0:97:0] Put# [1:1:56:0:0:92:0] Put# [1:1:57:0:0:33:0] Put# [1:1:58:0:0:69:0] Put# [1:1:59:0:0:84:0] Put# [1:1:60:0:0:94:0] Put# [1:1:61:0:0:59:0] Put# [1:1:62:0:0:76:0] Put# [1:1:63:0:0:10:0] Put# [1:1:64:0:0:7:0] Put# [1:1:65:0:0:15:0] Put# [1:1:66:0:0:58:0] Put# [1:1:67:0:0:29:0] Put# [1:1:68:0:0:54:0] Put# [1:1:69:0:0:14:0] Put# [1:1:70:0:0:13:0] Put# [1:1:71:0:0:66:0] Put# [1:1:72:0:0:80:0] Put# [1:1:73:0:0:5:0] Put# [1:1:74:0:0:28:0] Put# [1:1:75:0:0:24:0] Put# [1:1:76:0:0:88:0] Put# [1:1:77:0:0:44:0] Put# [1:1:78:0:0:44:0] Put# [1:1:79:0:0:16:0] Put# [1:1:80:0:0:59:0] Put# [1:1:81:0:0:13:0] Put# [1:1:82:0:0:15:0] Put# [1:1:83:0:0:56:0] Put# [1:1:84:0:0:12:0] Put# [1:1:85:0:0:93:0] Put# [1:1:86:0:0:83:0] Put# [1:1:87:0:0:75:0] Put# [1:1:88:0:0:60:0] Put# [1:1:89:0:0:17:0] Put# [1:1:90:0:0:92:0] Put# [1:1:91:0:0:73:0] Put# [1:1:92:0:0:9:0] Put# [1:1:93:0:0:84:0] Put# [1:1:94:0:0:42:0] Put# [1:1:95:0:0:24:0] Put# [1:1:96:0:0:40:0] Put# [1:1:97:0:0:30:0] Put# [1:1:98:0:0:76:0] Put# [1:1:99:0:0:10:0] Put# [1:1:100:0:0:6:0] Put# [1:1:101:0:0:6:0] Put# [1:1:102:0:0:87:0] Put# [1:1:103:0:0:29:0] Put# [1:1:104:0:0:22:0] Put# [1:1:105:0:0:61:0] Put# [1:1:106:0:0:12:0] Put# [1:1:107:0:0:32:0] Put# [1:1:108:0:0:94:0] Put# [1:1:109:0:0:26:0] Put# 
[1:1:110:0:0:53:0] Put# [1:1:111:0:0:36:0] Put# [1:1:112:0:0:76:0] Put# [1:1:113:0:0:6:0] Put# [1:1:114:0:0:95:0] Put# [1:1:115:0:0:5:0] Put# [1:1:116:0:0:36:0] Put# [1:1:117:0:0:51:0] Put# [1:1:118:0:0:59:0] Put# [1:1:119:0:0:7:0] Put# [1:1:120:0:0:30:0] Put# [1:1:121:0:0:37:0] Put# [1:1:122:0:0:31:0] Put# [1:1:123:0:0:83:0] Put# [1:1:124:0:0:64:0] Put# [1:1:125:0:0:53:0] Put# [1:1:126:0:0:87:0] Put# [1:1:127:0:0:23:0] Put# [1:1:128:0:0:100:0] Put# [1:1:129:0:0:90:0] Put# [1:1:130:0:0:48:0] Put# [1:1:131:0:0:42:0] Put# [1:1:132:0:0:21:0] Put# [1:1:133:0:0:63:0] Put# [1:1:134:0:0:75:0] Put# [1:1:135:0:0:85:0] Put# [1:1:136:0:0:67:0] Put# [1:1:137:0:0:13:0] Put# [1:1:138:0:0:80:0] Put# [1:1:139:0:0:73:0] Put# [1:1:140:0:0:53:0] Put# [1:1:141:0:0:87:0] Put# [1:1:142:0:0:38:0] Put# [1:1:143:0:0:100:0] Put# [1:1:144:0:0:55:0] Put# [1:1:145:0:0:31:0] Put# [1:1:146:0:0:1:0] Put# [1:1:147:0:0:3:0] Put# [1:1:148:0:0:35:0] Put# [1:1:149:0:0:5:0] Put# [1:1:150:0:0:24:0] Put# [1:1:151:0:0:97:0] Put# [1:1:152:0:0:42:0] Put# [1:1:153:0:0:54:0] Put# [1:1:154:0:0:42:0] Put# [1:1:155:0:0:3:0] Put# [1:1:156:0:0:81:0] Put# [1:1:157:0:0:37:0] Put# [1:1:158:0:0:73:0] Put# [1:1:159:0:0:40:0] Put# [1:1:160:0:0:99:0] Put# [1:1:161:0:0:15:0] Put# [1:1:162:0:0:99:0] Put# [1:1:163:0:0:50:0] Put# [1:1:164:0:0:44:0] Put# [1:1:165:0:0:90:0] Put# [1:1:166:0:0:49:0] Put# [1:1:167:0:0:90:0] Put# [1:1:168:0:0:22:0] Put# [1:1:169:0:0:7:0] Put# [1:1:170:0:0:17:0] Put# [1:1:171:0:0:22:0] Put# [1:1:172:0:0:72:0] Put# [1:1:173:0:0:83:0] Put# [1:1:174:0:0:25:0] Put# [1:1:175:0:0:29:0] Put# [1:1:176:0:0:38:0] Put# [1:1:177:0:0:69:0] Put# [1:1:178:0:0:81:0] Put# [1:1:179:0:0:51:0] Put# [1:1:180:0:0:13:0] Put# [1:1:181:0:0:70:0] Put# [1:1:182:0:0:11:0] Put# [1:1:183:0:0:24:0] Put# [1:1:184:0:0:33:0] Put# [1:1:185:0:0:17:0] Put# [1:1:186:0:0:54:0] Put# [1:1:187:0:0:17:0] Put# [1:1:188:0:0:23:0] Put# [1:1:189:0:0:85:0] Put# [1:1:190:0:0:28:0] Put# [1:1:191:0:0:9:0] Put# [1:1:192:0:0:65:0] Put# [1:1:193:0:0:56:0] Put# [1:1:194:0:0:10:0] Put# [1:1:195:0:0:32:0] Put# [1:1:196:0:0:65:0] Put# [1:1:197:0:0:61:0] Put# [1:1:198:0:0:92:0] Put# [1:1:199:0:0:63:0] Put# [1:1:200:0:0:96:0] Put# [1:1:201:0:0:93:0] Put# [1:1:202:0:0:50:0] Put# [1:1:203:0:0:47:0] Put# [1:1:204:0:0:94:0] Put# [1:1:205:0:0:29:0] Put# [1:1:206:0:0:59:0] Put# [1:1:207:0:0:34:0] Put# [1:1:208:0:0:77:0] Put# [1:1:209:0:0:92:0] Put# [1:1:210:0:0:73:0] Put# [1:1:211:0:0:51:0] Put# [1:1:212:0:0:62:0] Put# [1:1:213:0:0:34:0] Put# [1:1:214:0:0:21:0] Put# [1:1:215:0:0:99:0] Put# [1:1:216:0:0:64:0] Put# [1:1:217:0:0:93:0] Put# [1:1:218:0:0:45:0] Put# [1:1:219:0:0:61:0] Put# [1:1:220:0:0:53:0] Put# [1:1:221:0:0:16:0] Put# [1:1:222:0:0:46:0] Put# [1:1:223:0:0:16:0] Put# [1:1:224:0:0:14:0] Put# [1:1:225:0:0:82:0] Put# [1:1:226:0:0:60:0] Put# [1:1:227:0:0:37:0] Put# [1:1:228:0:0:71:0] Put# [1:1:229:0:0:38:0] Put# [1:1:230:0:0:11:0] Put# [1:1:231:0:0:32:0] Put# [1:1:232:0:0:89:0] Put# [1:1:233:0:0:21:0] Put# [1:1:234:0:0:51:0] Put# [1:1:235:0:0:2:0] Put# [1:1:236:0:0:66:0] Put# [1:1:237:0:0:3:0] Put# [1:1:238:0:0:56:0] Put# [1:1:239:0:0:14:0] Put# [1:1:240:0:0:96:0] Put# [1:1:241:0:0:96:0] Put# [1:1:242:0:0:6:0] Put# [1:1:243:0:0:87:0] Put# [1:1:244:0:0:79:0] Put# [1:1:245:0:0:84:0] Put# [1:1:246:0:0:19:0] Put# [1:1:247:0:0:36:0] Put# [1:1:248:0:0:58:0] Put# [1:1:249:0:0:64:0] Put# [1:1:250:0:0:50:0] Put# [1:1:251:0:0:55:0] Put# [1:1:252:0:0:27:0] Put# [1:1:253:0:0:61:0] Put# [1:1:254:0:0:86:0] Put# [1:1:255:0:0:52:0] Put# [1:1:256:0:0:24:0] Put# [1:1:257:0:0:26:0] Put# 
[1:1:258:0:0:43:0] Put# [1:1:259:0:0:41:0] Put# [1:1:260:0:0:28:0] Put# [1:1:261:0:0:10:0] Put# [1:1:262:0:0:28:0] Put# [1:1:263:0:0:27:0] Put# [1:1:264:0:0:20:0] Put# [1:1:265:0:0:76:0] Put# [1:1:266:0:0:17:0] Put# [1:1:267:0:0:22:0] Put# [1:1:268:0:0:90:0] Put# [1:1:269:0:0:86:0] Put# [1:1:270:0:0:93:0] Put# [1:1:271:0:0:61:0] Put# [1:1:272:0:0:5:0] Put# [1:1:273:0:0:1:0] Put# [1:1:274:0:0:71:0] Put# [1:1:275:0:0:52:0] Put# [1:1:276:0:0:90:0] Put# [1:1:277:0:0:93:0] Put# [1:1:278:0:0:18:0] Put# [1:1:279:0:0:51:0] Put# [1:1:280:0:0:74:0] Put# [1:1:281:0:0:55:0] Put# [1:1:282:0:0:13:0] Put# [1:1:283:0:0:81:0] Put# [1:1:284:0:0:28:0] Put# [1:1:285:0:0:51:0] Put# [1:1:286:0:0:18:0] Put# [1:1:287:0:0:100:0] Put# [1:1:288:0:0:43:0] Put# [1:1:289:0:0:65:0] Put# [1:1:290:0:0:61:0] Put# [1:1:291:0:0:2:0] Put# [1:1:292:0:0:26:0] Put# [1:1:293:0:0:97:0] Put# [1:1:294:0:0:1:0] Put# [1:1:295:0:0:88:0] Put# [1:1:296:0:0:50:0] Put# [1:1:297:0:0:18:0] Put# [1:1:298:0:0:23:0] Put# [1:1:299:0:0:25:0] Put# [1:1:300:0:0:65:0] Put# [1:1:301:0:0:68:0] Put# [1:1:302:0:0:36:0] Put# [1:1:303:0:0:80:0] Put# [1:1:304:0:0:75:0] Put# [1:1:305:0:0:43:0] Put# [1:1:306:0:0:33:0] Put# [1:1:307:0:0:6:0] Put# [1:1:308:0:0:32:0] Put# [1:1:309:0:0:35:0] Put# [1:1:310:0:0:39:0] Put# [1:1:311:0:0:75:0] Put# [1:1:312:0:0:25:0] Put# [1:1:313:0:0:82:0] Put# [1:1:314:0:0:74:0] Put# [1:1:315:0:0:4:0] Put# [1:1:316:0:0:56:0] Put# [1:1:317:0:0:8:0] Put# [1:1:318:0:0:85:0] Put# [1:1:319:0:0:57:0] Put# [1:1:320:0:0:19:0] Put# [1:1:321:0:0:21:0] Put# [1:1:322:0:0:32:0] Put# [1:1:323:0:0:79:0] Put# [1:1:324:0:0:98:0] Put# [1:1:325:0:0:35:0] Put# [1:1:326:0:0:95:0] Put# [1:1:327:0:0:45:0] Put# [1:1:328:0:0:71:0] Put# [1:1:329:0:0:39:0] Put# [1:1:330:0:0:70:0] Put# [1:1:331:0:0:12:0] Put# [1:1:332:0:0:79:0] Put# [1:1:333:0:0:36:0] Put# [1:1:334:0:0:14:0] Put# [1:1:335:0:0:55:0] Put# [1:1:336:0:0:50:0] Put# [1:1:337:0:0:84:0] Put# [1:1:338:0:0:61:0] Put# [1:1:339:0:0:56:0] Put# [1:1:340:0:0:93:0] Put# [1:1:341:0:0:32:0] Put# [1:1:342:0:0:38:0] Put# [1:1:343:0:0:49:0] Put# [1:1:344:0:0:71:0] Put# [1:1:345:0:0:35:0] Put# [1:1:346:0:0:24:0] Put# [1:1:347:0:0:87:0] Put# [1:1:348:0:0:45:0] Put# [1:1:349:0:0:94:0] Put# [1:1:350:0:0:47:0] Put# [1:1:351:0:0:28:0] Put# [1:1:352:0:0:4:0] Put# [1:1:353:0:0:30:0] Put# [1:1:354:0:0:81:0] Put# [1:1:355:0:0:43:0] Put# [1:1:356:0:0:19:0] Put# [1:1:357:0:0:86:0] Put# [1:1:358:0:0:4:0] Put# [1:1:359:0:0:17:0] Put# [1:1:360:0:0:62:0] Put# [1:1:361:0:0:68:0] Put# [1:1:362:0:0:34:0] Put# [1:1:363:0:0:99:0] Put# [1:1:364:0:0:87:0] Put# [1:1:365:0:0:7:0] Put# [1:1:366:0:0:80:0] Put# [1:1:367:0:0:28:0] Put# [1:1:368:0:0:76:0] Put# [1:1:369:0:0:66:0] Put# [1:1:370:0:0:13:0] Put# [1:1:371:0:0:76:0] Put# [1:1:372:0:0:20:0] Put# [1:1:373:0:0:19:0] Put# [1:1:374:0:0:45:0] Put# [1:1:375:0:0:51:0] Put# [1:1:376:0:0:62:0] Put# [1:1:377:0:0:33:0] Put# [1:1:378:0:0:55:0] Put# [1:1:379:0:0:58:0] Put# [1:1:380:0:0:36:0] Put# [1:1:381:0:0:93:0] Put# [1:1:382:0:0:34:0] Put# [1:1:383:0:0:52:0] Put# [1:1:384:0:0:38:0] Put# [1:1:385:0:0:12:0] Put# [1:1:386:0:0:4:0] Put# [1:1:387:0:0:84:0] Put# [1:1:388:0:0:42:0] Put# [1:1:389:0:0:67:0] Put# [1:1:390:0:0:56:0] Put# [1:1:391:0:0:58:0] Put# [1:1:392:0:0:3:0] Put# [1:1:393:0:0:14:0] Put# [1:1:394:0:0:15:0] Put# [1:1:395:0:0:97:0] Put# [1:1:396:0:0:23:0] Put# [1:1:397:0:0:95:0] Put# [1:1:398:0:0:81:0] Put# [1:1:399:0:0:70:0] Put# [1:1:400:0:0:47:0] Put# [1:1:401:0:0:15:0] Put# [1:1:402:0:0:4:0] Put# [1:1:403:0:0:64:0] Put# [1:1:404:0:0:99:0] Put# [1:1:405:0:0:71:0] Put# 
[1:1:406:0:0:52:0] Put# [1:1:407:0:0:71:0] Put# [1:1:408:0:0:65:0] Put# [1:1:409:0:0:18:0] Put# [1:1:410:0:0:26:0] Put# [1:1:411:0:0:41:0] Put# [1:1:412:0:0:59:0] Put# [1:1:413:0:0:60:0] Put# [1:1:414:0:0:95:0] Put# [1:1:415:0:0:51:0] Put# [1:1:416:0:0:29:0] Put# [1:1:417:0:0:41:0] Put# [1:1:418:0:0:85:0] Put# [1:1:419:0:0:9:0] Put# [1:1:420:0:0:72:0] Put# [1:1:421:0:0:1:0] Put# [1:1:422:0:0:67:0] Put# [1:1:423:0:0:53:0] Put# [1:1:424:0:0:58:0] Put# [1:1:425:0:0:57:0] Put# [1:1:426:0:0:6:0] Put# [1:1:427:0:0:2:0] Put# [1:1:428:0:0:48:0] Put# [1:1:429:0:0:81:0] Put# [1:1:430:0:0:30:0] Put# [1:1:431:0:0:75:0] Put# [1:1:432:0:0:2:0] Put# [1:1:433:0:0:22:0] Put# [1:1:434:0:0:6:0] Put# [1:1:435:0:0:16:0] Put# [1:1:436:0:0:1:0] Put# [1:1:437:0:0:35:0] Put# [1:1:438:0:0:67:0] Put# [1:1:439:0:0:49:0] Put# [1:1:440:0:0:24:0] Put# [1:1:441:0:0:94:0] Put# [1:1:442:0:0:85:0] Put# [1:1:443:0:0:100:0] Put# [1:1:444:0:0:70:0] Put# [1:1:445:0:0:91:0] Put# [1:1:446:0:0:51:0] Put# [1:1:447:0:0:16:0] Put# [1:1:448:0:0:56:0] Put# [1:1:449:0:0:17:0] Put# [1:1:450:0:0:36:0] Put# [1:1:451:0:0:75:0] Put# [1:1:452:0:0:55:0] Put# [1:1:453:0:0:95:0] Put# [1:1:454:0:0:97:0] Put ... Put# [1:26:9538:0:0:67:0] Put# [1:26:9539:0:0:66:0] Put# [1:26:9540:0:0:90:0] Put# [1:26:9541:0:0:10:0] Put# [1:26:9542:0:0:76:0] Put# [1:26:9543:0:0:36:0] Put# [1:26:9544:0:0:69:0] Put# [1:26:9545:0:0:18:0] Put# [1:26:9546:0:0:71:0] Put# [1:26:9547:0:0:46:0] Put# [1:26:9548:0:0:15:0] Put# [1:26:9549:0:0:37:0] Put# [1:26:9550:0:0:36:0] Put# [1:26:9551:0:0:33:0] Put# [1:26:9552:0:0:31:0] Put# [1:26:9553:0:0:19:0] Put# [1:26:9554:0:0:48:0] Put# [1:26:9555:0:0:41:0] Put# [1:26:9556:0:0:42:0] Put# [1:26:9557:0:0:83:0] Put# [1:26:9558:0:0:59:0] Put# [1:26:9559:0:0:57:0] Put# [1:26:9560:0:0:3:0] Put# [1:26:9561:0:0:93:0] Put# [1:26:9562:0:0:28:0] Put# [1:26:9563:0:0:45:0] Put# [1:26:9564:0:0:56:0] Put# [1:26:9565:0:0:23:0] Put# [1:26:9566:0:0:30:0] Put# [1:26:9567:0:0:94:0] Put# [1:26:9568:0:0:92:0] Put# [1:26:9569:0:0:31:0] Put# [1:26:9570:0:0:82:0] Put# [1:26:9571:0:0:6:0] Put# [1:26:9572:0:0:38:0] Put# [1:26:9573:0:0:10:0] Put# [1:26:9574:0:0:51:0] Put# [1:26:9575:0:0:71:0] Put# [1:26:9576:0:0:62:0] Put# [1:26:9577:0:0:30:0] Put# [1:26:9578:0:0:9:0] Put# [1:26:9579:0:0:26:0] Put# [1:26:9580:0:0:31:0] Put# [1:26:9581:0:0:49:0] Put# [1:26:9582:0:0:76:0] Put# [1:26:9583:0:0:16:0] Put# [1:26:9584:0:0:46:0] Put# [1:26:9585:0:0:32:0] Put# [1:26:9586:0:0:62:0] Put# [1:26:9587:0:0:34:0] Put# [1:26:9588:0:0:25:0] Put# [1:26:9589:0:0:76:0] Put# [1:26:9590:0:0:1:0] Put# [1:26:9591:0:0:27:0] Put# [1:26:9592:0:0:4:0] Put# [1:26:9593:0:0:40:0] Put# [1:26:9594:0:0:23:0] Put# [1:26:9595:0:0:55:0] Put# [1:26:9596:0:0:25:0] Put# [1:26:9597:0:0:38:0] Put# [1:26:9598:0:0:58:0] Put# [1:26:9599:0:0:20:0] Put# [1:26:9600:0:0:60:0] Put# [1:26:9601:0:0:39:0] Put# [1:26:9602:0:0:79:0] Put# [1:26:9603:0:0:56:0] Put# [1:26:9604:0:0:63:0] Put# [1:26:9605:0:0:86:0] Put# [1:26:9606:0:0:96:0] Put# [1:26:9607:0:0:58:0] Put# [1:26:9608:0:0:65:0] Put# [1:26:9609:0:0:34:0] Put# [1:26:9610:0:0:89:0] Put# [1:26:9611:0:0:1:0] Put# [1:26:9612:0:0:5:0] Put# [1:26:9613:0:0:91:0] Put# [1:26:9614:0:0:48:0] Put# [1:26:9615:0:0:32:0] Put# [1:26:9616:0:0:37:0] Put# [1:26:9617:0:0:56:0] Put# [1:26:9618:0:0:69:0] Put# [1:26:9619:0:0:88:0] Put# [1:26:9620:0:0:8:0] Put# [1:26:9621:0:0:58:0] Put# [1:26:9622:0:0:20:0] Put# [1:26:9623:0:0:26:0] Put# [1:26:9624:0:0:14:0] Put# [1:26:9625:0:0:45:0] Put# [1:26:9626:0:0:44:0] Put# [1:26:9627:0:0:38:0] Put# [1:26:9628:0:0:51:0] Put# 
[1:26:9629:0:0:99:0] Put# [1:26:9630:0:0:7:0] Put# [1:26:9631:0:0:54:0] Put# [1:26:9632:0:0:71:0] Put# [1:26:9633:0:0:97:0] Put# [1:26:9634:0:0:24:0] Put# [1:26:9635:0:0:70:0] Put# [1:26:9636:0:0:41:0] Put# [1:26:9637:0:0:85:0] Put# [1:26:9638:0:0:85:0] Put# [1:26:9639:0:0:74:0] Put# [1:26:9640:0:0:63:0] Put# [1:26:9641:0:0:60:0] Put# [1:26:9642:0:0:28:0] Put# [1:26:9643:0:0:27:0] Put# [1:26:9644:0:0:5:0] Put# [1:26:9645:0:0:51:0] Put# [1:26:9646:0:0:44:0] Put# [1:26:9647:0:0:55:0] Put# [1:26:9648:0:0:7:0] Put# [1:26:9649:0:0:36:0] Put# [1:26:9650:0:0:16:0] Put# [1:26:9651:0:0:78:0] Put# [1:26:9652:0:0:44:0] Put# [1:26:9653:0:0:68:0] Put# [1:26:9654:0:0:72:0] Put# [1:26:9655:0:0:4:0] Put# [1:26:9656:0:0:60:0] Put# [1:26:9657:0:0:22:0] Put# [1:26:9658:0:0:6:0] Put# [1:26:9659:0:0:16:0] Put# [1:26:9660:0:0:84:0] Put# [1:26:9661:0:0:10:0] Put# [1:26:9662:0:0:89:0] Put# [1:26:9663:0:0:35:0] Put# [1:26:9664:0:0:66:0] Put# [1:26:9665:0:0:15:0] Put# [1:26:9666:0:0:82:0] Put# [1:26:9667:0:0:70:0] Put# [1:26:9668:0:0:1:0] Put# [1:26:9669:0:0:14:0] Put# [1:26:9670:0:0:9:0] Put# [1:26:9671:0:0:11:0] Put# [1:26:9672:0:0:48:0] Put# [1:26:9673:0:0:2:0] Put# [1:26:9674:0:0:34:0] Put# [1:26:9675:0:0:84:0] Put# [1:26:9676:0:0:16:0] Put# [1:26:9677:0:0:51:0] Put# [1:26:9678:0:0:17:0] Put# [1:26:9679:0:0:54:0] Put# [1:26:9680:0:0:60:0] Put# [1:26:9681:0:0:94:0] Put# [1:26:9682:0:0:80:0] Put# [1:26:9683:0:0:16:0] Put# [1:26:9684:0:0:33:0] Put# [1:26:9685:0:0:65:0] Put# [1:26:9686:0:0:64:0] Put# [1:26:9687:0:0:62:0] Put# [1:26:9688:0:0:55:0] Put# [1:26:9689:0:0:26:0] Put# [1:26:9690:0:0:44:0] Put# [1:26:9691:0:0:42:0] Put# [1:26:9692:0:0:55:0] Put# [1:26:9693:0:0:71:0] Put# [1:26:9694:0:0:45:0] Put# [1:26:9695:0:0:78:0] Put# [1:26:9696:0:0:84:0] Put# [1:26:9697:0:0:19:0] Put# [1:26:9698:0:0:92:0] Put# [1:26:9699:0:0:30:0] Put# [1:26:9700:0:0:43:0] Put# [1:26:9701:0:0:88:0] Put# [1:26:9702:0:0:48:0] Put# [1:26:9703:0:0:63:0] Put# [1:26:9704:0:0:39:0] Put# [1:26:9705:0:0:43:0] Put# [1:26:9706:0:0:61:0] Put# [1:26:9707:0:0:51:0] Put# [1:26:9708:0:0:17:0] Put# [1:26:9709:0:0:21:0] Put# [1:26:9710:0:0:51:0] Put# [1:26:9711:0:0:88:0] Put# [1:26:9712:0:0:23:0] Put# [1:26:9713:0:0:22:0] Put# [1:26:9714:0:0:23:0] Put# [1:26:9715:0:0:100:0] Put# [1:26:9716:0:0:63:0] Put# [1:26:9717:0:0:53:0] Put# [1:26:9718:0:0:10:0] Put# [1:26:9719:0:0:59:0] Put# [1:26:9720:0:0:99:0] Put# [1:26:9721:0:0:52:0] Put# [1:26:9722:0:0:35:0] Put# [1:26:9723:0:0:91:0] Put# [1:26:9724:0:0:15:0] Put# [1:26:9725:0:0:34:0] Put# [1:26:9726:0:0:93:0] Put# [1:26:9727:0:0:55:0] Put# [1:26:9728:0:0:48:0] Put# [1:26:9729:0:0:15:0] Put# [1:26:9730:0:0:43:0] Put# [1:26:9731:0:0:72:0] Put# [1:26:9732:0:0:18:0] Put# [1:26:9733:0:0:25:0] Put# [1:26:9734:0:0:20:0] Put# [1:26:9735:0:0:8:0] Put# [1:26:9736:0:0:35:0] Put# [1:26:9737:0:0:19:0] Put# [1:26:9738:0:0:68:0] Put# [1:26:9739:0:0:89:0] Put# [1:26:9740:0:0:28:0] Put# [1:26:9741:0:0:19:0] Put# [1:26:9742:0:0:68:0] Put# [1:26:9743:0:0:33:0] Put# [1:26:9744:0:0:71:0] Put# [1:26:9745:0:0:22:0] Put# [1:26:9746:0:0:75:0] Put# [1:26:9747:0:0:50:0] Put# [1:26:9748:0:0:41:0] Put# [1:26:9749:0:0:9:0] Put# [1:26:9750:0:0:54:0] Put# [1:26:9751:0:0:24:0] Put# [1:26:9752:0:0:3:0] Put# [1:26:9753:0:0:83:0] Put# [1:26:9754:0:0:62:0] Put# [1:26:9755:0:0:13:0] Put# [1:26:9756:0:0:5:0] Put# [1:26:9757:0:0:92:0] Put# [1:26:9758:0:0:83:0] Put# [1:26:9759:0:0:67:0] Put# [1:26:9760:0:0:26:0] Put# [1:26:9761:0:0:10:0] Put# [1:26:9762:0:0:8:0] Put# [1:26:9763:0:0:72:0] Put# [1:26:9764:0:0:17:0] Put# [1:26:9765:0:0:57:0] Put# 
[1:26:9766:0:0:92:0] Put# [1:26:9767:0:0:94:0] Put# [1:26:9768:0:0:70:0] Put# [1:26:9769:0:0:39:0] Put# [1:26:9770:0:0:29:0] Put# [1:26:9771:0:0:52:0] Put# [1:26:9772:0:0:56:0] Put# [1:26:9773:0:0:14:0] Put# [1:26:9774:0:0:90:0] Put# [1:26:9775:0:0:52:0] Put# [1:26:9776:0:0:29:0] Put# [1:26:9777:0:0:8:0] Put# [1:26:9778:0:0:55:0] Put# [1:26:9779:0:0:8:0] Put# [1:26:9780:0:0:36:0] Put# [1:26:9781:0:0:80:0] Put# [1:26:9782:0:0:27:0] Put# [1:26:9783:0:0:100:0] Put# [1:26:9784:0:0:66:0] Put# [1:26:9785:0:0:57:0] Put# [1:26:9786:0:0:51:0] Put# [1:26:9787:0:0:93:0] Put# [1:26:9788:0:0:48:0] Put# [1:26:9789:0:0:34:0] Put# [1:26:9790:0:0:18:0] Put# [1:26:9791:0:0:28:0] Put# [1:26:9792:0:0:38:0] Put# [1:26:9793:0:0:53:0] Put# [1:26:9794:0:0:45:0] Put# [1:26:9795:0:0:50:0] Put# [1:26:9796:0:0:99:0] Put# [1:26:9797:0:0:52:0] Put# [1:26:9798:0:0:93:0] Put# [1:26:9799:0:0:38:0] Put# [1:26:9800:0:0:74:0] Put# [1:26:9801:0:0:85:0] Put# [1:26:9802:0:0:62:0] Put# [1:26:9803:0:0:8:0] Put# [1:26:9804:0:0:70:0] Put# [1:26:9805:0:0:83:0] Put# [1:26:9806:0:0:10:0] Put# [1:26:9807:0:0:73:0] Put# [1:26:9808:0:0:90:0] Put# [1:26:9809:0:0:78:0] Put# [1:26:9810:0:0:11:0] Put# [1:26:9811:0:0:96:0] Put# [1:26:9812:0:0:68:0] Put# [1:26:9813:0:0:79:0] Put# [1:26:9814:0:0:33:0] Put# [1:26:9815:0:0:1:0] Put# [1:26:9816:0:0:2:0] Put# [1:26:9817:0:0:67:0] Put# [1:26:9818:0:0:56:0] Put# [1:26:9819:0:0:88:0] Put# [1:26:9820:0:0:42:0] Put# [1:26:9821:0:0:87:0] Put# [1:26:9822:0:0:99:0] Put# [1:26:9823:0:0:56:0] Put# [1:26:9824:0:0:56:0] Put# [1:26:9825:0:0:79:0] Put# [1:26:9826:0:0:80:0] Put# [1:26:9827:0:0:51:0] Put# [1:26:9828:0:0:84:0] Put# [1:26:9829:0:0:7:0] Put# [1:26:9830:0:0:96:0] Put# [1:26:9831:0:0:77:0] Put# [1:26:9832:0:0:58:0] Put# [1:26:9833:0:0:8:0] Put# [1:26:9834:0:0:66:0] Put# [1:26:9835:0:0:16:0] Put# [1:26:9836:0:0:72:0] Put# [1:26:9837:0:0:22:0] Put# [1:26:9838:0:0:57:0] Put# [1:26:9839:0:0:82:0] Put# [1:26:9840:0:0:18:0] Put# [1:26:9841:0:0:28:0] Put# [1:26:9842:0:0:1:0] Put# [1:26:9843:0:0:69:0] Put# [1:26:9844:0:0:20:0] Put# [1:26:9845:0:0:88:0] Put# [1:26:9846:0:0:13:0] Put# [1:26:9847:0:0:34:0] Put# [1:26:9848:0:0:33:0] Put# [1:26:9849:0:0:3:0] Put# [1:26:9850:0:0:93:0] Put# [1:26:9851:0:0:47:0] Put# [1:26:9852:0:0:24:0] Put# [1:26:9853:0:0:7:0] Put# [1:26:9854:0:0:58:0] Put# [1:26:9855:0:0:93:0] Put# [1:26:9856:0:0:85:0] Put# [1:26:9857:0:0:64:0] Put# [1:26:9858:0:0:56:0] Put# [1:26:9859:0:0:16:0] Put# [1:26:9860:0:0:37:0] Put# [1:26:9861:0:0:69:0] Put# [1:26:9862:0:0:57:0] Put# [1:26:9863:0:0:96:0] Put# [1:26:9864:0:0:97:0] Put# [1:26:9865:0:0:17:0] Put# [1:26:9866:0:0:47:0] Put# [1:26:9867:0:0:1:0] Put# [1:26:9868:0:0:34:0] Put# [1:26:9869:0:0:45:0] Put# [1:26:9870:0:0:6:0] Put# [1:26:9871:0:0:93:0] Put# [1:26:9872:0:0:12:0] Put# [1:26:9873:0:0:23:0] Put# [1:26:9874:0:0:70:0] Put# [1:26:9875:0:0:33:0] Put# [1:26:9876:0:0:91:0] Put# [1:26:9877:0:0:43:0] Put# [1:26:9878:0:0:26:0] Put# [1:26:9879:0:0:42:0] Put# [1:26:9880:0:0:74:0] Put# [1:26:9881:0:0:15:0] Put# [1:26:9882:0:0:44:0] Put# [1:26:9883:0:0:48:0] Put# [1:26:9884:0:0:93:0] Put# [1:26:9885:0:0:50:0] Put# [1:26:9886:0:0:11:0] Put# [1:26:9887:0:0:88:0] Put# [1:26:9888:0:0:73:0] Put# [1:26:9889:0:0:9:0] Put# [1:26:9890:0:0:31:0] Put# [1:26:9891:0:0:69:0] Put# [1:26:9892:0:0:45:0] Put# [1:26:9893:0:0:38:0] Put# [1:26:9894:0:0:7:0] Put# [1:26:9895:0:0:13:0] Put# [1:26:9896:0:0:63:0] Put# [1:26:9897:0:0:80:0] Put# [1:26:9898:0:0:6:0] Put# [1:26:9899:0:0:67:0] Put# [1:26:9900:0:0:23:0] Put# [1:26:9901:0:0:76:0] Put# [1:26:9902:0:0:19:0] Put# 
[1:26:9903:0:0:35:0] Put# [1:26:9904:0:0:26:0] Put# [1:26:9905:0:0:1:0] Put# [1:26:9906:0:0:68:0] Put# [1:26:9907:0:0:100:0] Put# [1:26:9908:0:0:87:0] Put# [1:26:9909:0:0:97:0] Put# [1:26:9910:0:0:33:0] Put# [1:26:9911:0:0:19:0] Put# [1:26:9912:0:0:94:0] Put# [1:26:9913:0:0:70:0] Put# [1:26:9914:0:0:74:0] Put# [1:26:9915:0:0:92:0] Put# [1:26:9916:0:0:7:0] Put# [1:26:9917:0:0:61:0] Put# [1:26:9918:0:0:5:0] Put# [1:26:9919:0:0:47:0] Put# [1:26:9920:0:0:2:0] Put# [1:26:9921:0:0:62:0] Put# [1:26:9922:0:0:29:0] Put# [1:26:9923:0:0:23:0] Put# [1:26:9924:0:0:41:0] Put# [1:26:9925:0:0:93:0] Put# [1:26:9926:0:0:63:0] Put# [1:26:9927:0:0:98:0] Put# [1:26:9928:0:0:3:0] Put# [1:26:9929:0:0:64:0] Put# [1:26:9930:0:0:67:0] Put# [1:26:9931:0:0:69:0] Put# [1:26:9932:0:0:17:0] Put# [1:26:9933:0:0:77:0] Put# [1:26:9934:0:0:100:0] Put# [1:26:9935:0:0:11:0] Put# [1:26:9936:0:0:54:0] Put# [1:26:9937:0:0:60:0] Put# [1:26:9938:0:0:39:0] Put# [1:26:9939:0:0:18:0] Put# [1:26:9940:0:0:50:0] Put# [1:26:9941:0:0:15:0] Put# [1:26:9942:0:0:31:0] Put# [1:26:9943:0:0:35:0] Put# [1:26:9944:0:0:62:0] Put# [1:26:9945:0:0:16:0] Put# [1:26:9946:0:0:37:0] Put# [1:26:9947:0:0:62:0] Put# [1:26:9948:0:0:98:0] Put# [1:26:9949:0:0:84:0] Put# [1:26:9950:0:0:18:0] Put# [1:26:9951:0:0:44:0] Put# [1:26:9952:0:0:76:0] Put# [1:26:9953:0:0:40:0] Put# [1:26:9954:0:0:12:0] Put# [1:26:9955:0:0:59:0] Put# [1:26:9956:0:0:81:0] Put# [1:26:9957:0:0:91:0] Put# [1:26:9958:0:0:86:0] Put# [1:26:9959:0:0:41:0] Put# [1:26:9960:0:0:18:0] Put# [1:26:9961:0:0:44:0] Put# [1:26:9962:0:0:32:0] Put# [1:26:9963:0:0:48:0] Put# [1:26:9964:0:0:25:0] Put# [1:26:9965:0:0:16:0] Put# [1:26:9966:0:0:70:0] Put# [1:26:9967:0:0:14:0] Put# [1:26:9968:0:0:83:0] Put# [1:26:9969:0:0:3:0] Put# [1:26:9970:0:0:29:0] Put# [1:26:9971:0:0:75:0] Put# [1:26:9972:0:0:43:0] Put# [1:26:9973:0:0:63:0] Put# [1:26:9974:0:0:59:0] Put# [1:26:9975:0:0:35:0] Put# [1:26:9976:0:0:93:0] Put# [1:26:9977:0:0:86:0] Put# [1:26:9978:0:0:19:0] Put# [1:26:9979:0:0:93:0] Put# [1:26:9980:0:0:67:0] Put# [1:26:9981:0:0:63:0] Put# [1:26:9982:0:0:30:0] Put# [1:26:9983:0:0:47:0] Put# [1:26:9984:0:0:73:0] Put# [1:26:9985:0:0:34:0] Put# [1:26:9986:0:0:14:0] Put# [1:26:9987:0:0:86:0] Put# [1:26:9988:0:0:22:0] Put# [1:26:9989:0:0:52:0] Put# [1:26:9990:0:0:46:0] Put# [1:26:9991:0:0:76:0] Put# [1:26:9992:0:0:6:0] Put# [1:26:9993:0:0:45:0] Put# [1:26:9994:0:0:89:0] Put# [1:26:9995:0:0:55:0] Put# [1:26:9996:0:0:25:0] Put# [1:26:9997:0:0:78:0] Put# [1:26:9998:0:0:75:0] Put# [1:26:9999:0:0:37:0] Put# [1:26:10000:0:0:96:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Alter [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] 
Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:31.055034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:31.055058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:31.055064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:31.055069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:31.055076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:31.055080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:31.055090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:31.055186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:31.066668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:31.066693Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:31.069269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:31.069390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:31.069429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:31.072155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:31.072251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:31.072375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:31.072562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:31.073288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:31.073568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:31.073580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:31.073593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:31.073600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:31.073606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:31.073652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE 
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:31.074943Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:31.092466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:31.092569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.092644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:31.092695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:31.092705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.093608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:31.093636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:31.093690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.093702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:31.093707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:31.093712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:31.094136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.094149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:31.094155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:31.094502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.094514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.094520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:31.094527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:31.095116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:31.095492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:31.095547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:31.095751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:31.095778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:31.095785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:31.095843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:31.095851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:31.095882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:31.095893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:31.096296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:31.096307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:31.096349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:31.096354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:31.096434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.096440Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:31.096451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:31.096455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:31.096461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:31.096466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:31.096471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 1:0 2024-11-21T17:49:31.096475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:31.096485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:31.096491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:31.096495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... lags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:40.593188Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:49:40.593197Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T17:49:40.593215Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:49:40.593273Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [35:118:2144], Recipient [35:256:2248] 2024-11-21T17:49:40.593277Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:40.593287Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:40.593303Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 118 RawX2: 150323857504 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:40.593310Z node 35 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 1003:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T17:49:40.593334Z node 35 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:49:40.593356Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:49:40.593366Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:40.593376Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 2024-11-21T17:49:40.593687Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:49:40.593698Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000004 first txId#1003 countTxs#1 2024-11-21T17:49:40.593705Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000004 2024-11-21T17:49:40.593709Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1003:0 2024-11-21T17:49:40.593735Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [35:128:2152], Recipient [35:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:49:40.593739Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:49:40.593754Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:40.593758Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:40.593800Z node 35 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:40.593804Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [35:205:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:49:40.593913Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:49:40.593921Z node 35 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:49:40.593930Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:49:40.593935Z node 35 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:49:40.593939Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:40.593945Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:49:40.593950Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:40.593955Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:40.593959Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:40.593979Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:40.593985Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 1, subscribers: 0 2024-11-21T17:49:40.593989Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:49:40.594049Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [35:205:2208], Recipient [35:128:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 3 } 2024-11-21T17:49:40.594059Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:49:40.594077Z node 35 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:40.594089Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:40.594096Z node 35 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:40.594101Z node 35 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:49:40.594106Z 
node 35 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:40.594118Z node 35 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:49:40.594122Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:49:40.594579Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:49:40.594881Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:40.594894Z node 35 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:49:40.594949Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:49:40.594957Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:49:40.595019Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [35:449:2406], Recipient [35:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:40.595057Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:40.595062Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:49:40.595084Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [35:415:2372], Recipient [35:128:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1003 2024-11-21T17:49:40.595089Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:49:40.595100Z node 35 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:49:40.595136Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:40.595141Z node 35 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [35:447:2404] 2024-11-21T17:49:40.595162Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [35:449:2406], Recipient [35:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:49:40.595167Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:49:40.595171Z node 35 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:40.595224Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [35:450:2407], Recipient [35:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:49:40.595228Z node 35 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:49:40.595238Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:40.595289Z node 35 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 48us result status StatusSuccess 2024-11-21T17:49:40.595363Z node 35 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 ControllerId: 72075186233409546 State { Done { FailoverMode: FAILOVER_MODE_FORCE } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:25.235789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:25.235815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:25.235820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:25.235824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:25.235838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:25.235842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:25.235850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:25.235924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:25.248730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:25.248748Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:25.251684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:25.252500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:25.252530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:25.253971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:25.254138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:25.254229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:25.254316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:25.255340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:25.255608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:25.255617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:25.255651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:25.255657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:25.255662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:25.255673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.256813Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:49:25.273655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:25.273719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.273758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:25.273795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:25.273802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.274386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:25.274402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:25.274430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.274438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:25.274442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:25.274446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:25.276862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.276879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:25.276884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:25.277450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.277460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.277466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:25.277485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:25.278116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:25.280575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:25.280627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:25.280792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:25.280830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:25.280868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:25.280922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:25.280929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T17:49:25.280955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:25.280966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:25.281473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:25.281481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:25.281510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:25.281515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:25.281585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:25.281591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:25.281601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:25.281605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:25.281610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:25.281615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:25.281619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:25.281622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:25.281632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:25.281637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:25.281641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:25.281938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:25.281951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:25.281955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:25.281960Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:25.281963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:25.281972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
d: 2], 18446744073709551615 2024-11-21T17:49:40.472531Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:40.472541Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:40.472545Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:49:40.472549Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:49:40.472553Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:49:40.472617Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:40.472626Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:40.472629Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:49:40.472632Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:49:40.472635Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:40.472641Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T17:49:40.475178Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:40.475199Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:40.475204Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:40.475208Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:49:40.475772Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:49:40.475844Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:49:40.475936Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:40.476028Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:49:40.476060Z 
node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409548 2024-11-21T17:49:40.476305Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:40.476348Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:40.476588Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:49:40.476646Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2024-11-21T17:49:40.476826Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:49:40.476861Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:49:40.476908Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:49:40.476943Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:49:40.476968Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:49:40.477069Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:40.477076Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:49:40.477090Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2024-11-21T17:49:40.477998Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:49:40.478015Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:49:40.478209Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:49:40.478224Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:49:40.478279Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:49:40.478285Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:49:40.478392Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:49:40.478400Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:49:40.478448Z 
node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:49:40.478509Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:49:40.478516Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:49:40.478597Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:49:40.478618Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:49:40.478622Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:527:2482] TestWaitNotification: OK eventTxId 103 2024-11-21T17:49:40.478712Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:40.478756Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 59us result status StatusPathDoesNotExist 2024-11-21T17:49:40.478795Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T17:49:40.478860Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:49:40.478872Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:49:40.478879Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:49:40.478887Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2024-11-21T17:49:40.478948Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:40.478976Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 30us result status StatusSuccess 
2024-11-21T17:49:40.479045Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2024-11-21T17:49:14.431076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:14.431610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:14.431641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001cbd/r3tmp/tmpQCc2sN/pdisk_1.dat 2024-11-21T17:49:14.540558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.559669Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:14.602354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:14.602384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:14.612918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:14.717932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.735874Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:14.736112Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:14.736207Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:14.736281Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:14.751216Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:14.751394Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:14.751420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:14.751589Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:14.751598Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:14.751606Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:14.751654Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:14.755561Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:14.755650Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:14.755674Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:14.755680Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:14.755685Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:14.755691Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.755848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.755856Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.756009Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:14.756034Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:14.756050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.756056Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.756063Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:14.756072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.756078Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:14.756086Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.756092Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:14.756095Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:14.756101Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:14.756107Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:14.756133Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:14.756138Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:14.756163Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:14.756225Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:14.756254Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:14.756276Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:14.756286Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:14.756290Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:14.756296Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:14.756300Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.756350Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:14.756354Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:14.756358Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:14.756361Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.756374Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:14.756378Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:14.756382Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:14.756386Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.756391Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:14.756664Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:14.756675Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:14.767000Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:14.767023Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.767029Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.767053Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:14.767072Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:14.946791Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.946812Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.946822Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:14.946841Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:14.946846Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:14.946872Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.946880Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:14.946885Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:14.946890Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:14.947657Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:14.947674Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.947790Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.947797Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.947804Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.947810Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:14.947815Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.947822Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:40.654335Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:40.654339Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:40.654379Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:40.654385Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:40.654392Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:49:40.654396Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:40.654399Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:40.654404Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2024-11-21T17:49:40.654408Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2024-11-21T17:49:40.654413Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:49:40.654417Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2024-11-21T17:49:40.654421Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T17:49:40.654424Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2024-11-21T17:49:40.654445Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2024-11-21T17:49:40.654448Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:49:40.654452Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T17:49:40.654455Z node 14 :TX_DATASHARD TRACE: Add 
[3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:40.654459Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T17:49:40.654466Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2024-11-21T17:49:40.654469Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2024-11-21T17:49:40.654473Z node 14 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2024-11-21T17:49:40.654477Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:49:40.654481Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:40.654484Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T17:49:40.654488Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T17:49:40.654506Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T17:49:40.654510Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T17:49:40.654516Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T17:49:40.654519Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T17:49:40.654523Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:49:40.654526Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T17:49:40.654529Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T17:49:40.654533Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:40.654562Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2024-11-21T17:49:40.654566Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T17:49:40.654569Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:49:40.654572Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:49:40.654577Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:49:40.654580Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:49:40.654583Z node 14 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2024-11-21T17:49:40.654587Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:40.654590Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:40.654593Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T17:49:40.654597Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T17:49:40.675281Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2024-11-21T17:49:40.675304Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2024-11-21T17:49:40.675318Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:40.675336Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:49:40.675353Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [14:999:2800], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:40.675362Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:40.675448Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2024-11-21T17:49:40.675451Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2024-11-21T17:49:40.675456Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:49:40.675459Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:40.675464Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [14:999:2800], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:40.675469Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:49:40.675790Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2024-11-21T17:49:40.675809Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:40.675818Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:40.675835Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:49:40.675839Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:49:40.675842Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:40.675844Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:40.675851Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T17:49:40.675854Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2024-11-21T17:49:40.675856Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:40.675858Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:40.675860Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:40.675870Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2024-11-21T17:49:40.675955Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2024-11-21T17:49:40.675961Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 1} after executionsCount# 1 2024-11-21T17:49:40.675966Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:40.676000Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} finished in read 2024-11-21T17:49:40.676009Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:49:40.676011Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:40.676013Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:40.676016Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:40.676025Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:49:40.676026Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:40.676029Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T17:49:40.676032Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:49:40.676046Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/services/metadata/secret/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD] Test command err: 2024-11-21T17:49:13.964088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:13.964636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:13.964659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ce4/r3tmp/tmpRb5e8n/pdisk_1.dat 2024-11-21T17:49:14.072776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.090135Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:14.132422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:14.132451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:14.143002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:14.247035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.261362Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:14.261573Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:14.261663Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:14.261715Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:14.269093Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:14.269277Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:14.269306Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:14.269481Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:14.269490Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:14.269497Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:14.269548Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:14.273358Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:14.273429Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:14.273451Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:14.273457Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:14.273461Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:14.273467Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.273586Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.273594Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.273719Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:14.273741Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:14.273753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.273758Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.273764Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:14.273772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.273778Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:14.273786Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.273791Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:14.273795Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:14.273801Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:14.273806Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:14.273827Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:14.273832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:14.273854Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:14.273913Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:14.273923Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:14.273941Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:14.273950Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:14.273954Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:14.273959Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:14.273963Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.274008Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:14.274012Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:14.274016Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:14.274019Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.274029Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:14.274032Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:14.274035Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:14.274039Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.274044Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:14.274280Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:14.274290Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:14.284795Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:14.284824Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.284832Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.284858Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:14.284882Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:14.460946Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.460971Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.460979Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:14.461001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:14.461007Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:14.461035Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.461045Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:14.461050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:14.461056Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:14.461756Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:14.461773Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.461869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.461874Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.461878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.461884Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:14.461888Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.461895Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3001 txid# 281474976715667} 2024-11-21T17:49:41.535322Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3001} 2024-11-21T17:49:41.535332Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:41.535337Z node 14 :TX_DATASHARD TRACE: Complete execution for [3001:281474976715667] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:49:41.535345Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:41.535579Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [14:1122:2896], Recipient [14:630:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:41.535588Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:41.535595Z node 14 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [14:1121:2895], serverId# [14:1122:2896], sessionId# [0:0:0] 2024-11-21T17:49:41.535725Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2024-11-21T17:49:41.535745Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:41.535756Z node 14 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T17:49:41.535763Z node 14 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2024-11-21T17:49:41.535772Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:41.535795Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:49:41.535800Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit 
CheckRead 2024-11-21T17:49:41.535805Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:41.535810Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:41.535827Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T17:49:41.535832Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:49:41.535835Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:41.535839Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:41.535843Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:41.535859Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2024-11-21T17:49:41.535951Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2024-11-21T17:49:41.535959Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2024-11-21T17:49:41.535969Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:49:41.567152Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:1019:2817], Recipient [14:630:2536]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:49:41.567175Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:49:41.567182Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2024-11-21T17:49:41.567204Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:49:41.567274Z node 14 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1111:2871], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:41.567289Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:49:41.567300Z node 14 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2024-11-21T17:49:41.567385Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. Ctx: { TraceId: 01jd7xg57eahwy77xs9pyj9sp5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZmE3MDZiYTUtYmQ5N2FkMGEtODE0YmJhZTEtMWJmNDViZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T17:49:41.567427Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xg57eahwy77xs9pyj9sp5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZmE3MDZiYTUtYmQ5N2FkMGEtODE0YmJhZTEtMWJmNDViZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:49:41.567438Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. Ctx: { TraceId: 01jd7xg57eahwy77xs9pyj9sp5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZmE3MDZiYTUtYmQ5N2FkMGEtODE0YmJhZTEtMWJmNDViZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T17:49:41.567457Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:41.567462Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:41.567666Z node 14 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [14:557:2484], selfId: [14:50:2097], source: [14:1089:2871] 2024-11-21T17:49:41.567697Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:41.567769Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:41.567777Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:49:41.567780Z node 14 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2024-11-21T17:49:41.567784Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:41.567822Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2024-11-21T17:49:41.567906Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T17:49:41.567911Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 1} after executionsCount# 2 2024-11-21T17:49:41.567916Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2024-11-21T17:49:41.567949Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:49:41.567954Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:41.567959Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:41.567964Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:41.567975Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:49:41.567978Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:41.567982Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished 2024-11-21T17:49:41.567985Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:41.567989Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:41.567993Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:41.567997Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:41.568041Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:41.568049Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 2 2024-11-21T17:49:41.568066Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 2 2024-11-21T17:49:41.568076Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2024-11-21T17:49:41.568220Z node 14 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=14&id=ZmE3MDZiYTUtYmQ5N2FkMGEtODE0YmJhZTEtMWJmNDViZTE=, workerId: [14:1089:2871], local sessions count: 0 2024-11-21T17:49:41.568263Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:41.568271Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 4 2024-11-21T17:49:41.568282Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 4 2024-11-21T17:49:41.568298Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2024-11-21T17:49:41.568310Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[14:557:2484], 1} finished in ReadContinue 2024-11-21T17:49:41.568339Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:54:2101], Recipient [14:1019:2817]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 14 Status: STATUS_NOT_FOUND |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite |80.2%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Validation |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] Test command err: 2024-11-21T17:49:16.506345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:16.506795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:16.506827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c9e/r3tmp/tmpKVtM7J/pdisk_1.dat 2024-11-21T17:49:16.618761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:16.637750Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:16.681485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:16.681515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:16.692110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:16.798528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:16.813059Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:16.813274Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:16.813355Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:16.813408Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:16.821702Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:16.821880Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:16.821906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:16.822058Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:16.822067Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:16.822075Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:16.822122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:16.826088Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:16.826174Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:16.826202Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:16.826209Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:16.826214Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:16.826219Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:16.826348Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:16.826356Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:16.826501Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:16.826522Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:16.826537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:16.826542Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:16.826549Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:16.826557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:16.826563Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:16.826569Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:16.826574Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:16.826578Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:16.826583Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:16.826589Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:16.826611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:16.826617Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:16.826639Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:16.826695Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:16.826705Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:16.826724Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:16.826732Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:16.826736Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:16.826742Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:16.826746Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:16.826802Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:16.826806Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:16.826810Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:16.826813Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:16.826824Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:16.826827Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:16.826831Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:16.826834Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:16.826840Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:16.827076Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:16.827085Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:16.837364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:16.837398Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:16.837405Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:16.837432Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:16.837452Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:17.012553Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:17.012580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:17.012590Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:17.012624Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:17.012630Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:17.012657Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:17.012668Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:17.012673Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:17.012678Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:17.013497Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:17.013517Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:17.013666Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:17.013672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:17.013680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:17.013688Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:17.013693Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:17.013702Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... :2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.433534Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.433582Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709543002, quota bytes left# 18446744073709138191, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.433603Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.433608Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.433612Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.433660Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542903, quota bytes left# 18446744073709133439, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.433684Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.433688Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.433692Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.433741Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542804, quota bytes left# 18446744073709128687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.433755Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.433759Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.433763Z node 14 
:TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.433810Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542705, quota bytes left# 18446744073709123935, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.433834Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.433838Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.433842Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.433890Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542606, quota bytes left# 18446744073709119183, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.433901Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.433907Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.433911Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.433959Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542507, quota bytes left# 18446744073709114431, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.433985Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.433989Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.433993Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434070Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542408, quota bytes left# 18446744073709109679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434082Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434087Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434091Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434138Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542309, quota bytes left# 18446744073709104927, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434162Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], 
Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434166Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434171Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434216Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542210, quota bytes left# 18446744073709100175, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434230Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434234Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434238Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434292Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542111, quota bytes left# 18446744073709095423, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434321Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434325Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434332Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434397Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542012, quota bytes left# 18446744073709090671, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434414Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434419Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434424Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434475Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541913, quota bytes left# 18446744073709085919, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434504Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434509Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434513Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434563Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota 
rows left# 18446744073709541814, quota bytes left# 18446744073709081167, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434582Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434587Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434591Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434636Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541715, quota bytes left# 18446744073709076415, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434656Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434661Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434681Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434734Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541616, quota bytes left# 18446744073709071663, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434748Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T17:49:42.434753Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T17:49:42.434757Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T17:49:42.434766Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 1, bytes# 48, quota rows left# 18446744073709541615, quota bytes left# 18446744073709071615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:42.434775Z node 14 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[14:557:2484], 1} finished in ReadContinue |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TInterconnectTest::TestNotifyUndelivered >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry >> KqpPg::NoTableQuery >> KqpPg::InsertFromSelect_Simple >> KqpPg::CreateTableBulkUpsertAndRead >> KqpPg::CopyTableSerialColumns >> KqpPg::TypeCoercionBulkUpsert >> TestProtocols::TestConnectProtocol >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] >> KqpPg::EmptyQuery >> KqpPg::ValuesInsert >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableInsert >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2024-11-21T17:49:14.369203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:14.369856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:14.369898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001cac/r3tmp/tmpZmoPcI/pdisk_1.dat 2024-11-21T17:49:14.473582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.489717Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:14.531834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:14.531866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:14.542353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:14.647203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.661418Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:14.661638Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:14.661718Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:14.661772Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:14.669929Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:14.670097Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:14.670120Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:14.670284Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:14.670294Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:14.670300Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:14.670344Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:14.674110Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:14.674179Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:14.674201Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:14.674207Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:14.674212Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:14.674218Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.674340Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.674348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.674492Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:14.674514Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:14.674528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.674533Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.674539Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:14.674546Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.674553Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:14.674560Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.674566Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:14.674570Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:14.674575Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:14.674580Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:14.674602Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:14.674607Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:14.674630Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:14.674690Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:14.674701Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:14.674719Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:14.674728Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:14.674732Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:14.674738Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:14.674742Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.674791Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:14.674795Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:14.674799Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:14.674802Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.674813Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:14.674817Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:14.674821Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:14.674824Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.674829Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:14.675064Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:14.675073Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:14.685310Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:14.685337Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.685343Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.685361Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:14.685379Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:14.861163Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.861185Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.861193Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:14.861214Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:14.861220Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:14.861247Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.861255Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:14.861259Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:14.861264Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:14.862011Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:14.862036Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.862167Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.862177Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.862184Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.862192Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:14.862197Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.862206Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:43.516280Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:43.516283Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:43.516288Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:49:43.516291Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:43.516294Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:43.516297Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T17:49:43.516300Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2024-11-21T17:49:43.516304Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:43.516307Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2024-11-21T17:49:43.516310Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T17:49:43.516312Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2024-11-21T17:49:43.516328Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T17:49:43.516331Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:43.516334Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T17:49:43.516337Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:43.516340Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2024-11-21T17:49:43.516344Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2024-11-21T17:49:43.516347Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2024-11-21T17:49:43.516350Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2024-11-21T17:49:43.516354Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:43.516357Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:43.516359Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T17:49:43.516362Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T17:49:43.516376Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T17:49:43.516379Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T17:49:43.516383Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T17:49:43.516386Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T17:49:43.516389Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:43.516392Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T17:49:43.516395Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T17:49:43.516398Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:43.516419Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2024-11-21T17:49:43.516422Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T17:49:43.516424Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:49:43.516427Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:49:43.516431Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:43.516434Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:49:43.516437Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2024-11-21T17:49:43.516440Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:43.516442Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:43.516445Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T17:49:43.516447Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T17:49:43.526894Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2024-11-21T17:49:43.526921Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T17:49:43.526939Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:43.526947Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:49:43.526970Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:43.526983Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:43.527015Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T17:49:43.527019Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T17:49:43.527025Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:49:43.527030Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:43.527036Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:43.527043Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:49:43.527427Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T17:49:43.527450Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:43.527462Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:43.527483Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:43.527488Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:49:43.527493Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:43.527497Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:43.527506Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T17:49:43.527511Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:43.527514Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:43.527518Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:43.527521Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:43.527537Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2024-11-21T17:49:43.527621Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:49:43.527628Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2024-11-21T17:49:43.527635Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 10} after executionsCount# 1 2024-11-21T17:49:43.527643Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:43.527674Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 10} finished in read 2024-11-21T17:49:43.527684Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:43.527688Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:43.527691Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:43.527695Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:43.527707Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:43.527709Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:43.527715Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T17:49:43.527720Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:49:43.527736Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns >> KqpPg::NoTableQuery [GOOD] >> KqpPg::PgCreateTable >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> KqpPg::InsertNoTargetColumns_Simple >> KqpPg::CopyTableSerialColumns [GOOD] >> KqpPg::CreateIndex |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> KqpPg::EmptyQuery [GOOD] >> KqpPg::DuplicatedColumns |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::NestedDirs >> KqpPg::InsertNoTargetColumns_Simple [GOOD] >> KqpPg::InsertNoTargetColumns_Serial >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2024-11-21T17:49:14.968877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:14.969396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:14.969429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ca6/r3tmp/tmps8pXTs/pdisk_1.dat 2024-11-21T17:49:15.074705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:15.096570Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:15.139877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:15.139907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:15.152134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:15.256206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:15.270836Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:15.271079Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:15.271172Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:15.271230Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:15.279001Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:15.279169Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:15.279190Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:15.279344Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:15.279352Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:15.279358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:15.279399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:15.283536Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:15.283608Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:15.283628Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:15.283633Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:15.283638Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:15.283644Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:15.283765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.283772Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.283885Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:15.283907Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:15.283919Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.283924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.283930Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:15.283936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:15.283942Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:15.283949Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:15.283954Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:15.283957Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:15.283962Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:15.283966Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:15.284001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:15.284005Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:15.284025Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:15.284075Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:15.284085Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:15.284100Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:15.284108Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:15.284111Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:15.284116Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:15.284119Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:15.284161Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:15.284164Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:15.284168Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:15.284171Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:15.284180Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:15.284184Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:15.284187Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:15.284190Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:15.284195Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:15.284419Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:15.284430Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:15.294712Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:15.294742Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:15.294749Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:15.294770Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:15.294789Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:15.469019Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.469036Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.469104Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:15.469123Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:15.469127Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:15.469148Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:15.469154Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:15.469158Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:15.469162Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:15.469865Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:15.469881Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:15.469992Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.469998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.470002Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:15.470008Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:15.470012Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:15.470017Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:44.337312Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:44.337315Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:44.337319Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:49:44.337323Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:44.337326Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:44.337330Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T17:49:44.337333Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2024-11-21T17:49:44.337337Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:44.337341Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2024-11-21T17:49:44.337344Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T17:49:44.337347Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2024-11-21T17:49:44.337361Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T17:49:44.337365Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:44.337368Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T17:49:44.337371Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:44.337374Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2024-11-21T17:49:44.337378Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2024-11-21T17:49:44.337381Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2024-11-21T17:49:44.337385Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2024-11-21T17:49:44.337389Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:44.337392Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:44.337395Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T17:49:44.337398Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T17:49:44.337410Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T17:49:44.337414Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T17:49:44.337418Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T17:49:44.337421Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T17:49:44.337425Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:44.337428Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T17:49:44.337431Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T17:49:44.337435Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:44.337457Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2024-11-21T17:49:44.337460Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T17:49:44.337463Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:49:44.337466Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:49:44.337470Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T17:49:44.337473Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:49:44.337477Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2024-11-21T17:49:44.337480Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:44.337483Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:49:44.337485Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T17:49:44.337488Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T17:49:44.347819Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2024-11-21T17:49:44.347843Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T17:49:44.347857Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:44.347864Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:49:44.347894Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:44.347903Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:44.347978Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T17:49:44.347981Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T17:49:44.347986Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:49:44.347988Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:49:44.347992Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:49:44.347998Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:49:44.348291Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T17:49:44.348311Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:49:44.348321Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T17:49:44.348337Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:44.348340Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:49:44.348343Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:49:44.348346Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:49:44.348352Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T17:49:44.348356Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:44.348357Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:49:44.348360Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:49:44.348362Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:49:44.348372Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2024-11-21T17:49:44.348437Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:49:44.348442Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2024-11-21T17:49:44.348446Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 3} after executionsCount# 1 2024-11-21T17:49:44.348451Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:49:44.348476Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 3} finished in read 2024-11-21T17:49:44.348484Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:44.348485Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:49:44.348488Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:49:44.348490Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:49:44.348499Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:49:44.348501Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:49:44.348505Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T17:49:44.348508Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:49:44.348520Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn >> KqpPg::DuplicatedColumns [GOOD] >> KqpPg::DropTablePg |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2024-11-21T17:49:18.069314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791134521948069:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:18.069403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017a0/r3tmp/tmpuIRkOa/pdisk_1.dat 
2024-11-21T17:49:18.131552Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30968, node 1 2024-11-21T17:49:18.139654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:18.139679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:18.139681Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:18.139726Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:18.168974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:18.169017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:18.170540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:18.199197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:18.206524Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T17:49:18.442468Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T17:49:18.442561Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791134521948508:2298], Start check tables existence, number paths: 2 2024-11-21T17:49:18.442578Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T17:49:18.442582Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T17:49:18.442586Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T17:49:18.445046Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjIyZTIwNGItYmFlZDY5ODQtMjc3ZDIzZC04NTk5MDE3MQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjIyZTIwNGItYmFlZDY5ODQtMjc3ZDIzZC04NTk5MDE3MQ== 2024-11-21T17:49:18.445207Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791134521948508:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T17:49:18.445216Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791134521948508:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T17:49:18.445220Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791134521948508:2298], Successfully finished 2024-11-21T17:49:18.445291Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjIyZTIwNGItYmFlZDY5ODQtMjc3ZDIzZC04NTk5MDE3MQ==, ActorId: [1:7439791134521948525:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:49:18.447364Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T17:49:18.448525Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791134521948527:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T17:49:18.449327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:18.449889Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791134521948527:2284], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T17:49:18.450404Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791134521948527:2284], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T17:49:18.451528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791134521948527:2284], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:18.543122Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791134521948527:2284], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T17:49:18.544121Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791134521948527:2284], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T17:49:18.544760Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE= 2024-11-21T17:49:18.544831Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE=, ActorId: [1:7439791134521948586:2300], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:49:18.544883Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE=, ActorId: [1:7439791134521948586:2300], ActorState: ReadyState, TraceId: 01jd7xfetg1r1qexae8knktsba, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439791134521948585:2322] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T17:49:18.544892Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T17:49:18.544895Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T17:49:18.544908Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439791134521948586:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE= 2024-11-21T17:49:18.544917Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791134521948588:2301], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T17:49:18.544936Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439791134521948589:2302], Database: /Root, Start database fetching 2024-11-21T17:49:18.545922Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439791134521948589:2302], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T17:49:18.545943Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T17:49:18.545956Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439791134521948598:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE=, Start pool fetching 2024-11-21T17:49:18.545962Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791134521948599:2304], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T17:49:18.546103Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791134521948588:2301], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T17:49:18.546118Z node 
1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791134521948599:2304], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T17:49:18.546123Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T17:49:18.546125Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T17:49:18.546187Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439791134521948598:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE=, Pool info successfully resolved 2024-11-21T17:49:18.546201Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439791134521948602:2305], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T17:49:18.546210Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE= 2024-11-21T17:49:18.546223Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439791134521948602:2305], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7439791134521948586:2300], session id: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE= 2024-11-21T17:49:18.546239Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=M2YxZGM5M2YtOGFjZDIxNjgtYTdhYTVlOTktNjVhZWM0ZTE= 2024-11-21T17:49:18.546247Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T17:49:18.546252Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service tables creation 2024-11-21T17:49:18.546298Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439791134521948602:2305], DatabaseId: /Root, PoolId: sample_pool_id, Update ... 
n, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:44.313076Z node 6 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T17:49:44.533928Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T17:49:44.534000Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439791245980759526:2298], Start check tables existence, number paths: 2 2024-11-21T17:49:44.534125Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T17:49:44.534136Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T17:49:44.534142Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T17:49:44.534337Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439791245980759526:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T17:49:44.534352Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439791245980759526:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T17:49:44.534357Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439791245980759526:2298], Successfully finished 2024-11-21T17:49:44.534367Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T17:49:44.534368Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU= 2024-11-21T17:49:44.534380Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU=, ActorId: [6:7439791245980759542:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:49:44.535158Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791245980759544:2283], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T17:49:44.535907Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.536255Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791245980759544:2283], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T17:49:44.536294Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791245980759544:2283], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T17:49:44.537993Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791245980759544:2283], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:44.632578Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791245980759544:2283], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T17:49:44.633311Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791245980759544:2283], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T17:49:44.633953Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY= 2024-11-21T17:49:44.634000Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:49:44.634036Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T17:49:44.634047Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T17:49:44.634061Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: ReadyState, TraceId: 01jd7xg89t2z1dfap4x5jkm1rs, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7439791245980759601:2320] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T17:49:44.634062Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791245980759604:2301], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T17:49:44.634069Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [6:7439791245980759602:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY= 2024-11-21T17:49:44.634079Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439791245980759605:2302], Database: /Root, Start database fetching 2024-11-21T17:49:44.634143Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439791245980759605:2302], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T17:49:44.634164Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T17:49:44.634173Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7439791245980759614:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, Start pool fetching 2024-11-21T17:49:44.634178Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791245980759615:2304], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T17:49:44.634308Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791245980759615:2304], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T17:49:44.634325Z node 
6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7439791245980759614:2303], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, Pool info successfully resolved 2024-11-21T17:49:44.634330Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY= 2024-11-21T17:49:44.634334Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T17:49:44.634337Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791245980759604:2301], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T17:49:44.634367Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T17:49:44.634377Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY= 2024-11-21T17:49:44.634380Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439791245980759618:2305], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T17:49:44.634397Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: ExecuteState, TraceId: 01jd7xg89t2z1dfap4x5jkm1rs, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2024-11-21T17:49:44.634442Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: ExecuteState, TraceId: 01jd7xg89t2z1dfap4x5jkm1rs, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2024-11-21T17:49:44.634454Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7439791245980759602:2300], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY= 2024-11-21T17:49:44.634461Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: CleanupState, TraceId: 01jd7xg89t2z1dfap4x5jkm1rs, EndCleanup, isFinal: 1 2024-11-21T17:49:44.634486Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: CleanupState, TraceId: 01jd7xg89t2z1dfap4x5jkm1rs, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7439791245980759093:2190] 2024-11-21T17:49:44.634495Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: unknown state, TraceId: 01jd7xg89t2z1dfap4x5jkm1rs, Cleanup temp tables: 0 2024-11-21T17:49:44.634515Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=YzcyZTIxMDEtYzNhOWEyN2ItZWUwNmU2MDMtYjJkYTFkYWY=, ActorId: [6:7439791245980759602:2300], ActorState: unknown state, TraceId: 01jd7xg89t2z1dfap4x5jkm1rs, Session actor destroyed 2024-11-21T17:49:44.634539Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439791245980759618:2305], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T17:49:44.635464Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU=, ActorId: [6:7439791245980759542:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:49:44.635477Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU=, ActorId: [6:7439791245980759542:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:49:44.635479Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU=, ActorId: [6:7439791245980759542:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:49:44.635482Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU=, ActorId: [6:7439791245980759542:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:49:44.635496Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MmRiYTI4Y2MtMjNmMzlmNDUtNTdmMDgwNjMtMjU5NjkyNjU=, ActorId: [6:7439791245980759542:2299], ActorState: unknown state, Session actor destroyed >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence >> KqpPg::InsertNoTargetColumns_Serial [GOOD] >> KqpPg::InsertValuesFromTableWithDefault |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for 
TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:49:24.908922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:24.908943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:24.908949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:24.908954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:24.908967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:24.908971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:24.908979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:24.909045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:24.919679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:24.919696Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:24.922299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:24.923130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:24.923157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:24.924294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:24.924438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:24.924521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:24.924593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:24.925398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:24.925639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:24.925648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:24.925676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:24.925683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:24.925689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:24.925701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.926793Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 
2024-11-21T17:49:24.940770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:24.940850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.940906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:24.940950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:24.940958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.941584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:24.941605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:24.941642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.941652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:24.941656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:24.941660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:24.942019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.942029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:24.942033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:24.942386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.942396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.942401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:24.942419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:24.942964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:24.943396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:24.943447Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:24.943636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:24.943664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:24.943682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:24.943746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:24.943753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:24.943781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:24.943792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:24.944156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:24.944163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:24.944202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:24.944207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:24.944306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:24.944313Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:24.944324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:24.944335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:24.944341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:24.944346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:24.944350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:24.944354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:24.944364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:24.944370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:24.944374Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:24.944648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:24.944661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:24.944666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:24.944670Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:24.944674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:24.944689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... athType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:45.361842Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:45.361871Z node 15 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex" took 31us result status StatusSuccess 2024-11-21T17:49:45.362009Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 
419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:45.362069Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:45.362095Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" took 28us result status StatusSuccess 2024-11-21T17:49:45.362192Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::InsertValuesFromTableWithDefault [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropIndex >> KqpPg::JoinWithQueryService-StreamLookup |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpPg::AlterSequence [GOOD] >> 
KqpPg::AlterColumnSetDefaultFromSequence >> KqpPg::DropIndex [GOOD] >> KqpPg::DropSequence >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::Insert_Serial >> KqpPg::InsertValuesFromTableWithDefaultAndCast [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceWithReboots |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> KqpPg::InsertFromSelect_Simple [GOOD] >> KqpPg::InsertFromSelect_NoReorder |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> KqpPg::Insert_Serial [GOOD] >> TExternalDataSourceTestReboots::CreateExternalDataSourceWithReboots >> KqpPg::InsertValuesFromTableWithDefaultBool [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase >> KqpPg::InsertValuesFromTableWithDefaultText >> KqpPg::DropSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots2 >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] [GOOD] >> KqpPg::DropTableIfExists >> DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit >> KqpPg::InsertFromSelect_NoReorder [GOOD] |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpPg::InsertFromSelect_Serial >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull >> KqpPg::DropTableIfExists_GenericQuery >> KqpPg::InsertValuesFromTableWithDefaultText [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::InsertFromSelect_Serial [GOOD] >> KqpPg::EquiJoin >> KqpPg::InsertNoTargetColumns_SerialNotNull [GOOD] >> KqpPg::ValuesInsert [GOOD] >> 
KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::TableInsert [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull [GOOD] >> KqpPg::CreateTableSerialColumns >> PgCatalog::PgType >> KqpPg::TableArrayInsert >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull |80.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots >> KqpPg::CheckPgAutoParams [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull [GOOD] >> KqpPg::CreateTableSerialColumns [GOOD] >> KqpPg::LongDomainName >> PgCatalog::PgType [GOOD] >> KqpPg::TypeCoercionInsert [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> KqpPg::CreateUniqPgColumn >> PgCatalog::InformationSchema >> KqpPg::TableSelect |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> KqpPg::LongDomainName [GOOD] >> KqpPg::CreateUniqPgColumn [GOOD] >> KqpPg::CreateUniqComplexPgColumn >> PgCatalog::InformationSchema [GOOD] |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> YdbTableSplit::SplitByLoadWithReads [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::DropTableIfExists_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 25548, MsgBus: 32195 2024-11-21T17:49:43.944346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791243491196005:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:43.944369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa5/r3tmp/tmpjlDEDz/pdisk_1.dat 2024-11-21T17:49:43.990048Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25548, node 1 2024-11-21T17:49:44.005340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.005349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.005350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.005385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32195 TClient is connected to server localhost:32195 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:44.044652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.044681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.045794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:44.053930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.274562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791247786163895:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.274599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791247786163906:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.274606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.275227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.276685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791247786163909:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 4936, MsgBus: 11709 2024-11-21T17:49:44.611433Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791246181020050:2242];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa5/r3tmp/tmpARD0oz/pdisk_1.dat 2024-11-21T17:49:44.617804Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:49:44.619547Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4936, node 2 2024-11-21T17:49:44.634208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.634222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.634224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.634268Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11709 TClient is connected to server localhost:11709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:44.711260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.711301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.711558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.712429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:44.928768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791246181020458:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.928837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791246181020447:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.928845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.929287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.930571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791246181020461:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 10523, MsgBus: 1146 2024-11-21T17:49:45.332578Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791248883420022:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:45.332664Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa5/r3tmp/tmpjVOCld/pdisk_1.dat 2024-11-21T17:49:45.342077Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10523, node 3 2024-11-21T17:49:45.359652Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:45.359665Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:45.359667Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:45.359698Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1146 TClient is connected to server localhost:1146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:45.433173Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:45.433201Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:45.434364Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:45.435436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:45.448298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:45.463262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Oper ... 
xists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439791262088704733:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:48.060268Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa5/r3tmp/tmpNMaj9w/pdisk_1.dat 2024-11-21T17:49:48.074206Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17861, node 7 2024-11-21T17:49:48.085096Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:48.085109Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:48.085111Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:48.085151Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15742 TClient is connected to server localhost:15742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:48.160898Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:48.160932Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:48.161768Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:48.163683Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:48.365418Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791262088705332:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.365448Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.369009Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791262088705360:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.369030Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.370909Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:48.378064Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791262088705448:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.378080Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.378095Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791262088705453:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.378583Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:49:48.382953Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439791262088705455:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:49:48.497545Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2024-11-21T17:49:48.498785Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7439791262088705600:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:49:48.498860Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MTAzMGUxYmItZDgyZTcxZDYtMjIyNmM1MS1iZTU1YmNmMQ==, ActorId: [7:7439791262088705593:2342], ActorState: ExecuteState, TraceId: 01jd7xgc2g8hqvz712czq3fama, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 26540, MsgBus: 5715 2024-11-21T17:49:48.733055Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439791262532475884:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:48.733101Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa5/r3tmp/tmpSrjXQM/pdisk_1.dat 2024-11-21T17:49:48.743894Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26540, node 8 2024-11-21T17:49:48.750929Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:48.750942Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:48.750944Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:48.750981Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5715 TClient is connected to server localhost:5715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:48.833403Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:48.833435Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:48.834486Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:48.836283Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:48.997801Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791262532476483:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.997814Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791262532476492:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.997819Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.998329Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:48.999648Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439791262532476497:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:49.085322Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:49.109333Z node 8 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 8, TabletId: 72075186224037888 not found 2024-11-21T17:49:49.109809Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439791266827444042:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:49:49.109898Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=ODE4N2EzOTUtZDk0NzQ2MWUtMmZjNTZhZWMtMWM4Y2NiODM=, ActorId: [8:7439791266827444039:2336], ActorState: ExecuteState, TraceId: 01jd7xgcnk4sdeqyzptczhqbr4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpPg::InsertNoTargetColumns_ColumnOrder [GOOD] >> TCdcStreamTests::MeteringDedicated [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> KqpPg::PgCreateTable [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> KqpPg::CreateUniqComplexPgColumn [GOOD] >> KqpPg::CreateTempTable >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots [GOOD] >> Secret::DeactivatedQueryService [GOOD] >> PgCatalog::CheckSetConfig >> KqpPg::InsertNoTargetColumns_NotOneSize >> TCdcStreamTests::ChangeOwner >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> KqpPg::PgUpdate >> RetryPolicy::RetryWithBatching >> KqpPg::CreateTempTable [GOOD] >> KqpPg::TableArrayInsert [GOOD] >> KqpPg::TableSelect [GOOD] >> KqpPg::Returning >> PgCatalog::CheckSetConfig [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize [GOOD] >> TCdcStreamTests::ChangeOwner [GOOD] >> KqpPg::PgUpdate [GOOD] >> KqpPg::CreateTempTableSerial >> KqpPg::V1CreateTable >> PgCatalog::PgDatabase >> KqpPg::InsertNoTargetColumns_Alter >> TCdcStreamTests::DropIndexWithStream >> KqpPg::PgUpdateCompoundKey >> PgCatalog::PgDatabase [GOOD] >> PgCatalog::PgRoles >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::InsertNoTargetColumns_Alter [GOOD] >> TCdcStreamTests::DropIndexWithStream [GOOD] >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> KqpPg::DeleteWithQueryService >> Secret::Deactivated [GOOD] >> KqpPg::Returning [GOOD] >> KqpPg::DeleteWithQueryService [GOOD] >> KqpPg::PgUpdateCompoundKey [GOOD] >> KqpPg::ExplainColumnsReorder >> TCdcStreamTests::DropTableWithIndexWithStream >> KqpQueryPerf::Upsert-QueryService >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceWithReboots [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> PgCatalog::PgTables [GOOD] >> KqpPg::SelectIndex >> KqpPg::PgAggregate >> KqpPg::CreateTableIfNotExists_GenericQuery >> KqpPg::ExplainColumnsReorder [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> KqpQueryPerf::Upsert-QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> KqpPg::SelectIndex [GOOD] >> KqpPg::PgAggregate [GOOD] >> KqpPg::MkqlTerminate >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::TableDeleteAllData |80.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: 2024-11-21T17:49:40.711645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:294:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c77/r3tmp/tmpParcid/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22124, node 1 TClient is connected to server localhost:10471 2024-11-21T17:49:40.836264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:40.852983Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:40.853618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:40.853634Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:40.853637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:40.853716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:40.895422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:40.895462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:40.906036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T17:49:53.440105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:648:2541], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.440126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:662:2550], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.440182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.441077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:49:53.443296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:666:2553], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:49:53.468219Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:727:2594], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2024-11-21T17:49:53.468543Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjMxOGE2NWYtNmJmZDdkZDUtNzYxM2Y5YjAtNzNjNjUwNTA=, ActorId: [1:645:2538], ActorState: ExecuteState, TraceId: 01jd7xggwy85kkj1tf6hj915x0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |80.4%| [TA] $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest |80.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 3960, MsgBus: 5638 2024-11-21T17:49:44.035711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791246882771978:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:44.036046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a98/r3tmp/tmpUucWCy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3960, node 1 2024-11-21T17:49:44.102086Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:44.106136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.106150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.106151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.106177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5638 2024-11-21T17:49:44.135538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.135570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.136685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:49:44.167314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 16 2024-11-21T17:49:44.365692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgbool; DECLARE $key1 as pgbool; DECLARE $value0 as pgbool; DECLARE $value1 as pgbool; INSERT INTO `Pg16_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.426291Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.426751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791246882772667:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.426757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791246882772675:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.426768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.427278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.428797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791246882772682:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } f f t t 2024-11-21T17:49:44.561976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgbool; DECLARE $value1 as _pgbool; INSERT INTO `Pg1000_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.569182Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {t,t} 18 2024-11-21T17:49:44.616469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgchar; DECLARE $key1 as pgchar; DECLARE $value0 as pgchar; DECLARE $value1 as pgchar; INSERT INTO `Pg18_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.626375Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T17:49:44.669372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgchar; DECLARE $value1 as _pgchar; INSERT INTO `Pg1002_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.679699Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 21 2024-11-21T17:49:44.730242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as pgint2; DECLARE $value1 as pgint2; INSERT INTO `Pg21_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.738233Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T17:49:44.793297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint2; DECLARE $value1 as _pgint2; INSERT INTO `Pg1005_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); {0,0} {1,1} 23 2024-11-21T17:49:44.851584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint4; DECLARE $key1 as pgint4; DECLARE $value0 as pgint4; DECLARE $value1 as pgint4; INSERT INTO `Pg23_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.860176Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T17:49:44.908582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint4; DECLARE $value1 as _pgint4; INSERT INTO `Pg1007_t` (key, value) VALUES 
($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.916150Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 20 2024-11-21T17:49:44.965562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint8; DECLARE $key1 as pgint8; DECLARE $value0 as pgint8; DECLARE $value1 as pgint8; INSERT INTO `Pg20_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:44.972990Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2024-11-21T17:49:45.020915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgint8; DECLARE $value1 as _pgint8; INSERT INTO `Pg1016_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:45.029753Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {1,1} 700 2024-11-21T17:49:45.079200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgfloat4; DECLARE $key1 as pgfloat4; DECLARE $value0 as pgfloat4; DECLARE $value1 as pgfloat4; INSERT INTO `Pg700_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:45.091617Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2024-11-21T17:49:45.147019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715725:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgint2; DECLARE $key1 as pgint2; DECLARE $value0 as _pgfloat4; DECLARE $value1 as _pgfloat4; INSERT INTO `Pg1021_t` (key, value) VALUES ($key0, $value0), ($key1, $value1); 2024-11-21T17:49:45.156532Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0.5,0.5} {1.5,1.5} 701 2024-11-21T17:49:45.210375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715731:0, at schemeshard: 72057594046644480 --!syntax_v1 DECLARE $key0 as pgfloat8; ... oadService] [TPoolFetcherActor] ActorId: [8:7439791278816741120:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.837592Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.838224Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:52.839840Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439791278816741123:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 24185, MsgBus: 3238 2024-11-21T17:49:53.059221Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439791285231024999:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:53.059357Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a98/r3tmp/tmpiPo9xj/pdisk_1.dat 2024-11-21T17:49:53.066126Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24185, node 9 2024-11-21T17:49:53.082224Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:53.082235Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:53.082237Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:53.082269Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3238 TClient is connected to server localhost:3238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:53.159576Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:53.159602Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:53.160678Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:53.160859Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:53.293731Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439791285231025585:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.293750Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439791285231025599:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.293756Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.294298Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:53.295625Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439791285231025609:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } Trying to start YDB, gRPC: 61977, MsgBus: 30891 2024-11-21T17:49:53.524250Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439791286055492184:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:53.524265Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a98/r3tmp/tmpcCzMzf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61977, node 10 2024-11-21T17:49:53.539066Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:53.546093Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:53.546105Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:53.546107Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:53.546139Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30891 TClient is connected to server localhost:30891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:53.624389Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:53.624415Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:53.625507Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:53.626671Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:53.773826Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7439791286055492763:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.773842Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7439791286055492782:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.773846Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.774441Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:53.775853Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439791286055492792:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:53.832322Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.838431Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:54.034190Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2024-11-21T17:49:54.035517Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:49:54.109463Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7439791290350460641:2401], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jd7xghha0q4pbqt30cwp23sy. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=10&id=NTFkNmRiYWQtMTkxNzZlMzEtYTU1ODQxYTgtYmY3MWNiOGI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2024-11-21T17:49:54.109581Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7439791290350460642:2402], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jd7xghha0q4pbqt30cwp23sy. SessionId : ydb://session/3?node_id=10&id=NTFkNmRiYWQtMTkxNzZlMzEtYTU1ODQxYTgtYmY3MWNiOGI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [10:7439791290350460638:2398], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:49:54.109831Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NTFkNmRiYWQtMTkxNzZlMzEtYTU1ODQxYTgtYmY3MWNiOGI=, ActorId: [10:7439791290350460632:2398], ActorState: ExecuteState, TraceId: 01jd7xghha0q4pbqt30cwp23sy, Create QueryResponse for error on request, msg: >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots2 [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> KqpPg::NoSelectFullScan >> KqpPg::NoSelectFullScan [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: 2024-11-21T17:49:41.601976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:294:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c6c/r3tmp/tmpZRqk4A/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12405, node 1 TClient is connected to server localhost:2811 2024-11-21T17:49:41.724345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:41.742039Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:41.742844Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:41.742859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:41.742863Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:41.742942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:41.784643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:41.784671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:41.795264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T17:49:54.363471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:659:2550], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:54.363507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::DropStreamOnIndexTable[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:18.918725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:18.918745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:18.918749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:18.918753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:18.918766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:18.918770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:18.918778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:18.918847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:18.932334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:18.932352Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:18.934266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:18.934352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:18.934369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:18.938163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:18.938261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:18.938374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:18.938887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:18.940281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:18.940498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:18.940507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:18.940516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:18.940534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:18.940540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:18.940571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:18.942377Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:18.955610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:18.955683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.955728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:18.955763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:18.955768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.956490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:18.956517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:18.956576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.956586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:18.956590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:18.956596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 2 -> 3 2024-11-21T17:49:18.957104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.957117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:18.957121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:18.957467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.957476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.957480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:18.957484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:18.958026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:18.958414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:18.958462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:18.958661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:18.958688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:18.958694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:18.958746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:18.958752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:18.958778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:18.958788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:18.959165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:18.959174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T17:49:18.959213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:18.959218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:18.959283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:18.959290Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:18.959299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:18.959303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:18.959308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:18.959312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:18.959316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:18.959320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:18.959331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:18.959336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:18.959340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... tablet: 72075186233409546, partId: 0 2024-11-21T17:49:48.024677Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 272 } } 2024-11-21T17:49:48.024689Z node 81 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1004 Step: 5000005 OrderId: 1004 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 272 } } 2024-11-21T17:49:48.024763Z node 81 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.024771Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.024774Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:49:48.024778Z node 81 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:49:48.024781Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:49:48.024791Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T17:49:48.024842Z node 81 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.024848Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.024850Z node 81 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:49:48.024852Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 3/4, is published: true 2024-11-21T17:49:48.024871Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 347892353301 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:49:48.024874Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409546, partId: 0 2024-11-21T17:49:48.024882Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 347892353301 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:49:48.024885Z node 81 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:49:48.024889Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1004:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 340 RawX2: 347892353301 } Origin: 72075186233409546 State: 2 TxId: 1004 Step: 0 Generation: 2 2024-11-21T17:49:48.024897Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1004:0, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:48.024899Z node 81 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.024902Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1004:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:49:48.024905Z node 81 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 240 2024-11-21T17:49:48.025562Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:49:48.025580Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:49:48.026170Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.026191Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.026200Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.026209Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.026225Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.026241Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.026247Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:48.026261Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.026325Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.026331Z node 81 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:49:48.026342Z node 81 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 4/4 2024-11-21T17:49:48.026346Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T17:49:48.026351Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 4/4, is published: true 2024-11-21T17:49:48.026356Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 4/4 2024-11-21T17:49:48.026361Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:49:48.026365Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:49:48.026387Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:49:48.026392Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:1 2024-11-21T17:49:48.026395Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:1 2024-11-21T17:49:48.026400Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:48.026402Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:2 2024-11-21T17:49:48.026405Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:2 2024-11-21T17:49:48.026409Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:49:48.026412Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:3 2024-11-21T17:49:48.026415Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:3 2024-11-21T17:49:48.026422Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T17:49:48.026488Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:48.026492Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:49:48.026501Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:49:48.026505Z node 81 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:49:48.026508Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:49:48.026984Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:49:48.027063Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:49:48.027071Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:49:48.027125Z node 81 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:49:48.027141Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:49:48.027144Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [81:815:2708] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:49:48.027203Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:48.027239Z node 81 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 47us result status StatusPathDoesNotExist 2024-11-21T17:49:48.027277Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table/Index/indexImplTable" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeSyncIndexImplTable } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 
72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:45.704469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:45.704489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:45.704492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:45.704495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:45.704506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:45.704508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:45.704514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:45.704571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:45.713361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:45.713379Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:45.715976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:45.716101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:45.716133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:45.718675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:45.718787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:45.718883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:45.719086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:45.719937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:45.720276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:45.720291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:45.720306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:45.720314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:49:45.720319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:45.720361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:45.721588Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:45.738151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:45.738240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:45.738310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:45.738378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:45.738388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:45.739260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:45.739290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:45.739343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:45.739355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:45.739360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:45.739365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:45.739830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:45.739843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:45.739848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:45.740207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:45.740217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:45.740224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:45.740249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:45.740825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:45.741258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:45.741319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:45.741516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:45.741540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:45.741546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:45.741597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:45.741604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:45.741636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:45.741647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:45.742112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:45.742129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:45.742171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:45.742176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:45.742249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:45.742255Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:45.742273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:45.742276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 1 ready parts: 1/1 2024-11-21T17:49:45.742280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:45.742284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:45.742287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:45.742290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:45.742299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:45.742303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:45.742306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 4, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.645637Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:53.645640Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:49:53.645642Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:53.645698Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.645704Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.645705Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:53.645708Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:49:53.645710Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:53.645714Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:49:53.646223Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T17:49:53.646249Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:49:53.646426Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:53.646449Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 
72057594046316545 AckTo { RawX1: 129 RawX2: 141733922920 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:53.646457Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1003:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T17:49:53.646479Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:53.646494Z node 33 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:49:53.646518Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:53.646526Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:53.646787Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.646860Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:49:53.647297Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:53.647307Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:53.647331Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:53.647353Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:53.647357Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:49:53.647362Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:49:53.647433Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:49:53.647443Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:49:53.647456Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:49:53.647460Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:53.647465Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:49:53.647470Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:53.647474Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:53.647478Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:53.647493Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:53.647500Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, 
publications: 2, subscribers: 0 2024-11-21T17:49:53.647506Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:49:53.647515Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:49:53.647631Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.647644Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.647649Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:53.647653Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:49:53.647657Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:53.647710Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:53.647715Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:53.647724Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:53.647759Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.647768Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.647772Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:53.647775Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:49:53.647779Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:53.647787Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:49:53.648596Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:53.648622Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:53.648631Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 
TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:49:53.648675Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:49:53.648682Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:49:53.648755Z node 33 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:49:53.648774Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:53.648783Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [33:350:2342] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:53.648851Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:53.648880Z node 33 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 39us result status StatusPathDoesNotExist 2024-11-21T17:49:53.648919Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::SimpleDropExternalDataSourceWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:48.335453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:48.335469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:48.335472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:48.335476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:48.335485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:48.335490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:48.335498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:48.335563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:48.342947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:48.342959Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:48.344442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:48.344531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:48.344554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:48.346258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:48.346315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:48.346388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:48.346511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:48.347023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:48.347278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:48.347290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:48.347304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:48.347311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:48.347317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:48.347354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:48.348333Z node 1 :HIVE INFO: [72057594037968897] 
started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:48.361451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:48.361517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.361570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:48.361623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:48.361630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.362252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:48.362282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:48.362319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.362328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:48.362332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:48.362337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:48.362780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.362793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:48.362797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:48.363140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.363149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.363153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:48.363157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:48.363724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:48.364147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:48.364196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:48.364389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:48.364414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:48.364421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:48.364472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:48.364479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:48.364502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:48.364513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:48.364863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:48.364869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:48.364891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:48.364894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:48.364938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.364942Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:48.364951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:48.364953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:48.364957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:48.364960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:48.364964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:48.364966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:48.364973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:48.364977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:48.364979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 4, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.130237Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:56.130241Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:49:56.130245Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:56.130342Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.130352Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.130356Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:56.130360Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:49:56.130363Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:49:56.130373Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:49:56.130934Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T17:49:56.130961Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:49:56.131131Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:56.131154Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 141733922920 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:56.131161Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1003:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T17:49:56.131179Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:56.131193Z node 33 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:49:56.131215Z node 33 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:56.131223Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:56.131452Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.131535Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:49:56.131912Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:56.131920Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:56.131943Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:56.131964Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:56.131968Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:49:56.131972Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:49:56.132020Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:49:56.132028Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:49:56.132038Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:49:56.132042Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:56.132047Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:49:56.132052Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:56.132056Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:56.132059Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:56.132069Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:56.132073Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2024-11-21T17:49:56.132077Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:49:56.132083Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:49:56.132150Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.132160Z node 33 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.132164Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:56.132168Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:49:56.132172Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:56.132223Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:56.132251Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:49:56.132259Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:56.132299Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.132307Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.132310Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:56.132314Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:49:56.132317Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:56.132326Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:49:56.133087Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:56.133114Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:56.133121Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:49:56.133162Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:49:56.133169Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:49:56.133234Z node 33 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:49:56.133252Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 
1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:56.133261Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [33:350:2342] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:56.133326Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:56.133351Z node 33 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 34us result status StatusPathDoesNotExist 2024-11-21T17:49:56.133385Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3818, MsgBus: 25292 2024-11-21T17:49:55.365031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791294742604174:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:55.365165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c3e/r3tmp/tmpyWwCJw/pdisk_1.dat 2024-11-21T17:49:55.409402Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3818, node 1 2024-11-21T17:49:55.420990Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:55.421001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:55.421002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:55.421045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25292 TClient is connected to server localhost:25292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:55.465862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:55.465884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:55.466980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:55.468217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.476549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.536743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.551112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.559469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.597061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791294742605717:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.597089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.620576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.626042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.635702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.643050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.697522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.705729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.714551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791294742606211:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.714573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.714577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791294742606216:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.715117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:55.718906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791294742606218:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25716, MsgBus: 26452 2024-11-21T17:49:55.282531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791293339305111:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:55.282652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c40/r3tmp/tmpCPUStv/pdisk_1.dat 2024-11-21T17:49:55.327729Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25716, node 1 2024-11-21T17:49:55.332880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:55.332903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:55.332904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:55.332928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26452 TClient is connected to server localhost:26452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:55.373931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.383337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:55.383380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:55.384469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:55.385488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.398899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:55.413823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.426740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.540307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791293339306655:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.540341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.566006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.621186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.675354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.684492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.691940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.698931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:55.707176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791293339307174:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.707189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791293339307179:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.707193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.707639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:55.712253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791293339307181:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::NoSelectFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 8672, MsgBus: 13887 2024-11-21T17:49:43.731640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791243647278230:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:43.731694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b68/r3tmp/tmpqNtUOJ/pdisk_1.dat 2024-11-21T17:49:43.793122Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8672, node 1 2024-11-21T17:49:43.810112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:43.810142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:43.810143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:43.810176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13887 2024-11-21T17:49:43.831904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:43.831945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:43.833089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:43.875249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.062025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791247942246126:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.062024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791247942246137:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.062047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.062802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.064397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791247942246140:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } Trying to start YDB, gRPC: 17619, MsgBus: 12353 2024-11-21T17:49:44.399125Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791245386187750:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:44.399142Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b68/r3tmp/tmpU2nWzD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17619, node 2 2024-11-21T17:49:44.417419Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:44.427040Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.427053Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.427055Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.427096Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12353 TClient is connected to server localhost:12353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:44.499550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.499582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.500598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:44.501834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 16 --!syntax_pg CREATE TABLE Pg16 ( key bool PRIMARY KEY, value bool ); 2024-11-21T17:49:44.674942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791245386188350:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.674994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.676365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg16 (key, value) VALUES ( 'false'::bool, 'false'::bool ) 2024-11-21T17:49:44.735849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791245386188452:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.735876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791245386188457:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.735879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.736507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.743496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791245386188459:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } --!syntax_pg INSERT INTO Pg16 (key, value) VALUES ( 'true'::bool, 'true'::bool ) f f t t --!syntax_pg CREATE TABLE Pg16array ( key int2 PRIMARY KEY, value _bool ); 2024-11-21T17:49:44.880458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg16array (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); --!syntax_pg INSERT INTO Pg16array (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 21 --!syntax_pg CREATE TABLE Pg21 ( key int2 PRIMARY KEY, value int2 ); 2024-11-21T17:49:45.008512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '0'::int2, '0'::int2 ) --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '1'::int2, '1'::int2 ) --!syntax_pg INSERT INTO Pg21 (key, value) VALUES ( '2'::int2, '2'::int2 ) 0 0 1 1 2 2 --!syntax_pg CREATE TABLE Pg21array ( key int2 PRIMARY KEY, value _int2 ); 2024-11-21T17:49:45.117214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg21array (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg21array (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg21array (key, value) VALUES ( '2'::int ... _client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b68/r3tmp/tmpol89Pr/pdisk_1.dat 2024-11-21T17:49:55.759572Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63284, node 6 2024-11-21T17:49:55.773901Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:55.773914Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:55.773915Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:55.773954Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24818 TClient is connected to server localhost:24818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:49:55.852127Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:55.852153Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:55.853165Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:55.853859Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:55.962757Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791294348023852:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.962784Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.962828Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791294348023878:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.963322Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:55.964566Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791294348023881:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:56.024185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:56.046962Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ODIxOTNiMTItZDgxMjAyZDktYWRhMTJlZjktOGM2ZjMxYzQ=, ActorId: [6:7439791298642991337:2314], ActorState: ExecuteState, TraceId: 01jd7xgkdxef86kn3j0xghhg54, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:913: ydb/core/kqp/query_data/kqp_query_data.cpp:517: Terminate was called, reason(56): ERROR: invalid byte sequence for encoding "UTF8": 0x00 Trying to start YDB, gRPC: 3252, MsgBus: 28606 2024-11-21T17:49:56.211735Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439791299818192088:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:56.211749Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b68/r3tmp/tmpcTnxov/pdisk_1.dat 2024-11-21T17:49:56.220012Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3252, node 7 2024-11-21T17:49:56.236267Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:56.236288Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:56.236293Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:56.236341Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28606 TClient is connected to server localhost:28606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
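The KqpPg::NoSelectFullScan output above terminates one query with 'Terminate was called, reason(56): ERROR: invalid byte sequence for encoding "UTF8": 0x00'. That matches stock PostgreSQL behaviour: text values may not contain NUL (0x00) bytes, so a pg-compatible engine rejects them at encoding time. A minimal client-side guard could strip or reject such bytes before the value is sent; the helpers below are hypothetical illustrations in Python, not part of the YDB code.

def reject_nul_bytes(value: str) -> str:
    # PostgreSQL-compatible text encodings forbid embedded NUL characters,
    # so fail fast on the client instead of triggering a server-side error.
    if "\x00" in value:
        raise ValueError("text parameter contains a NUL (0x00) byte")
    return value

# Alternatively, sanitize instead of rejecting (lossy; assumes dropping
# NUL bytes is acceptable for the caller):
def strip_nul_bytes(value: str) -> str:
    return value.replace("\x00", "")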
2024-11-21T17:49:56.311875Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:56.311898Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:56.312938Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:56.313510Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:56.447695Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791299818192688:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:56.447750Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791299818192669:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:56.447759Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:56.448197Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:56.449705Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439791299818192698:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:56.541374Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["aid (4, 3)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["aid (4, 3)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateDroppedExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:47.880569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:47.880594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:47.880599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:47.880604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:47.880615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:47.880619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:47.880628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:47.880709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:47.891674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:47.891700Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:47.894294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:47.894408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:47.894442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:47.900012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:47.900098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:47.900199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:47.903373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:47.904352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:47.904611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:47.904619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:47.904630Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:47.904636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:47.904641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:47.904678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:47.907604Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:47.930165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:47.930260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:47.930320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:47.930372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:47.930380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:47.931290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:47.931320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:47.931369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:47.931380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:47.931384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:47.931389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:47.932077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:47.932103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:47.932111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:47.932613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:49:47.932624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:47.932629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:47.932636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:47.933285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:47.933782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:47.933841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:47.934042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:47.934066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:47.934074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:47.934136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:47.934145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:47.934178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:47.934192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:47.934666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:47.934676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:47.934736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:47.934740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:47.934824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:47.934831Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:47.934843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:47.934847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:47.934852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:47.934857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:47.934861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:47.934865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:47.934876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:47.934881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:47.934884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T17:49:55.496560Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T17:49:55.496600Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:49:55.496604Z node 32 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1005:0 ProgressState 2024-11-21T17:49:55.496610Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1005 ready parts: 1/1 2024-11-21T17:49:55.496627Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1005 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:55.496698Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.496705Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.496709Z node 32 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:55.496712Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T17:49:55.496716Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:55.496815Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.496825Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.496828Z node 32 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:55.496831Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T17:49:55.496834Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:55.496842Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:49:55.497262Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2024-11-21T17:49:55.497281Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000006 2024-11-21T17:49:55.497349Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:55.497363Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 137438955626 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:55.497368Z node 32 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1005:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T17:49:55.497383Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:55.497393Z node 32 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 240 2024-11-21T17:49:55.497411Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:55.497417Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:55.497521Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.497732Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T17:49:55.497967Z node 32 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:55.497972Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:55.497990Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:49:55.498007Z node 32 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2024-11-21T17:49:55.498010Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2024-11-21T17:49:55.498017Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [32:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T17:49:55.498057Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:49:55.498062Z node 32 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T17:49:55.498073Z node 32 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:49:55.498076Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:55.498081Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T17:49:55.498085Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:49:55.498089Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:49:55.498092Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:49:55.498102Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:55.498106Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T17:49:55.498109Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T17:49:55.498112Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:49:55.498151Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.498159Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.498162Z node 32 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:55.498166Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:49:55.498170Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:55.498204Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:55.498208Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:49:55.498214Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 
2024-11-21T17:49:55.498229Z node 32 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.498235Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.498238Z node 32 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:49:55.498243Z node 32 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:49:55.498246Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:55.498252Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T17:49:55.498856Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:49:55.498874Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:55.498883Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T17:49:55.498921Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:49:55.498926Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T17:49:55.498979Z node 32 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:49:55.498994Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:49:55.498998Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [32:405:2397] TestWaitNotification: OK eventTxId 1005 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2024-11-21T17:49:09.532094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791095935167946:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:09.532321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00071a/r3tmp/tmp3ZgN3n/pdisk_1.dat 2024-11-21T17:49:09.581782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28859, node 1 2024-11-21T17:49:09.600525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:09.600553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2024-11-21T17:49:09.600556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:09.600609Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:09.632341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:09.632368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:09.632498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.633625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.633636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.634635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:09.634709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:09.634713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:49:09.634852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:49:09.635295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:09.635302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:49:09.635491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:09.636002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.641325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349687, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.641349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:49:09.641418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:49:09.642096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.642149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.642160Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:49:09.642175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:49:09.642188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:49:09.642201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:49:09.642768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:49:09.642801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:49:09.642809Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:09.642821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:8220 2024-11-21T17:49:09.849878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791095935168862:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.849909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.876643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.876751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:49:09.876877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.876881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.877668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T17:49:09.877714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.877756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.877766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:49:09.878153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:09.878159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:09.878162Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:49:09.878195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:09.878197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:09.878198Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:49:09.878267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:49:09.880251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:49:09.880289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:49:09.880812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:49:09.933288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:49:09.933304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:49:09.933336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T17:49:09.933901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:49:09.935190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349981, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.935208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211349981 2024-11-21T17:49:09.935243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:49:09.937576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.937687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.937699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:49:09.938213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:09.938221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:09.938225Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 7205759 ... t, use /Root 2024-11-21T17:49:49.872899Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783677. Ctx: { TraceId: 01jd7xgddf3xdyd4wtnsqc50se, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjMGEzNTEtY2E1MDZjM2YtZGM1NDNlYTEtYTM0YmIxODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.873129Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783678. Ctx: { TraceId: 01jd7xgddf8z6f40n4hnvk6snb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EwY2UxODgtMTllMTE5YjktZmExZDg2MzQtNDQ1MTIwMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.873175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783679. Ctx: { TraceId: 01jd7xgddga9h3aqt0yns7twzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRjYmMxOTgtNWJiZGUzZGItZjBlMzBkMWYtOWY2NjBiYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.873807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783680. Ctx: { TraceId: 01jd7xgddg3tz7vvhqbekswce6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzEzZWJiODEtNjljZDczYjAtNzAyMGExOTEtYzdmNzE0MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.873905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783681. Ctx: { TraceId: 01jd7xgddg7sz06cfjb8arbkb0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdlOGRlYTktNmFiNmU3OWMtOWNmMDJiYmMtM2NlODg2Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.874008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783683. 
Ctx: { TraceId: 01jd7xgddgf64w5t23343yea35, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY2YjQ3ZDUtYTkzNTM0ZDItNjg1NTBiYTYtNzNjZDhjNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.874101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783685. Ctx: { TraceId: 01jd7xgddh4wbawqftezfdsdvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyNzMwNzQtYjE5NmNiOGItNWRjNGM0ODgtODdmNDQxYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.874159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783684. Ctx: { TraceId: 01jd7xgddh21kssbkmckjwjyas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIwNTIxNTYtZWRlNTIxNTAtYTM1NzhjZS05NTI1OTE5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.874194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783682. Ctx: { TraceId: 01jd7xgddg2dy2ssc1z0cp1r7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJmYjgyYzEtYzk0YWVlZDYtMjRkNTUwNjYtYjU4OTg1ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.875714Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783686. Ctx: { TraceId: 01jd7xgddhdbzdr7rgrw4am6ch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE5MjgyZmItZmRiZWZjMzAtMjYwMjQ0MWYtZGNiNjkxYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.875995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783687. Ctx: { TraceId: 01jd7xgddj419mdaswdh68ggjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EwY2UxODgtMTllMTE5YjktZmExZDg2MzQtNDQ1MTIwMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.876192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783689. Ctx: { TraceId: 01jd7xgddk11a04cenvnf0cenw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRjYmMxOTgtNWJiZGUzZGItZjBlMzBkMWYtOWY2NjBiYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.876274Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783688. Ctx: { TraceId: 01jd7xgddj1zp62xx3zjyq4gb9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjMGEzNTEtY2E1MDZjM2YtZGM1NDNlYTEtYTM0YmIxODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.876431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783690. Ctx: { TraceId: 01jd7xgddk58jzwt0v2zwra860, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzEzZWJiODEtNjljZDczYjAtNzAyMGExOTEtYzdmNzE0MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.877073Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783691. Ctx: { TraceId: 01jd7xgddkb6asj8q3ff4y2zfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIwNTIxNTYtZWRlNTIxNTAtYTM1NzhjZS05NTI1OTE5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.877726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783692. 
Ctx: { TraceId: 01jd7xgddm2mjrjc96zf5hz83p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdlOGRlYTktNmFiNmU3OWMtOWNmMDJiYmMtM2NlODg2Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2024-11-21T17:49:49.878873Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783695. Ctx: { TraceId: 01jd7xgddn6f1j0yr2j48tfrgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY2YjQ3ZDUtYTkzNTM0ZDItNjg1NTBiYTYtNzNjZDhjNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.878932Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783693. Ctx: { TraceId: 01jd7xgddn9h11kv1wevrapxeh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJmYjgyYzEtYzk0YWVlZDYtMjRkNTUwNjYtYjU4OTg1ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.879021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783694. Ctx: { TraceId: 01jd7xgddn6mv10vyfr7p8ypv1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyNzMwNzQtYjE5NmNiOGItNWRjNGM0ODgtODdmNDQxYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211349981 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:49.879528Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783697. Ctx: { TraceId: 01jd7xgddp2hxm488vdd324qtd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzEzZWJiODEtNjljZDczYjAtNzAyMGExOTEtYzdmNzE0MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.879579Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783696. Ctx: { TraceId: 01jd7xgddp7mew5myp9a82xsrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRjYmMxOTgtNWJiZGUzZGItZjBlMzBkMWYtOWY2NjBiYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.879876Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783698. Ctx: { TraceId: 01jd7xgddp8k4wbazm35pk5xby, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIwNTIxNTYtZWRlNTIxNTAtYTM1NzhjZS05NTI1OTE5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.880159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783700. Ctx: { TraceId: 01jd7xgddp6hhxyms81vp0jyx4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjMGEzNTEtY2E1MDZjM2YtZGM1NDNlYTEtYTM0YmIxODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.880210Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783699. 
Ctx: { TraceId: 01jd7xgddp749rb3rg9s16x1f6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EwY2UxODgtMTllMTE5YjktZmExZDg2MzQtNDQ1MTIwMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.880459Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783701. Ctx: { TraceId: 01jd7xgddp1sjbwh2317pem1kf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdlOGRlYTktNmFiNmU3OWMtOWNmMDJiYmMtM2NlODg2Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:49.880617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976783702. Ctx: { TraceId: 01jd7xgddpaqxxnrb9wvyts47a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE5MjgyZmItZmRiZWZjMzAtMjYwMjQ0MWYtZGNiNjkxYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211349981 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:50.002805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 65059 rowCount 1086 cpuUsage 0 2024-11-21T17:49:50.002832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 54879 rowCount 914 cpuUsage 0 2024-11-21T17:49:50.103005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T17:49:50.103080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 1086, DataSize 65059 2024-11-21T17:49:50.103129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 914, DataSize 54879 2024-11-21T17:49:50.103484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:48:21.677735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:48:21.677758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.677763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:48:21.677769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:48:21.677784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:48:21.677788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:48:21.677796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:48:21.677872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:48:21.685212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:48:21.685227Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:48:21.687308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:48:21.687812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:48:21.687835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:48:21.689178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:48:21.689412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:48:21.689494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.689554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:48:21.690577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.690788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.690796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.690824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:48:21.690829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.690833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:48:21.690843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.691844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:48:21.707893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:48:21.707966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.708022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:48:21.708098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:48:21.708106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.708857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.708886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:48:21.708933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.708942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:48:21.708946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:48:21.708950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:48:21.709303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.709311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:48:21.709315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:48:21.709579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.709586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.709591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.709596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.710113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:48:21.710422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:48:21.710466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: 
Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:48:21.710620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:48:21.710642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:48:21.710648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.710693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:48:21.710699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:48:21.710730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.710741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:48:21.711098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:48:21.711104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:48:21.711139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:48:21.711144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:48:21.711214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:48:21.711220Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:48:21.711230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:48:21.711234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.711240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:48:21.711245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:48:21.711249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:48:21.711253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:48:21.711263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:48:21.711268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:48:21.711272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:48:21.711547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:21.711560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:48:21.711565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:48:21.711569Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:48:21.711573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:48:21.711583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... X_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:49:53.664481Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:53.664800Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:53.664813Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:53.664818Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:49:53.664906Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:53.664915Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:53.664918Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:49:53.665043Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T17:49:53.665051Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:53.665113Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2024-11-21T17:49:53.665139Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2024-11-21T17:49:53.665143Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2024-11-21T17:49:53.665147Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2024-11-21T17:49:53.665374Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:53.665385Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:49:53.665389Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:49:53.665393Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:49:53.665397Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:49:53.665410Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2024-11-21T17:49:53.665569Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:49:53.665577Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:53.665620Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:49:53.665639Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2024-11-21T17:49:53.665642Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2024-11-21T17:49:53.665647Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2024-11-21T17:49:53.665657Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:379:2344] message: TxId: 103 2024-11-21T17:49:53.665662Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2024-11-21T17:49:53.665668Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:49:53.665672Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:49:53.665691Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:49:53.665695Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T17:49:53.665698Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T17:49:53.665703Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:53.665706Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T17:49:53.665709Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T17:49:53.665714Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:53.665718Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2024-11-21T17:49:53.665721Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2024-11-21T17:49:53.665724Z node 18 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:49:53.665728Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2024-11-21T17:49:53.665730Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2024-11-21T17:49:53.665739Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:49:53.665834Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:53.665840Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:49:53.665851Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:49:53.665856Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:49:53.665864Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:53.665898Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:49:53.666236Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:49:53.666255Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:49:53.666260Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:49:53.666264Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:49:53.666766Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:49:53.666803Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:49:53.666809Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [18:763:2656] 2024-11-21T17:49:53.666849Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2024-11-21T17:49:53.666933Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:53.666967Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 43us result status StatusPathDoesNotExist 2024-11-21T17:49:53.667001Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:49:53.667046Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:49:53.667061Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 15us result status StatusPathDoesNotExist 2024-11-21T17:49:53.667074Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2024-11-21T17:49:09.941950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791095104289628:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:09.942017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00060d/r3tmp/tmp5FpSpa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17470, node 1 2024-11-21T17:49:10.003688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:49:10.003700Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:10.008649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:10.008661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:10.008662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T17:49:10.008691Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:10.042139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:10.042165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:10.043684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:10.071440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.072600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:10.072618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.073158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:10.073217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:10.073225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:49:10.073729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:10.073809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:10.073825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:49:10.074250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.075071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211350121, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:10.075083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:49:10.075145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:49:10.075483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:10.075529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:10.075544Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:49:10.075555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:49:10.075567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:49:10.075579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:49:10.076031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:49:10.076064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:49:10.076075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:10.076104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:22637 2024-11-21T17:49:10.249255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791099399257644:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:10.249325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:10.252924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.253044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:49:10.253227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:10.253233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.253978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T17:49:10.254030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:10.254078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:10.254094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:49:10.254167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:49:10.254300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:10.254311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49:10.254315Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:49:10.254360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:10.254362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49:10.254368Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:49:10.256378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:49:10.256404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:49:10.259485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:49:10.273588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:49:10.273600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:49:10.273638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:49:10.274091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:49:10.274936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211350324, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:10.274948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211350324 2024-11-21T17:49:10.274972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:49:10.275289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:10.275362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:10.275382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:49:10.275604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:10.275617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49 ... default}. Database not set, use /Root 2024-11-21T17:49:50.325267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817180. Ctx: { TraceId: 01jd7xgdvmb67hc3y1sjfnnm9z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJiMGYzMWUtMjkwZGU4MTQtNzRhZGYyYWEtNDUyM2I2NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.325356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817181. Ctx: { TraceId: 01jd7xgdvm4gvmbsxgxqr3rqc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMzZDhlZmMtZTczZTIzODMtMzFiN2M2MDctYzZmYWQ3MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.325634Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817182. Ctx: { TraceId: 01jd7xgdvm8m74j0ggrdqfgxzf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVlOWE5MzItZjlkZDJiNS01M2IyNjY1ZC1lYjUxZGM5ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.326025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817183. Ctx: { TraceId: 01jd7xgdvmc4k3dtgkbty7pkap, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmODk0ODMtYjY0YjhjNTQtODU4ZDVkZDYtM2U5NDkxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.326156Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817184. Ctx: { TraceId: 01jd7xgdvm5xbqff1hm91gw2j0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRkODMyM2UtYmMwODNiOGUtNjQ1NzU2MjktNDgxZDhhNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.326296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817186. Ctx: { TraceId: 01jd7xgdvn5r3vjesq4m3mewaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc5MDY3NDktYjZiNjlkMTktOTM1Y2ExNjgtNmZhMjUyYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:49:50.326375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817185. Ctx: { TraceId: 01jd7xgdvnb5z7dhn91wg8btjw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMyNmE0OWEtMzA3MWYxZmQtNjJlMDc2NzUtZTJhYzEyZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.326784Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817187. Ctx: { TraceId: 01jd7xgdvn9py1c9c9jg120s8s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGI5NzJiZGMtMzg2MjA5YjAtMTk0NzAwM2EtMjMwNzIzM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.326821Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817188. Ctx: { TraceId: 01jd7xgdvpb46p6qz5rdrm4h80, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA3ZDExMDQtODVkYTc2NjUtMWE3NDFkMjUtMWZlZTU4YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.327381Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817189. Ctx: { TraceId: 01jd7xgdvn2qfbwvkwbn2m44x1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkYmE4Ny0xZWNkMzU4Yy1lM2RlMTUyNi00MGU3ZjQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.327466Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817190. Ctx: { TraceId: 01jd7xgdvp09aknwvyrv1s80m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMzZDhlZmMtZTczZTIzODMtMzFiN2M2MDctYzZmYWQ3MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.327984Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817191. Ctx: { TraceId: 01jd7xgdvqdmnknwdt8bg5kxm2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmODk0ODMtYjY0YjhjNTQtODU4ZDVkZDYtM2U5NDkxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.328086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817192. Ctx: { TraceId: 01jd7xgdvpc2aaft0efafmxn98, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVlOWE5MzItZjlkZDJiNS01M2IyNjY1ZC1lYjUxZGM5ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.328087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817193. Ctx: { TraceId: 01jd7xgdvp4tcqw7rtdgxxhwn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJiMGYzMWUtMjkwZGU4MTQtNzRhZGYyYWEtNDUyM2I2NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.328313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817194. Ctx: { TraceId: 01jd7xgdvq4b6j4gsbt88xdbct, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc5MDY3NDktYjZiNjlkMTktOTM1Y2ExNjgtNmZhMjUyYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.328548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817195. Ctx: { TraceId: 01jd7xgdvq6wm9mrvvz4w48qaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRkODMyM2UtYmMwODNiOGUtNjQ1NzU2MjktNDgxZDhhNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2024-11-21T17:49:50.328933Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817196. 
Ctx: { TraceId: 01jd7xgdvrd4jbqpt41nqjx4wy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkYmE4Ny0xZWNkMzU4Yy1lM2RlMTUyNi00MGU3ZjQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.329340Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817198. Ctx: { TraceId: 01jd7xgdvr222ppr3ewe12nrn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGI5NzJiZGMtMzg2MjA5YjAtMTk0NzAwM2EtMjMwNzIzM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.329444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817199. Ctx: { TraceId: 01jd7xgdvrf76qs9f3s2mjyj2w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA3ZDExMDQtODVkYTc2NjUtMWE3NDFkMjUtMWZlZTU4YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211350324 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:50.329838Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817200. Ctx: { TraceId: 01jd7xgdvrc9mdvc7y85vknph0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMzZDhlZmMtZTczZTIzODMtMzFiN2M2MDctYzZmYWQ3MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.329845Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817197. Ctx: { TraceId: 01jd7xgdvr07qerh6pxqm2n197, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMyNmE0OWEtMzA3MWYxZmQtNjJlMDc2NzUtZTJhYzEyZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.330310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817201. Ctx: { TraceId: 01jd7xgdvsd8xes7p2tdfb5y6t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJiMGYzMWUtMjkwZGU4MTQtNzRhZGYyYWEtNDUyM2I2NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.330367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817202. Ctx: { TraceId: 01jd7xgdvrbetg8z9qv1208qhm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmODk0ODMtYjY0YjhjNTQtODU4ZDVkZDYtM2U5NDkxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.330443Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817203. Ctx: { TraceId: 01jd7xgdvs99vy7e00bm5c4wky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRkODMyM2UtYmMwODNiOGUtNjQ1NzU2MjktNDgxZDhhNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.331251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817204. 
Ctx: { TraceId: 01jd7xgdvs7ksdc4hdjh9nakqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkYmE4Ny0xZWNkMzU4Yy1lM2RlMTUyNi00MGU3ZjQ2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:50.331323Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976817205. Ctx: { TraceId: 01jd7xgdvs62bkw5gq29sr4y68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc5MDY3NDktYjZiNjlkMTktOTM1Y2ExNjgtNmZhMjUyYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211350324 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:49:50.382544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:49:50.382589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:49:50.482794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T17:49:50.482871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:49:50.483052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:49:50.483255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams [GOOD] Test command err: Trying to start YDB, gRPC: 20632, MsgBus: 15950 2024-11-21T17:49:43.817943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791243553028385:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:43.817958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a91/r3tmp/tmpqvTYaU/pdisk_1.dat 2024-11-21T17:49:43.871501Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20632, node 1 2024-11-21T17:49:43.882381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T17:49:43.882404Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:43.882405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:43.882432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15950 TClient is connected to server localhost:15950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:43.920498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:43.920527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:43.921652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:43.927749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:43.932484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:44.069760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791247847996276:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.069780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.075414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.135171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791247847996408:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.135196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.135198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791247847996413:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.135872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.141339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791247847996415:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:49:44.234445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710665:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22982, MsgBus: 17883 2024-11-21T17:49:44.502907Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791244957365180:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:44.503101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a91/r3tmp/tmpSnecMe/pdisk_1.dat 2024-11-21T17:49:44.513557Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22982, node 2 2024-11-21T17:49:44.526056Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.526067Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.526069Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.526107Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17883 TClient is connected to server localhost:17883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:44.603061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.603094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.606374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:44.606492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.608192Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:44.766942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791244957365775:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.766963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791244957365783:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.766969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.767498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.768894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791244957365789:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:44.829437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.841399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:49:44.849539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.869304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:2, at schemeshard: 72057594046644480 2024-11-21T17:49:44.877117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.894801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.904536Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2U5ZDg5YjAtOGU1MWEyYTctOWFkYzkzOTUtNTAwMjkyZGI=, ActorId: [2:7439791244957365756:2297], ActorState: ExecuteState, TraceId: 01jd7xg8hve34p6c799hq6amzf, Create QueryResponse for error on request, msg: 2024-11-21T17:49:44.908419Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2U5ZDg5YjAtOGU1MWEyYTctOWFkYzkzOTUtNTAwMjkyZGI=, ActorId: [2:7439791244957365756:2297], ActorState: ExecuteState, TraceId: 01j ... have access permissions } 2024-11-21T17:49:49.367057Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:49.368405Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439791268866474304:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:49.433481Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24579, MsgBus: 11831 2024-11-21T17:49:49.569703Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439791266261948791:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:49.569717Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a91/r3tmp/tmpFLCzNr/pdisk_1.dat 2024-11-21T17:49:49.582834Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24579, node 9 2024-11-21T17:49:49.595439Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:49.595451Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:49.595454Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:49.595512Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11831 TClient is connected to server localhost:11831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:49.670267Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:49.670302Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:49.671351Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:49.672715Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:49.838510Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439791266261949401:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.838527Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439791266261949390:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.838573Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.839018Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:49.840137Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439791266261949404:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:49.898344Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:49.974583Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:50.019554Z node 9 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [9:7439791270556917073:2361], owner: [9:7439791266261949366:2292], statement id: 0 2024-11-21T17:49:50.019625Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MTMxNjcwNGEtZWU0NDBjNDItMjNhN2RiZmYtYjkyMzg1OTc=, ActorId: [9:7439791270556917071:2360], ActorState: ExecuteState, TraceId: 01jd7xgdj209qa85wt0ddgmama, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:49:50.039616Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439791270556917098:2371], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:55: Error: At function: PgOp
:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2024-11-21T17:49:50.039697Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZTc5MmM1MTgtYTFjOWIyNmItMWU5NTMyMzQtZDhkNmRlNmI=, ActorId: [9:7439791270556917095:2369], ActorState: ExecuteState, TraceId: 01jd7xgdjm4d5a971fzxcsra1r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:49:50.043112Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439791270556917110:2377], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:57: Error: At function: PgAnd
:2:67: Error: At function: PgOp
:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2024-11-21T17:49:50.043574Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=NjZiMDkxMDktY2FmZTQ0YjItNjkyNDA3OGYtODEzN2U5N2U=, ActorId: [9:7439791270556917107:2375], ActorState: ExecuteState, TraceId: 01jd7xgdjra6465sgtbykdkj39, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:49:50.045653Z node 9 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xgdjw1cjyqtyaj30yhzmr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZTg1MDVjMGEtZmMyNjRlNWYtNmExNjdhMTctM2ZkYTJkNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2024-11-21T17:49:50.045824Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZTg1MDVjMGEtZmMyNjRlNWYtNmExNjdhMTctM2ZkYTJkNGU=, ActorId: [9:7439791270556917119:2381], ActorState: ExecuteState, TraceId: 01jd7xgdjw1cjyqtyaj30yhzmr, Create QueryResponse for error on request, msg: 2024-11-21T17:49:50.049939Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:49:50.061270Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:49:50.074869Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439791270556917283:2406], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2024-11-21T17:49:50.074953Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YmI2NjA4MTItZmNlMzkwOTctN2QyOWUzYzAtNmEwMGIyODk=, ActorId: [9:7439791270556917280:2404], ActorState: ExecuteState, TraceId: 01jd7xgdkq0gbf45d9swt4eg67, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:49:50.077732Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439791270556917295:2412], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2024-11-21T17:49:50.077820Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=N2VhM2UwNDMtOWFkYTQzZDYtMTZjNmMwMDgtMjI5MGJiMmY=, ActorId: [9:7439791270556917292:2410], ActorState: ExecuteState, TraceId: 01jd7xgdkv0dpc7tyrz8jmawy1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:49:50.109368Z node 9 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd7xgdky0ysk5svq38d6t55x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=OGVhN2JlNGMtY2FhYTYyZjEtMjJkYTAxOGUtMjE0MmM1NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2024-11-21T17:49:50.109477Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=OGVhN2JlNGMtY2FhYTYyZjEtMjJkYTAxOGUtMjE0MmM1NjU=, ActorId: [9:7439791270556917304:2416], ActorState: ExecuteState, TraceId: 01jd7xgdky0ysk5svq38d6t55x, Create QueryResponse for error on request, msg: 2024-11-21T17:49:50.113983Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:49:50.158700Z node 9 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 9, TabletId: 72075186224037892 not found 2024-11-21T17:49:50.161890Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 27443, MsgBus: 10508 2024-11-21T17:49:44.144631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791246327198870:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:44.144805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a86/r3tmp/tmptIJwlB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27443, node 1 2024-11-21T17:49:44.212419Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:44.215901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.215911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.215913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.215938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10508 2024-11-21T17:49:44.244686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.244709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.245814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10508 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:44.277115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.475887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791246327199467:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.475915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.483116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.545148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791246327199571:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.545174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.548019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.603750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791246327199649:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.603774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.603781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791246327199654:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.604474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.610465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791246327199656:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } Trying to start YDB, gRPC: 13017, MsgBus: 10674 2024-11-21T17:49:46.777946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791253141011611:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:46.778157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a86/r3tmp/tmpYgAopZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13017, node 1 2024-11-21T17:49:46.829862Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:46.839760Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:46.839773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:46.839775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:46.839809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10674 TClient is connected to server localhost:10674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:46.878868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:46.878894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:46.879815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:46.887271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:47.061921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791257435979500:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.061950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.069651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:47.131065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791257435979604:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.131088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.133067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:47.140030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791257435979680:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.140048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791257435979685:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.140052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.140656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:49:47.144276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791257435979687:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } Trying to start YDB, gRPC: 24016, MsgBus: 19774 2024-11-21T17:49:47.460364Z node 2 :METADATA_PROVIDER WARN: flin ... ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:49.131002Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791265979226100:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.131026Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791265979226110:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.131033Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.131800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:49.133704Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791265979226114:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:49.286417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12566, MsgBus: 20903 2024-11-21T17:49:49.522075Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791265972679574:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:49.522266Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a86/r3tmp/tmp70wb2a/pdisk_1.dat 2024-11-21T17:49:49.531511Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12566, node 5 2024-11-21T17:49:49.547271Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:49.547291Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:49.547293Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:49.547330Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20903 TClient is connected to server localhost:20903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:49.622522Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:49.622555Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:49.623609Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:49.624328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:49.788281Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791265972680172:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.788304Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791265972680151:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.788320Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.789064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:49.790847Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791265972680180:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:49.865363Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7439791265972680248:2306], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Default expr b is nullable or optional, but column has not null constraint. 2024-11-21T17:49:49.865447Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NTg5YmY0NjktNjQ3MzE5MS1hODNjMzBkMC02ODE4ZWNiNg==, ActorId: [5:7439791265972680149:2297], ActorState: ExecuteState, TraceId: 01jd7xgd6022kkvrgbtrqs5gc4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Default expr b is nullable or optional, but column has not null constraint. Trying to start YDB, gRPC: 10683, MsgBus: 8907 2024-11-21T17:49:49.995522Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439791266091752153:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:49.995542Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a86/r3tmp/tmpLgFEn2/pdisk_1.dat 2024-11-21T17:49:50.008318Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10683, node 6 2024-11-21T17:49:50.023836Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:50.023854Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:50.023858Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:50.023903Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8907 TClient is connected to server localhost:8907 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Processi... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2024-11-21T17:49:50.095579Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:50.095603Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:50.096843Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:50.098530Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:50.251788Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791270386720031:2298], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:50.251807Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791270386720056:2301], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:50.251816Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:50.252364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:50.254178Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791270386720060:2302], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:50.347531Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] Test command err: Trying to start YDB, gRPC: 28833, MsgBus: 14226 2024-11-21T17:49:43.757320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791243563877767:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:43.757339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b40/r3tmp/tmp5IGu1U/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28833, node 1 2024-11-21T17:49:43.825017Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:43.830608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:43.830618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:43.830620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:43.830645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14226 2024-11-21T17:49:43.857549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:43.857568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:43.858633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:43.893322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:44.042533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.110063Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2024-11-21T17:49:44.116073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.125613Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2024-11-21T17:49:44.138597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.153355Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2024-11-21T17:49:44.167988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.181130Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2024-11-21T17:49:44.194777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.207973Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2024-11-21T17:49:44.215487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.231077Z node 1 :READ_TABLE_API WARN: 
ForgetAction occurred, send TEvPoisonPill f f t t 2024-11-21T17:49:44.244209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.258567Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2024-11-21T17:49:44.274060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.286027Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2024-11-21T17:49:44.300217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.313488Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T17:49:44.322596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.333915Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T17:49:44.341346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.356148Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T17:49:44.370062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.383989Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T17:49:44.398486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.411318Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T17:49:44.422871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.431691Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T17:49:44.439496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.497420Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 
2024-11-21T17:49:44.510265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.522844Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T17:49:44.531031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.544935Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T17:49:44.551511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.565492Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2024-11-21T17:49:44.572725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715730:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.595321Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2024-11-21T17:49:44.603009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715734:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.614512Z node 1 :READ_TABLE_API WARN: ForgetAction occurre ... 
N: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:52.312848Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037889 not found 2024-11-21T17:49:52.316910Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715665:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18379, MsgBus: 10548 2024-11-21T17:49:52.414652Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439791279612611103:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:52.414694Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b40/r3tmp/tmpJV9sNk/pdisk_1.dat 2024-11-21T17:49:52.424759Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18379, node 7 2024-11-21T17:49:52.441519Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:52.441547Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:52.441549Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:52.441601Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10548 TClient is connected to server localhost:10548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:52.515001Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:52.515028Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:52.516038Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:52.517249Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:52.518090Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:52.677124Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791279612611700:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.677154Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.678915Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:52.735310Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791279612611804:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.735338Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791279612611809:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.735339Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.735849Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:49:52.737119Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439791279612611811:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } Trying to start YDB, gRPC: 25542, MsgBus: 4525 2024-11-21T17:49:53.097076Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439791283988261633:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:53.097115Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b40/r3tmp/tmpuzas91/pdisk_1.dat 2024-11-21T17:49:53.107661Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25542, node 8 2024-11-21T17:49:53.118340Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:53.118354Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:53.118356Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:53.118390Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4525 TClient is connected to server localhost:4525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:53.197536Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:53.197588Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:53.198630Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:53.199293Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:53.337932Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791283988262214:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.337946Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791283988262240:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.337950Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.338409Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:53.339502Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439791283988262243:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:53.439617Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.447450Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439791283988262410:2318], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Table name conflict: db.[/Root/test] is used to reference multiple tables. 2024-11-21T17:49:53.447524Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=YTBhZjEwYWMtNTBjMDU3NzgtNDU5Y2QxMGYtMTc3ODhiYTg=, ActorId: [8:7439791283988262403:2314], ActorState: ExecuteState, TraceId: 01jd7xggx54wk1gbbrmsrwzqa2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:49:53.466293Z node 8 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 8, TabletId: 72075186224037888 not found 2024-11-21T17:49:53.467249Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439791283988262512:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At function: KiReadTable!
:2:31: Error: Cannot find table 'db.[/Root/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:49:53.467330Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=ZTE3YWE4ZjYtY2QxZmZjNDktMjBhZTQyYzEtZDViNWI0N2M=, ActorId: [8:7439791283988262509:2343], ActorState: ExecuteState, TraceId: 01jd7xggxseysx8bm8tf80x9e8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 27658, MsgBus: 31864 2024-11-21T17:49:43.743582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791240728500942:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:43.743889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac2/r3tmp/tmplOGqOz/pdisk_1.dat 2024-11-21T17:49:43.804346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27658, node 1 2024-11-21T17:49:43.821710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:43.821728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:43.821730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:43.821774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31864 2024-11-21T17:49:43.843849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:43.843881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:43.844919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:43.881129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
16 2024-11-21T17:49:44.021483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.089926Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.090617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.097638Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.099336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791245023469011:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.099337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791245023469022:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.099350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.099840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.106149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791245023469025:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } f f t t 18 2024-11-21T17:49:44.245370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.302774Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.303636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.312195Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2024-11-21T17:49:44.362712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.371640Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.372404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.380947Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2024-11-21T17:49:44.431513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.439870Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.440647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.451159Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2024-11-21T17:49:44.513030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.521359Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.522142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.528847Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2024-11-21T17:49:44.581039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.589510Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.590227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.597979Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 
8.5 8.5 9.5 9.5 701 2024-11-21T17:49:44.655682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.664163Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.665019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.675405Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2024-11-21T17:49:44.722876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715723:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.730610Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.731269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715725:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.737783Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2024-11-21T17:49:44.785579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715732:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.792550Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.793192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715734:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.801362Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2024-11-21T17:49:44.853143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715741:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.860668Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:49:44.861263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.870627Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill varchar 0 varchar 0 varchar 1 varchar 1 varchar 2 varchar 2 varchar 3 varchar 3 varchar 4 varchar 4 varchar 5 varchar 5 varchar 6 varchar 6 varchar 7 varchar 7 varchar 8 varchar 8 varchar 9 varchar 9 19 2024-11-21T17:49:44.928594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715750:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.93696 ... 
type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:52.411219Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791282225391607:2346], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2024-11-21T17:49:52.411314Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzczODFhMDctYmIxMzlmZDAtZWRiZjE3ODEtZjE2YjdmNA==, ActorId: [2:7439791282225391605:2345], ActorState: ExecuteState, TraceId: 01jd7xgfwq6vxhgr9fgkgwkn27, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T17:49:52.433479Z node 2 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, unexpected exception caught: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2024-11-21T17:49:52.435190Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791282225391618:2351], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
: Error: (yexception) yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" , code: 2029 2024-11-21T17:49:52.435269Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg4NTMzNTQtZWNiOTQ5ZWMtZGJjYzdlZTYtNzk0OTc5NDI=, ActorId: [2:7439791282225391616:2350], ActorState: ExecuteState, TraceId: 01jd7xgfww8j5zw6gncbew6cqd, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2024-11-21T17:49:52.438070Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791282225391630:2357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/nopg] 2024-11-21T17:49:52.438133Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2EyMmU3NWMtZThmYjlkNC0zMWVlYTlmNy1hMTVhNGM3ZA==, ActorId: [2:7439791282225391628:2356], ActorState: ExecuteState, TraceId: 01jd7xgfxk05z9f1ka839ecvq5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 1845, MsgBus: 26400 2024-11-21T17:49:52.669388Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791278619358616:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:52.669609Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac2/r3tmp/tmpiDQWoo/pdisk_1.dat 2024-11-21T17:49:52.677913Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1845, node 3 2024-11-21T17:49:52.694097Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:52.694109Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:52.694110Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:52.694162Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26400 TClient is connected to server localhost:26400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:52.769961Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:52.769984Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:52.771027Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:52.772284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:52.772955Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:52.912556Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439791278619359198:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.912580Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439791278619359217:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.912586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.913227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:52.914721Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439791278619359227:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:52.974691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.013956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.580524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:53.580569Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:53.580584Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac2/r3tmp/tmpiGrnJh/pdisk_1.dat 2024-11-21T17:49:53.659087Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.673875Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:53.715862Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:53.715893Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:53.726468Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:53.831014Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:610:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.831041Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:621:2524], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.831048Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:53.831727Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:49:53.967003Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:624:2527], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } PreparedQuery: "7b11f033-4dd84760-24f4153e-d0616376" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"26e81b66-62bd96ca-237a03b7-b2128f3f\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull [GOOD] Test command err: Trying to start YDB, gRPC: 65226, MsgBus: 18414 2024-11-21T17:49:44.488223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791244618162666:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:44.488501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a72/r3tmp/tmpHIo1bm/pdisk_1.dat 2024-11-21T17:49:44.547733Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65226, node 1 2024-11-21T17:49:44.565677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.565691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.565693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.565735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18414 2024-11-21T17:49:44.588210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.588250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.589174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18414 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:44.632769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.832719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791244618163246:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.832755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.832813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791244618163273:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.833628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.836285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791244618163275:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T17:49:44.915211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1110, MsgBus: 24586 2024-11-21T17:49:45.163617Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791250389773743:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:45.163892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a72/r3tmp/tmpKfQq6G/pdisk_1.dat 2024-11-21T17:49:45.173460Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1110, node 2 2024-11-21T17:49:45.187872Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:45.187891Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:45.187894Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:45.187940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24586 TClient is connected to server localhost:24586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:45.266119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:45.266150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:45.266547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:45.267114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:49:45.268336Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:45.476323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791250389774343:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:45.476352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791250389774335:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:45.476375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:45.480521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:45.482451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791250389774349:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:45.545002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28400, MsgBus: 10668 2024-11-21T17:49:45.864263Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791251076317162:2062];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a72/r3tmp/tmpO5Wl3y/pdisk_1.dat 2024-11-21T17:49:45.870449Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 28400, node 3 2024-11-21T17:49:45.882984Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:45.885463Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:45.885472Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:45.885474Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:45.885514Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10668 TClient is connected to server localhost:10668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:45.963835Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:45.963871Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:45.964934Z node 3 :HIVE WARN: HIVE#7205 ... default not found or you don't have access permissions } 2024-11-21T17:49:47.830354Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:47.831150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:47.832993Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791259046215928:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:47.909972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5962, MsgBus: 18413 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a72/r3tmp/tmpOYu7Ks/pdisk_1.dat 2024-11-21T17:49:48.182840Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439791263289004580:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:48.182860Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 5962, node 6 2024-11-21T17:49:48.204344Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:48.204365Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:48.204368Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:48.204370Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:48.204405Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18413 TClient is connected to server localhost:18413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:48.283195Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:48.283224Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:48.284283Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:48.285574Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:48.441882Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791263289005179:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.441900Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791263289005189:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.441906Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.442356Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:48.443561Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791263289005193:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:48.544694Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439791263289005261:2306], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: invalid input syntax for type integer: "text" 2024-11-21T17:49:48.544775Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDM4M2IyZmYtMWU5YTM1YjYtNjRmNTdhMDUtYTdiZWE2YzM=, ActorId: [6:7439791263289005177:2297], ActorState: ExecuteState, TraceId: 01jd7xgbw58cm0n3geczs9ev8b, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 21966, MsgBus: 13297 2024-11-21T17:49:48.685382Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439791263031029992:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:48.686399Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a72/r3tmp/tmpgzU7qM/pdisk_1.dat 2024-11-21T17:49:48.698769Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21966, node 7 2024-11-21T17:49:48.713141Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:48.713168Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:48.713172Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:48.713213Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13297 TClient is connected to server localhost:13297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:48.783908Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:48.783942Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:48.784968Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:48.788757Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:49.006365Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791267325997735:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.006389Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791267325997718:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.006398Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:49.007133Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:49:49.009436Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439791267325997745:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:49.078569Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:49.091437Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:49.101118Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7439791267325998026:2328], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2024-11-21T17:49:49.101193Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ODIzZjJjZDgtZjVhYWU3NDEtMWMwM2Y4ZDctYTdkOWUxZTI=, ActorId: [7:7439791267325998024:2327], ActorState: ExecuteState, TraceId: 01jd7xgcna2e88nqt6j2p1h0pd, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> KqpQueryPerf::KvRead-QueryService >> KqpQueryPerf::IndexDeleteOn-QueryService |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::AggregateToScalar-QueryService >> KqpQueryPerf::Delete-QueryService >> KqpQueryPerf::Update-QueryService >> KqpQueryPerf::MultiRead-QueryService >> KqpQueryPerf::IdxLookupJoin-QueryService >> KqpQueryPerf::IndexDeleteOn+QueryService >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::Delete+QueryService >> KqpQueryPerf::DeleteOn-QueryService >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> KqpQueryPerf::Upsert+QueryService >> KqpQueryPerf::Replace-QueryService >> TConsoleConfigTests::TestAddConfigItem >> KqpWorkload::KV >> KqpQueryPerf::IndexUpdateOn-QueryService >> KqpQueryPerf::RangeRead+QueryService >> KqpQueryPerf::IndexUpsert-QueryService |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |80.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::DropExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] 
sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:49.840151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:49.840174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:49.840179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:49.840184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:49.840195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:49.840199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:49.840208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:49.840333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:49.851084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:49.851106Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:49.853427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:49.853530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:49.853559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:49.856396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:49.856469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:49.856584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:49.856825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:49.857811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:49.858063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:49.858074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:49.858085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:49.858092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 
1] 2024-11-21T17:49:49.858097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:49.858137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:49.859391Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:49.876199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:49.876295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:49.876357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:49.876412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:49.876420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:49.877180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:49.877219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:49.877258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:49.877268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:49.877272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:49.877277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:49.877751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:49.877763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:49.877768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:49.878245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:49.878260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:49.878266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 
1:0, at tablet 72057594046678944 2024-11-21T17:49:49.878273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:49.878889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:49.879403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:49.879457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:49.879663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:49.879690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:49.879697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:49.879750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:49.879757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:49.879782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:49.879796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:49.880273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:49.880287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:49.880325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:49.880330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:49.880407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:49.880415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:49.880426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:49.880430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T17:49:49.880435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:49.880440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:49.880444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:49.880448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:49.880460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:49.880466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:49.880469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... : Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.665727Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:49:57.665731Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T17:49:57.665735Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:57.665817Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.665825Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.665829Z node 33 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:49:57.665832Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T17:49:57.665836Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:57.665843Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T17:49:57.666475Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T17:49:57.666503Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000006 2024-11-21T17:49:57.666745Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.666812Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:57.666832Z node 
33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 141733922920 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:57.666840Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T17:49:57.666861Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:57.666873Z node 33 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2024-11-21T17:49:57.666900Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:57.666909Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:57.667038Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:49:57.667570Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:57.667579Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:57.667603Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:49:57.667623Z node 33 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:57.667628Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2024-11-21T17:49:57.667633Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:203:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:49:57.667674Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:49:57.667680Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:49:57.667691Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:49:57.667695Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:49:57.667845Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:49:57.667854Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:49:57.667858Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:49:57.667861Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:49:57.667873Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:57.667878Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 
2024-11-21T17:49:57.667882Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T17:49:57.667885Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:49:57.667954Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.667964Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.667968Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:49:57.667973Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:49:57.667979Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:49:57.668021Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:49:57.668026Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:49:57.668034Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:49:57.668070Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.668079Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.668083Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:49:57.668086Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:49:57.668090Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:57.668098Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:49:57.669005Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:49:57.669032Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:49:57.669042Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got 
TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:49:57.669099Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:49:57.669106Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:49:57.669174Z node 33 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:49:57.669193Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:49:57.669198Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [33:406:2398] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:49:57.669267Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:57.669294Z node 33 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 38us result status StatusPathDoesNotExist 2024-11-21T17:49:57.669328Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |80.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::IndexInsert-QueryService |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |80.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive >> KqpQueryPerf::KvRead-QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] >> KqpQueryPerf::Delete-QueryService [GOOD] >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> KqpQueryPerf::Update-QueryService [GOOD] >> KqpQueryPerf::RangeRead-QueryService [GOOD] >> KqpQueryPerf::DeleteOn-QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7989, MsgBus: 17455 2024-11-21T17:49:58.351330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791304479822956:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.351553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c2f/r3tmp/tmpLnhSAY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7989, node 1 2024-11-21T17:49:58.410427Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:58.412463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.412488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.412490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.412524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17455 TClient is connected to server localhost:17455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:58.452011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.452038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.453216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.486907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
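A note on the two KQP compile errors logged earlier in this output: the GENERIC_ERROR on node 6 ("Failed to parse default expr for typename int4 ... invalid input syntax for type integer: \"text\"") and the BAD_REQUEST on node 7 ("Missing not null column in input: c ... code: 2032"). The sketch below is not the test code; it is a minimal reconstruction, from the error text alone, of the kind of statements that produce these two responses, written against the YDB Python SDK's classic table API. The endpoint, database path, table and column names are hypothetical, the first CREATE TABLE assumes the pg-typed DEFAULT syntax, and the second statement assumes a pre-existing table not_null_t with a NOT NULL column c.

    import ydb

    # Hypothetical connection parameters; the tests above run their own local single-node server.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def bad_default(session):
        # Expected to fail at query compilation, like the GENERIC_ERROR above:
        # the default literal 'text' cannot be parsed as pg int4.
        # The exact DDL syntax for a pg-typed default is assumed here.
        session.execute_scheme(
            "CREATE TABLE bad_default_t ("
            " id pgint4,"
            " v pgint4 DEFAULT 'text',"
            " PRIMARY KEY (id))"
        )

    def missing_not_null(session):
        # Expected to fail with BAD_REQUEST, code 2032: the NOT NULL column c
        # of the (assumed pre-existing) table not_null_t is absent from the
        # UPSERT column list.
        session.transaction().execute(
            "UPSERT INTO not_null_t (a, b) VALUES (1, 2)",
            commit_tx=True,
        )

    for op in (bad_default, missing_not_null):
        try:
            pool.retry_operation_sync(op)
        except ydb.Error as err:
            print(type(err).__name__, err)

    driver.stop()

In both cases the log shows KQP_COMPILE_ACTOR failing and the session replying with ReplyQueryCompileError, i.e. the statements are rejected during compilation, before any table data is touched.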
2024-11-21T17:49:58.498905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:49:58.560813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.575840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.585449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.685882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791304479824509:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.685923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.720459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.726662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.738560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.744786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.759367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.772473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.784671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791304479825023:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.784714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.784727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791304479825028:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.785313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.792762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791304479825030:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryPerf::IndexDeleteOn-QueryService [GOOD] >> KqpQueryPerf::Upsert+QueryService [GOOD] >> KqpQueryPerf::Delete+QueryService [GOOD] >> KqpQueryPerf::Replace-QueryService [GOOD] >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService [GOOD] |80.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3935, MsgBus: 63137 2024-11-21T17:49:58.477806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791304600161903:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.477824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c23/r3tmp/tmpc9qWuA/pdisk_1.dat 2024-11-21T17:49:58.527555Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3935, node 1 2024-11-21T17:49:58.542938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.542955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.542957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.542993Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63137 2024-11-21T17:49:58.579801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.579830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.581224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.594694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
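A note on the startup warnings repeated at the beginning of each test above: the sequence "Resource pool default not found or you don't have access permissions" (NOT_FOUND from TPoolFetcherActor), the ESchemeOpCreateResourcePool suboperation, and the TPoolCreatorActor line "Scheduled retry for error: ... Transaction ... completed, doublechecking" is the workload service creating the default resource pool on first use; in these runs the tests still finish [GOOD], so the warnings reflect startup ordering rather than failures. A client issuing its first query against a freshly created database can hit the same transient window, and the usual answer is an ordinary retry. A minimal sketch of that, assuming a hypothetical endpoint and using the YDB Python SDK's built-in retry helper (this is not the test framework's code):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical endpoint
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def first_query(session):
        # The very first query can race the lazy creation of server-side
        # metadata objects such as the default resource pool;
        # retry_operation_sync re-runs the callable on retryable statuses.
        return session.transaction().execute("SELECT 1;", commit_tx=True)

    result_sets = pool.retry_operation_sync(first_query)
    print(result_sets[0].rows)
    driver.stop()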
2024-11-21T17:49:58.597876Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.625045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.690843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.707876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.720416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.787991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791304600163447:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.788018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.820277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.827431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.835700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.842401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.848901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.856048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.864869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791304600163940:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.864896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791304600163945:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.864901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.865541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.869065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791304600163947:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |80.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28911, MsgBus: 11109 2024-11-21T17:49:58.563629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791307158520074:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.563785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c29/r3tmp/tmplD7OgL/pdisk_1.dat 2024-11-21T17:49:58.612785Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28911, node 1 2024-11-21T17:49:58.627124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.627140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.627141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.627177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11109 TClient is connected to server localhost:11109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:58.667038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.667067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.668173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.690922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.693658Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.714691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.776107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.795100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.807750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.876222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791307158521635:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.876264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.908045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.914523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.926827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.982130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.989345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.996951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.011828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311453489449:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.011838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311453489454:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.011847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.012498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.016609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791311453489456:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27962, MsgBus: 31350 2024-11-21T17:49:58.537866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791308410557249:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.537957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c31/r3tmp/tmpEdLcFD/pdisk_1.dat 2024-11-21T17:49:58.592215Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27962, node 1 2024-11-21T17:49:58.603992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.604010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.604013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.604051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31350 2024-11-21T17:49:58.636997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.637039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.638324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.667722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.696637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.712870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:49:58.741222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.752024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.841203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791308410558641:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.841235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.867552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.874102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.884164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.891477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.898715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.905189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.913864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791308410559140:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.913888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.913922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791308410559145:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.914540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.918586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791308410559147:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpQueryPerf::IndexUpsert-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAffectedConfigs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10640, MsgBus: 63226 2024-11-21T17:49:58.574110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791305909208568:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.574174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c06/r3tmp/tmpArlhsn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10640, node 1 2024-11-21T17:49:58.637709Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:58.644562Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.644576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.644578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.644612Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63226 2024-11-21T17:49:58.674127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.674172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.675241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.691458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.716833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.731611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.749722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.760724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.912446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305909209954:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.912554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.916077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.921443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.937190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.947750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.954118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.961150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.976899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305909210467:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.976921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.977041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305909210472:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.977679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.981534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791305909210474:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6684, MsgBus: 24923 2024-11-21T17:49:58.554372Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791305465140906:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.554471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c35/r3tmp/tmp2HMz4S/pdisk_1.dat 2024-11-21T17:49:58.614324Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6684, node 1 2024-11-21T17:49:58.630475Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.630488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.630490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.630524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24923 2024-11-21T17:49:58.654919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.654944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.656146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.689478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.696599Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.709512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.730672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.747354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.759016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.855990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305465142298:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.856019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.887614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.894685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.905742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.919275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.926271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.933060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.989400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305465142816:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.989431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.989497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305465142821:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.990328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.995155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791305465142823:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19876, MsgBus: 6628 2024-11-21T17:49:58.570444Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791306672224529:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c0b/r3tmp/tmppI856G/pdisk_1.dat 2024-11-21T17:49:58.611003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:49:58.635852Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19876, node 1 2024-11-21T17:49:58.647989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.647999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.648000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.648026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6628 2024-11-21T17:49:58.669238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.669268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.670394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.710765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.715141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.779878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.796517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.807663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.885617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791306672225917:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.885643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.910569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.916981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.926257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.934299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.948158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.955892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.977606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791306672226422:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.977641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.977712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791306672226427:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.979083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.985671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791306672226429:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19422, MsgBus: 24744 2024-11-21T17:49:58.575908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791305117478518:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.576044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c05/r3tmp/tmpuoj5lM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19422, node 1 2024-11-21T17:49:58.635888Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:58.636551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.636559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.636561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.636591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24744 TClient is connected to server localhost:24744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:58.676651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.676671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.677874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.704078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.705755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.716813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.781610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.797270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:49:58.854756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.920386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305117480074:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.920424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.951562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.958026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.968314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.975267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.989445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.003476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.018628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309412447872:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.018661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.018770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309412447877:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.019556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.023946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791309412447879:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18814, MsgBus: 31593 2024-11-21T17:49:58.494696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791305117178601:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.494946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c2e/r3tmp/tmpEyRyml/pdisk_1.dat 2024-11-21T17:49:58.554096Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18814, node 1 2024-11-21T17:49:58.562587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.562603Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.562605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.562633Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31593 2024-11-21T17:49:58.596312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.596349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:31593 2024-11-21T17:49:58.597617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.612761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.616772Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.621483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.689884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.712627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.722392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.804318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305117180152:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.804355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.833702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.839720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.849499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.856028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.863363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.870500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.879254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305117180653:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.879286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.879302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791305117180658:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.879933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.883341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791305117180660:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:49:59.045615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.051903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.059583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TExternalDataSourceTestReboots::CreateExternalDataSourceWithReboots [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] |80.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 61949, MsgBus: 6239 2024-11-21T17:49:58.831143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791304809369761:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.831209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bf4/r3tmp/tmpCqOV5T/pdisk_1.dat 2024-11-21T17:49:58.882943Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61949, node 1 2024-11-21T17:49:58.889753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.889766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.889767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.889794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6239 TClient is connected to server localhost:6239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:49:58.931551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.931595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.936258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:58.961516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.966458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.982080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.001502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:49:59.011509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.149350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309104338453:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.149382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.183740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.189744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.200544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.213735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.220678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.234720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.250067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309104338964:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.250095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309104338969:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.250099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.250740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.254342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791309104338971:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1331, MsgBus: 23025 2024-11-21T17:49:58.831059Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791306076615032:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.831275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c03/r3tmp/tmpbHDrmU/pdisk_1.dat 2024-11-21T17:49:58.885912Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1331, node 1 2024-11-21T17:49:58.899795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.899813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.899815Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.899855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23025 2024-11-21T17:49:58.931668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.931692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.932956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.962766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.964491Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.968666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.985435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.009131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.021411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.164417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310371583871:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.164508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.168992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.177080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.185461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.192002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.200729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.213107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.268998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310371584386:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.269025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.269030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310371584391:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.269718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.275341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791310371584393:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7152, MsgBus: 15393 2024-11-21T17:49:58.880006Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791307990994441:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.880151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001be7/r3tmp/tmp6a9cre/pdisk_1.dat 2024-11-21T17:49:58.946704Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7152, node 1 2024-11-21T17:49:58.959806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.959818Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.959820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.959853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15393 2024-11-21T17:49:58.981661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.981679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.982550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:59.023552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:49:59.036594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.051943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:49:59.070255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.083318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.183259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791312285963276:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.183284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.213917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.221375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.234473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.289431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.297280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.304201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.312665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791312285963792:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.312693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.312801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791312285963797:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.313530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.317285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791312285963799:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryPerf::IndexReplace+QueryService >> KqpQueryPerf::IndexInsert-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 7727, MsgBus: 27881 2024-11-21T17:49:58.882160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791305408717285:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.882234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001be8/r3tmp/tmpEplfv0/pdisk_1.dat 2024-11-21T17:49:58.951598Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7727, node 1 2024-11-21T17:49:58.964362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.964379Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.964381Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.964420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27881 2024-11-21T17:49:58.984202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.984238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.984608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:59.027209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:59.035115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:59.040912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.056376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.077888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:49:59.090003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.189222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309703685975:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.189246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.219741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.225495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.235099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.248339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.255374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.269379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.277827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309703686488:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.277856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.277857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309703686493:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.278438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.282757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791309703686495:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31379, MsgBus: 11421 2024-11-21T17:49:58.744726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791304878949912:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.744787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bfd/r3tmp/tmpNrq7EM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31379, node 1 2024-11-21T17:49:58.818804Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:58.819019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.819032Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.819034Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.819066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11421 2024-11-21T17:49:58.844724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.844755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.845806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.883633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.888130Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.894036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:58.955067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.976330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.987316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.037460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309173918744:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.037499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.075891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.081817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.094866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.108156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.115202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.122538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.137301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309173919236:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.137332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.137449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309173919241:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.138369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.142863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791309173919243:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:49:59.340125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.346001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.353189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2990, MsgBus: 7443 2024-11-21T17:49:59.066870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791310125791463:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:59.067048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bd3/r3tmp/tmp85ekaN/pdisk_1.dat 2024-11-21T17:49:59.141534Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2990, node 1 2024-11-21T17:49:59.159743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:59.159754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:59.159756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:59.159788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7443 TClient is connected to server localhost:7443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:49:59.216705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:59.216736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:59.217092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.217788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:49:59.225376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.285732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.302711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.311653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.389162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310125793031:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.389190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.415868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.422446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.430437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.437390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.444283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.451508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.459727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310125793523:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.459747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.459756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310125793528:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.460427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.464371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791310125793530:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 61365, MsgBus: 32396 2024-11-21T17:49:58.855448Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791305547361591:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.855527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bfc/r3tmp/tmpyUma8r/pdisk_1.dat 2024-11-21T17:49:58.912488Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61365, node 1 2024-11-21T17:49:58.924525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.924539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.924541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.924569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32396 2024-11-21T17:49:58.955691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.955722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.956754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.983262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.984864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:59.001027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:59.062775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.081417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.090210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.145622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309842330218:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.145660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.168839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.176136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.230533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.241194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.248301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.255367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.263090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309842330723:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.263107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309842330728:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.263114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.263631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.268514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791309842330730:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:49:59.458517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.465994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.479869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10365, MsgBus: 1756 2024-11-21T17:49:59.037882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791310465224816:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:59.038452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bd5/r3tmp/tmpKjM68u/pdisk_1.dat 2024-11-21T17:49:59.085854Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10365, node 1 2024-11-21T17:49:59.097930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:59.097941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:59.097943Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:59.097976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1756 2024-11-21T17:49:59.134977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:59.135005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:59.136094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:49:59.168899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.180785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.204307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.221772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.230487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.327507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310465226212:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.327534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.349667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.356063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.366933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.373887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.428666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.437353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.493606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310465226730:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.493625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310465226735:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.493627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.494227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.499428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791310465226737:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source_reboots/unittest >> TExternalDataSourceTestReboots::CreateExternalDataSourceWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:48.324603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:48.324624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:48.324629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:48.324634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:48.324646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:48.324650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:48.324660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:48.324727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:48.333014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:48.333032Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:48.334928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:48.335017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:48.335040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:48.337442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:48.337513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:48.337613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:48.337821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:48.338442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:48.338732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:48.338743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:48.338757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:48.338765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:48.338771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:48.338813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:48.339933Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:48.356420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:48.356496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.356563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:48.356618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:48.356625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.357394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:48.357420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:48.357470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.357479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:48.357484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:48.357489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:48.357851Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.357860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:48.357864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:48.358121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.358128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.358134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:48.358140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:48.358593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:48.358986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:48.359042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:48.359229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:48.359253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:48.359260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:48.359309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:48.359316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:48.359344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:48.359356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:48.359743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:48.359753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:48.359797Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:48.359802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:48.359885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:48.359892Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:48.359902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:48.359906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:48.359911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:48.359917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:48.359921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:48.359945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:48.359956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:48.359962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:48.359965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 2024-11-21T17:49:59.829688Z node 48 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:49:59.829693Z node 48 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:49:59.829697Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:59.829712Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T17:49:59.829716Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [48:299:2291] 2024-11-21T17:49:59.830167Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:49:59.830181Z node 48 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalDataSource TPropose, operationId: 1003:0ProgressState 2024-11-21T17:49:59.830188Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1003 ready parts: 1/1 2024-11-21T17:49:59.830220Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:59.830316Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:49:59.830923Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 
2024-11-21T17:49:59.830965Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:49:59.830971Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [48:300:2292] 2024-11-21T17:49:59.831013Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T17:49:59.831034Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:49:59.831087Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:59.831107Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 206158432361 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:59.831114Z node 48 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalDataSource TPropose, operationId: 1003:0HandleReply TEvOperationPlan: step# 5000004 2024-11-21T17:49:59.831138Z node 48 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:49:59.831160Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:49:59.831169Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:49:59.831546Z node 48 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:59.831552Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:49:59.831575Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:49:59.831589Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:49:59.831600Z node 48 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:59.831604Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [48:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:49:59.831608Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [48:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T17:49:59.831612Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [48:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T17:49:59.831664Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 
72057594046678944 2024-11-21T17:49:59.831669Z node 48 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:49:59.831679Z node 48 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:49:59.831683Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:59.831688Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:49:59.831693Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:49:59.831697Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:49:59.831701Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:49:59.831712Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:49:59.831717Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T17:49:59.831720Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T17:49:59.831723Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 2 2024-11-21T17:49:59.831859Z node 48 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:59.831869Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:59.831873Z node 48 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:59.831877Z node 48 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:49:59.831881Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:49:59.832151Z node 48 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:59.832170Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:49:59.832175Z node 48 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:49:59.832179Z node 48 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T17:49:59.832183Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:49:59.832198Z node 48 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 
72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:49:59.832203Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [48:299:2291] 2024-11-21T17:49:59.832833Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:59.833200Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:49:59.833230Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:49:59.833236Z node 48 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [48:300:2292] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T17:49:59.833348Z node 48 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirExternalDataSource/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:49:59.833388Z node 48 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirExternalDataSource/MyExternalDataSource" took 50us result status StatusSuccess 2024-11-21T17:49:59.833481Z node 48 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirExternalDataSource/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleConfigTests::TestAffectedConfigs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27406, MsgBus: 63192 2024-11-21T17:49:58.912245Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791306287522870:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.912452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bee/r3tmp/tmpNgVujm/pdisk_1.dat 2024-11-21T17:49:58.967981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27406, node 1 2024-11-21T17:49:58.975785Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.975810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.975812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.975845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63192 2024-11-21T17:49:59.013670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:59.013696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:59.014987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:59.035122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.040969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:59.053323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.082860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.101188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.109918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:49:59.230356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310582491736:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.230387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.262597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.268404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.275588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.282675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.289959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.296729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.305107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310582492240:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.305116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791310582492245:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.305123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.305678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.310270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791310582492247:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:49:59.501983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.508028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.521453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1861, MsgBus: 28723 2024-11-21T17:49:59.089978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791309084627009:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:59.090056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bdb/r3tmp/tmpBgQEet/pdisk_1.dat 2024-11-21T17:49:59.141447Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1861, node 1 2024-11-21T17:49:59.156444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:59.156463Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:59.156465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:59.156515Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28723 2024-11-21T17:49:59.189579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:59.189601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:59.190688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:49:59.218267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.221705Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:59.231139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.249290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.267184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.277229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.425389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309084628404:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.425415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.456805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.463183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.472773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.486297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.493764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.507463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.515793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309084628915:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.515827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791309084628920:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.515827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.516426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.520198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791309084628922:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:49:59.710395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.718019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.732001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpPg::TableDeleteAllData [GOOD] >> KqpPg::TableDeleteWhere |80.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::MultiDeleteFromTable-QueryService >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges >> KqpQueryPerf::DeleteOn+QueryService >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> KqpQueryPerf::UpdateOn-QueryService >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::ComputeLength+QueryService ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2024-11-21T17:45:00.151681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:45:00.151714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:00.151721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:45:00.151725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:45:00.151740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:45:00.151744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:45:00.151755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:45:00.152470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:45:00.165630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:45:00.165663Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:45:00.170320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:45:00.170514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:45:00.170541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046578944 2024-11-21T17:45:00.171477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:45:00.171771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:45:00.171864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.171956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:00.172914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:00.173222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:00.173420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:00.173446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:45:00.173455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046578944, domainId: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:00.173462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:45:00.173483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.221673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "hdd" } StoragePools { Name: "" Kind: "hdd-3" } StoragePools { Name: "" Kind: "hdd-1" } StoragePools { Name: "" Kind: "hdd-2" } } } TxId: 1 TabletId: 72057594046578944 , at schemeshard: 72057594046578944 2024-11-21T17:45:00.221740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.221792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 0 2024-11-21T17:45:00.221828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046578944 2024-11-21T17:45:00.221835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.229273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046578944 PathId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.229308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2024-11-21T17:45:00.229355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.229366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046578944 2024-11-21T17:45:00.229369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:45:00.229374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:45:00.240535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.240563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 
72057594046578944 2024-11-21T17:45:00.240569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:45:00.248552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.248583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.248591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:00.248613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:45:00.249964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046578944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:45:00.252599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046578944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:45:00.252727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:45:00.252978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.252988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T17:45:00.252993Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.450466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:45:00.450533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 232 RawX2: 4294969520 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:45:00.450544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:00.450846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:45:00.450855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:45:00.450889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:00.450901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:45:00.451570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:45:00.451579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:45:00.451625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:45:00.451629Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:241:2231], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:45:00.451708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:45:00.451715Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:45:00.451727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:45:00.451730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:00.451735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:45:00.451740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:45:00.451744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:45:00.451748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:45:00.451757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:45:00.451763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:45:00.451767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:45:00.452687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:00.452702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:45:00.452707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:45:00.452712Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:45:00.452717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:45:00.452729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:45:00.452733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [1:95:2130] 2024-11-21T1 ... 
stered, txId: 1, at schemeshard: 72057594046578944 2024-11-21T17:50:00.232388Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046578944 2024-11-21T17:50:00.232442Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 229 RawX2: 120259086509 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046578944, at schemeshard: 72057594046578944 2024-11-21T17:50:00.232454Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:50:00.232528Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:00.232538Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046578944 2024-11-21T17:50:00.232578Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:50:00.232590Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046578944, LocalPathId: 1], at schemeshard: 72057594046578944 2024-11-21T17:50:00.233288Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T17:50:00.233305Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 1, path id: [OwnerId: 72057594046578944, LocalPathId: 1] 2024-11-21T17:50:00.233360Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T17:50:00.233367Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:242:2232], at schemeshard: 72057594046578944, txId: 1, path id: 1 2024-11-21T17:50:00.233460Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046578944 2024-11-21T17:50:00.233470Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046578944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:00.233486Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:00.233491Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:00.233497Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:00.233503Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:00.233507Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:00.233512Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:00.233525Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 2 2024-11-21T17:50:00.233532Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T17:50:00.233536Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046578944, LocalPathId: 1], 3 2024-11-21T17:50:00.233737Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046578944, cookie: 1 2024-11-21T17:50:00.233757Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046578944, cookie: 1 2024-11-21T17:50:00.233761Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 1 2024-11-21T17:50:00.233767Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 1, pathId: [OwnerId: 72057594046578944, LocalPathId: 1], version: 3 2024-11-21T17:50:00.233772Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 1] was 1 2024-11-21T17:50:00.233788Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 1, subscribers: 1 2024-11-21T17:50:00.233792Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [28:95:2130] 2024-11-21T17:50:00.234841Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 1 2024-11-21T17:50:00.234945Z node 28 :TX_PROXY DEBUG: actor# [28:290:2272] Bootstrap 2024-11-21T17:50:00.236267Z node 28 :TX_PROXY DEBUG: actor# [28:290:2272] Become StateWork (SchemeCache [28:296:2277]) 2024-11-21T17:50:00.236522Z node 28 :TX_PROXY DEBUG: actor# [28:290:2272] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:00.237098Z node 28 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:00.238576Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:00.239332Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:00.239568Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:00.239731Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:00.240464Z node 28 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:00.240477Z node 28 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:00.240521Z node 28 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:00.242476Z node 28 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:00.242726Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:00.242763Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:00.242789Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:00.242816Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:00.242853Z node 28 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:00.264616Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:00.264674Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:00.275624Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:00.275676Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:00.275694Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:00.275708Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:00.275736Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:00.275748Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:00.275754Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:00.275762Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:00.286782Z node 28 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:00.286868Z node 28 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:00.287063Z node 28 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:00.287074Z node 28 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:00.287121Z node 28 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:00.287266Z node 28 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/6fwh/001df5/r3tmp/tmp4GvvcN/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } } 2024-11-21T17:50:00.287330Z node 28 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 28:1 Path# /home/runner/.ya/build/build_root/6fwh/001df5/r3tmp/tmp4GvvcN/pdisk_1.dat 2024-11-21T17:50:00.298523Z node 28 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2024-11-21T17:50:00.298607Z node 28 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:50:00.298617Z node 28 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) Bootstrap 2024-11-21T17:50:00.298650Z node 28 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2024-11-21T17:50:00.298672Z node 28 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:50:00.298767Z node 28 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2024-11-21T17:50:00.298777Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:50:00.298781Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:50:00.298815Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[28:385:2341] 2024-11-21T17:50:00.299288Z node 28 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2024-11-21T17:50:00.299301Z node 28 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [28:380:2338] 2024-11-21T17:50:00.299448Z node 28 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[28:385:2341] 2024-11-21T17:50:00.299470Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2024-11-21T17:50:00.299479Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2024-11-21T17:50:00.299483Z node 28 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService Test command err: Trying to start YDB, gRPC: 18611, MsgBus: 9822 2024-11-21T17:49:58.578450Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791307507984115:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.578676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c19/r3tmp/tmpVyV4Ey/pdisk_1.dat 2024-11-21T17:49:58.638739Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18611, node 1 2024-11-21T17:49:58.648512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.648525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.648527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.648561Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9822 2024-11-21T17:49:58.678803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:58.678836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:58.679949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9822 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:58.703630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.705714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:58.718501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.737171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:49:58.756745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.812858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:58.888019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791307507985652:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.888042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.915534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.921147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.933389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.939941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.947079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.954677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:58.962909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791307507986152:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.962935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.963021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791307507986157:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:58.963633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:58.967684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791307507986159:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. >> KqpQueryPerf::IndexInsert+QueryService >> KqpQueryPerf::UpdateOn+QueryService >> KqpQueryPerf::IndexReplace+QueryService [GOOD] >> KqpQueryPerf::Insert-QueryService >> KqpStreamLookup::ReadTableDuringSplit >> KqpStreamLookup::ReadTableWithIndexDuringSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin+QueryService Test command err: Trying to start YDB, gRPC: 24351, MsgBus: 18956 2024-11-21T17:49:59.002638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791311057342783:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:59.002817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001be2/r3tmp/tmpuAy8FC/pdisk_1.dat 2024-11-21T17:49:59.056669Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24351, node 1 2024-11-21T17:49:59.065097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:59.065107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:59.065109Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:59.065154Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18956 2024-11-21T17:49:59.104320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:59.104364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:59.105553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:59.132753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
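The "[raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator" line above is Abseil's hardened SwissTable check firing: an iterator into a hash container was dereferenced after the pointed-to element was erased or the table rehashed. Below is a minimal, hypothetical C++ sketch (not taken from the YDB code under test) of the kind of misuse that produces this diagnostic; the container name and values are illustrative only.

    // Hypothetical reproduction sketch: dereferencing a stale
    // absl::flat_hash_map iterator. Hardened/ASan builds of Abseil
    // report "operator-> called on invalid iterator ..." from raw_hash_set.h.
    #include <absl/container/flat_hash_map.h>
    #include <cstdio>
    #include <string>

    int main() {
        absl::flat_hash_map<int, std::string> table;
        for (int i = 0; i < 16; ++i) {
            table[i] = "value-" + std::to_string(i);
        }

        auto it = table.find(3);   // iterator into the current backing array

        for (int i = 16; i < 1024; ++i) {
            table[i] = "grow";     // growth rehashes and moves every element
        }

        // BUG: 'it' was invalidated by the rehash above; dereferencing it is
        // undefined behavior, which Abseil's debug checks surface as the
        // raw_hash_set.h assertion seen in this log.
        std::printf("%s\n", it->second.c_str());
        return 0;
    }

As the message itself suggests, re-running the affected unit test binary under AddressSanitizer typically pinpoints where the stale iterator was created and where the invalidating erase or rehash happened.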
2024-11-21T17:49:59.135779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:59.139940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.205387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.223591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.236863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.312618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311057344341:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.312645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.333389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.339844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.345542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.352838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.359996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.366967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.375334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311057344834:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.375356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.375360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311057344839:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.375958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.380445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791311057344841:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest |80.7%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2734, MsgBus: 6469 2024-11-21T17:50:00.184068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791315705348045:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:00.184218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bb6/r3tmp/tmp71Qwej/pdisk_1.dat 2024-11-21T17:50:00.239193Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2734, node 1 2024-11-21T17:50:00.254908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:00.254917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:00.254919Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:00.254954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6469 2024-11-21T17:50:00.286420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:00.286452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:00.287566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:00.320175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:00.331574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.348114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.364650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.379548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.474254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791315705349588:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.474278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.495740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.501655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.508084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.514988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.522146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.529309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.537625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791315705350088:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.537645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.537944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791315705350093:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.538515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:00.542115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791315705350095:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage >> TPQTestSlow::TestOnDiskStoredSourceIds >> TDataShardTrace::TestTraceDistributedSelect >> TDataShardTrace::TestTraceDistributedUpsert >> TDataShardTrace::TestTraceWriteImmediateOnShard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3329, MsgBus: 17843 2024-11-21T17:50:00.153036Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791312923629221:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:00.153337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bbb/r3tmp/tmpcdYPu5/pdisk_1.dat 2024-11-21T17:50:00.200026Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3329, node 1 2024-11-21T17:50:00.214913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:00.214929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:00.214931Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:00.214966Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17843 TClient is connected to server localhost:17843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:50:00.254226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:00.254250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:00.255296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:00.279261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:00.292114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.306996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.324819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.333998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.458083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791312923630769:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.458124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.487353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.494186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.500856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.508200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.514679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.522091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.530679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791312923631283:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.530704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.530716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791312923631288:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:00.531372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:00.535067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791312923631290:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:00.691419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.700408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.711317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryPerf::MultiDeleteFromTable-QueryService [GOOD] >> KqpQueryPerf::DeleteOn+QueryService [GOOD] >> KqpQueryPerf::UpdateOn-QueryService [GOOD] >> KqpQueryPerf::ComputeLength+QueryService [GOOD] |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 14879, MsgBus: 12848 2024-11-21T17:50:00.816525Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791314568627002:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:00.816739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b9c/r3tmp/tmpYO1VWI/pdisk_1.dat 2024-11-21T17:50:00.869090Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14879, node 1 2024-11-21T17:50:00.880951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:00.880964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:00.880965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:00.881001Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12848 TClient is connected to server localhost:12848 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:50:00.918375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:00.918407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:50:00.919286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:00.948858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:00.959108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.022000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.041952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.054155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.099155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791318863595845:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.099200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.133026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.141390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.155183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.168061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.183502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.195737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.211162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791318863596348:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.211186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.211201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791318863596353:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.211959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.215192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791318863596355:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQueryPerf::UpdateOn+QueryService [GOOD] >> KqpQueryPerf::Insert-QueryService [GOOD] >> KqpQueryPerf::IndexInsert+QueryService [GOOD] |80.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1285, MsgBus: 15592 2024-11-21T17:50:01.054214Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791318955627551:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:01.054370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b7a/r3tmp/tmp8oKv2Z/pdisk_1.dat 2024-11-21T17:50:01.108747Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1285, node 1 2024-11-21T17:50:01.127647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:01.127657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:01.127659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:01.127688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15592 2024-11-21T17:50:01.155521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.155549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.157079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:01.185963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:01.190542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:01.200935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.264725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.286221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.298005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.361223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791318955629111:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.361253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.393535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.400849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.458796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.467195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.474695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.488691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.504273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791318955629626:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.504300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.504311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791318955629631:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.505038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.508502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791318955629633:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10128, MsgBus: 7957 2024-11-21T17:50:01.083725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791319774528269:2159];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:01.083883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b71/r3tmp/tmpQas8Sb/pdisk_1.dat 2024-11-21T17:50:01.145093Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10128, node 1 2024-11-21T17:50:01.158261Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:01.158273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:01.158276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:01.158307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7957 2024-11-21T17:50:01.184427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.184453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.185659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:01.223372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.226411Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:50:01.232538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:50:01.250576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.273806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.284588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.408122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319774529686:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.408146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.438252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.443475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.453337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.460065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.467283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.474695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.489671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319774530201:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.489698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.489738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319774530206:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.490406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.494767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791319774530208:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 22420, MsgBus: 62348 2024-11-21T17:50:01.144993Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791317749473074:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:01.145072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b5f/r3tmp/tmpdiZG3G/pdisk_1.dat 2024-11-21T17:50:01.197824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22420, node 1 2024-11-21T17:50:01.210167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:01.210181Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:01.210184Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:01.210216Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62348 2024-11-21T17:50:01.243672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.243702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.244788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:01.276225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.278767Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:01.287948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:01.311128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.327627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.384510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.442113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791317749474464:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.442142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.478076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.484130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.539135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.551040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.605940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.614409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.623262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791317749474980:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.623281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791317749474985:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.623283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.624017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.627468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791317749474987:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3352, MsgBus: 11870 2024-11-21T17:50:01.264093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791319163074339:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:01.264108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b34/r3tmp/tmpMmS6lw/pdisk_1.dat 2024-11-21T17:50:01.328522Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3352, node 1 2024-11-21T17:50:01.341801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:01.341817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:01.341819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:01.341869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11870 2024-11-21T17:50:01.365791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.365821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.366764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:01.389966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.398657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:01.461423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.477878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.489377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.577071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319163075883:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.577102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.604888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.611845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.621620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.628546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.643292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.656555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.665368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319163076387:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.665405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319163076392:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.665406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.666349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.669847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791319163076394:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4246, MsgBus: 32585 2024-11-21T17:50:01.155402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791319505146217:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:01.155476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b3e/r3tmp/tmpSnt3w1/pdisk_1.dat 2024-11-21T17:50:01.224848Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4246, node 1 2024-11-21T17:50:01.232672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:01.232685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:01.232688Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:01.232734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32585 2024-11-21T17:50:01.257939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.257969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.258918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:01.298386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.300819Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:01.309342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:01.326963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.344204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:01.356573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.474822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319505147634:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.474859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.497778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.503885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.515946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.570625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.579242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.585921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.594733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319505148149:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.594761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.594849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319505148154:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.595903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.599981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791319505148156:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:01.767278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.775063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.782930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15278, MsgBus: 12481 2024-11-21T17:50:01.302012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791320184524117:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b3c/r3tmp/tmpCYNUqw/pdisk_1.dat 2024-11-21T17:50:01.330511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:01.343919Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15278, node 1 2024-11-21T17:50:01.359853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:01.359867Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:01.359869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:01.359905Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12481 2024-11-21T17:50:01.393775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.393803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.394889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:50:01.422303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.424966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:01.433470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.448427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.468672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.477898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.604154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791320184525504:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.604198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.641039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.646930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.656311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.663647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.670291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.676983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.685658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791320184526017:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.685673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791320184526022:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.685678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.686181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.690602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791320184526024:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2024-11-21T17:49:15.435181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:15.435680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:15.435708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ca3/r3tmp/tmp6f9xgl/pdisk_1.dat 2024-11-21T17:49:15.543699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:15.562330Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:15.604832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:15.604873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:15.615631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:15.719770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:15.734354Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:15.734574Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:15.734654Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:15.734704Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:15.742985Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:15.743162Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:15.743189Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:15.743367Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:15.743376Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:15.743384Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:15.743434Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:15.747372Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:15.747455Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:15.747481Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:15.747487Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:15.747492Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:15.747498Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:15.747650Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.747658Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.747819Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:15.747844Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:15.747862Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.747866Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.747872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:15.747880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:15.747887Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:15.747894Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:15.747899Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:15.747903Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:15.747909Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:15.747914Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:15.747936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:15.747940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:15.747970Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:15.748026Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:15.748036Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:15.748057Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:15.748065Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:15.748070Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:15.748075Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:15.748079Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:15.748127Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:15.748131Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:15.748135Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:15.748138Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:15.748151Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:15.748155Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:15.748159Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:15.748162Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:15.748167Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:15.748494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:15.748508Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:15.758852Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:15.758904Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:15.758913Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:15.758938Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:15.758972Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:15.944257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.944286Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:15.944295Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:15.944318Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:15.944323Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:15.944354Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:15.944363Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:15.944368Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:15.944374Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:15.945172Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:15.945198Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:15.945363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.945369Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:15.945376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:15.945382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:15.945387Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:15.945396Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 037889 2024-11-21T17:50:02.485527Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2024-11-21T17:50:02.485531Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2024-11-21T17:50:02.485536Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:50:02.485539Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2024-11-21T17:50:02.485543Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T17:50:02.485547Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2024-11-21T17:50:02.485566Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2024-11-21T17:50:02.485571Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:50:02.485575Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T17:50:02.485578Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:50:02.485582Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T17:50:02.485587Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2024-11-21T17:50:02.485591Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2024-11-21T17:50:02.485595Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2024-11-21T17:50:02.485599Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:50:02.485603Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:50:02.485606Z node 15 
:TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T17:50:02.485610Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T17:50:02.485633Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T17:50:02.485637Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T17:50:02.485644Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T17:50:02.485648Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T17:50:02.485652Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:50:02.485655Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T17:50:02.485659Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T17:50:02.485663Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:50:02.485688Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2024-11-21T17:50:02.485693Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T17:50:02.485697Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:50:02.485701Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:50:02.485706Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T17:50:02.485709Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:50:02.485713Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2024-11-21T17:50:02.485717Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:02.485720Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T17:50:02.485724Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T17:50:02.485727Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T17:50:02.506511Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2024-11-21T17:50:02.506542Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2024-11-21T17:50:02.506562Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:02.506572Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:50:02.506595Z node 15 
:TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:50:02.506607Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:02.506707Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2024-11-21T17:50:02.506712Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2024-11-21T17:50:02.506720Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:50:02.506726Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:50:02.506734Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:50:02.506742Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:50:02.507151Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2024-11-21T17:50:02.507178Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:50:02.507192Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T17:50:02.507219Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:50:02.507224Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:50:02.507229Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:50:02.507233Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:50:02.507244Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T17:50:02.507248Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:50:02.507251Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:50:02.507255Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:50:02.507258Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:50:02.507273Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2024-11-21T17:50:02.507280Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2024-11-21T17:50:02.507320Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2024-11-21T17:50:02.507325Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:50:02.507328Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:50:02.507332Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:50:02.507346Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:50:02.507349Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:50:02.507353Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T17:50:02.507357Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:50:02.507373Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:50:02.507539Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [15:1021:2820], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanStarted 2024-11-21T17:50:02.507568Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [15:1021:2820], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanFinished 2024-11-21T17:50:02.507624Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:02.507631Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:02.507638Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:02.507646Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:02.507651Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:50:02.507656Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:02.507659Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:02.507665Z node 15 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:02.507672Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService Test command err: Trying to start YDB, gRPC: 13985, MsgBus: 25331 2024-11-21T17:50:01.107176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791319916432180:2161];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:01.107242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001b45/r3tmp/tmp0TOCdN/pdisk_1.dat 2024-11-21T17:50:01.169808Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13985, node 1 
2024-11-21T17:50:01.196467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:01.196490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:01.196492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:01.196537Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:01.208241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.208266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.209502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25331 TClient is connected to server localhost:25331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:01.259716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.261789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.271768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.337198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.352130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.365909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:01.445685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319916433600:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.445735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.450041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.455847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.510433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.522927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.577557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.585957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.594589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319916434117:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.594622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.594623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791319916434122:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:01.595301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:01.599537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791319916434124:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. [raw_hash_set.h : 1598] RAW: operator-> called on invalid iterator. The element might have been erased or the table might have rehashed. Consider running with --config=asan to diagnose rehashing issues. |80.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD] |80.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TExternalDataSourceTest::SchemeErrors >> TExternalDataSourceTest::CreateExternalDataSource >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource |80.7%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2024-11-21T17:50:01.605500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:01.606022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:01.606050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002229/r3tmp/tmpQzpWN6/pdisk_1.dat 2024-11-21T17:50:01.715214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.732721Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:01.775326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.775364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.785956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:01.889803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:02.101498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:702:2586], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:02.101529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2591], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:02.101538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:02.102465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:50:02.289891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:716:2594], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:03.178439Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xgsbn2a5pg9c9tpjbtjvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE5OGRjNzUtYjU0YmFhMjctYzQyOTUzMzItMzdkMTE2NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:03.245400Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xgtdwdecvme2srk5msxwh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjMyYjI5ODAtYzhlMTRmNjYtOTkyNWMwM2MtODg4MjI0ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalDataSourceTest::SchemeErrors [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TExternalDataSourceTest::DropExternalDataSource [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2024-11-21T17:50:01.623485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:01.623856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:01.623874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00236b/r3tmp/tmpzsWQlh/pdisk_1.dat 2024-11-21T17:50:01.727358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.744335Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:01.787040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:01.787078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:01.797671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:01.901949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:02.122936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:02.122967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:760:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:02.123024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:02.123913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:50:02.312913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:764:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:03.602592Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xgscackk5grd32h14bm9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNkZDg1YWUtMWNlNmNjN2EtYWJiZGE4NzktZjI3YTM4MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:03.611913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xgscackk5grd32h14bm9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNkZDg1YWUtMWNlNmNjN2EtYWJiZGE4NzktZjI3YTM4MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:03.614486Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xgscackk5grd32h14bm9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNkZDg1YWUtMWNlNmNjN2EtYWJiZGE4NzktZjI3YTM4MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:03.664674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xgtvz0gjh6wweg9zngahy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQ3MTBlMjAtYmM4YTNkMDgtY2U4MzY5NTItMTM4YWFmODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR >> KqpPg::TableDeleteWhere [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:03.875899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.875927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.875933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.875938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.875953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.875958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.875968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.876048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.890567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T17:50:03.890591Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:03.899358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.900244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.900289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.901786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.902246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.902368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.902445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.903914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.904194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.904205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.904258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.904268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.904275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.904290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.905518Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.922190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.922258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.922319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.922365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.922373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.923434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.923469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T17:50:03.923507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.923518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.923522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.923527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.924359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.924373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.924379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.925538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.925550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.925556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.925561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.926142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.926526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.926566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.926713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.926737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.926747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.926793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.926800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.926826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.926848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.927207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.927214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.927241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.927246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:03.927311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.927317Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.927327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.927331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.927336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.927341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.927345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.927348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.927358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.927364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.927368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:03.927654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.927666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.927670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:03.927674Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:03.927679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.927693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6: External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2024-11-21T17:50:03.935932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.935964Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2024-11-21T17:50:03.935972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2024-11-21T17:50:03.936004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2024-11-21T17:50:03.936397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.936418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2024-11-21T17:50:03.936890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.936928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, 
feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2024-11-21T17:50:03.936938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2024-11-21T17:50:03.936963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2024-11-21T17:50:03.937341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.937360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2024-11-21T17:50:03.937787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.937818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2024-11-21T17:50:03.937829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2024-11-21T17:50:03.937845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2024-11-21T17:50:03.938204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.938223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource 
TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2024-11-21T17:50:03.938620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.938643Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2024-11-21T17:50:03.938651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2024-11-21T17:50:03.938662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, at schemeshard: 72057594046678944 2024-11-21T17:50:03.939026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.939048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:03.931967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.931991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.931996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.932000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.932014Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.932017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.932027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.932106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.942918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.942941Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:03.947138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.947946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.947984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.950959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.951105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.951210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.951272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.951957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.952152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.952159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.952187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.952192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.952196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.952205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.953263Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.967234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.967308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.967367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.967411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.967417Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.970214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.970251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.970309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.970322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.970326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.970331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.970958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.970975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.970980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.971313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.971321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.971325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.971330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.971903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.972281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.972335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.972523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.972553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.972563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.972617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.972625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.972677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.972691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.973097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.973107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.973152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.973158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:03.973251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.973260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.973272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.973277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.973282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.973287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.973292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.973296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.973309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.973315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.973319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:03.973614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.973627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.973632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:03.973636Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 
2024-11-21T17:50:03.973641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.973656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... OperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.977586Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T17:50:03.977602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:50:03.977622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.977631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:50:03.977926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.977934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.977970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:03.977988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.977993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:03.977998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:50:03.978069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.978076Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:03.978086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:03.978090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:03.978095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:03.978100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:03.978105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:03.978109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:03.978120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:03.978125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:50:03.978129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:03.978133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T17:50:03.978252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:03.978265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:03.978272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:03.978277Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:03.978281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.978383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:03.978394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:03.978398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:03.978402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:03.978405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:03.978416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:03.978910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:03.979068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:50:03.979107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:50:03.979112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:50:03.979166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:03.979180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:03.979183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:296:2288] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:03.979227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:03.979253Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 33us result status StatusSuccess 2024-11-21T17:50:03.979342Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T17:50:03.980097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.980145Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2024-11-21T17:50:03.980156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2024-11-21T17:50:03.980190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:94, at schemeshard: 72057594046678944 2024-11-21T17:50:03.980604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: 
EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:94" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:03.980627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp:94, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:50:03.980662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:03.980666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:50:03.980704Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:03.980715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:03.980717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:304:2296] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:03.599671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.599690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.599693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.599697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.599713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.599716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.599725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.599802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.607404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.607428Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2024-11-21T17:50:03.609876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.610386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.610413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.612035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.612274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.612403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.612488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.613491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.613758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.613768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.613805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.613812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.613818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.613832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.615097Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.628118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.628188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.628279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.628331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.628337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.629025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.629043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.629083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.629090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.629093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.629097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.629365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.629371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.629373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.629579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.629583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.629587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.629593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.629948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.630251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.630294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.630452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.630468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.630474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.630512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.630516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.630542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.630551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.630978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.630988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.631030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.631034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:03.631109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.631114Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.631123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.631126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.631130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.631133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.631137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.631140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.631149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.631154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.631156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:03.631362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.631372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.631375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:03.631378Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:03.631381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.631388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
03.923639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:03.923642Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:03.923647Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T17:50:03.923651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.923739Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:03.923747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:03.923751Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:03.923754Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:03.923757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:03.923765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:50:03.924090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:03.924119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T17:50:03.924352Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.924374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.924383Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2024-11-21T17:50:03.924404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:03.924418Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T17:50:03.924446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.924454Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:03.924765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:03.924791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:03.924878Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.924884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.924904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:03.924923Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.924929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:50:03.924934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:03.924983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.924990Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:03.925001Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:03.925004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:03.925010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:50:03.925015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:03.925020Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:03.925024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:03.925033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:03.925038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:50:03.925041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:50:03.925045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:50:03.925103Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:03.925112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 
72057594046678944, cookie: 102 2024-11-21T17:50:03.925119Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:03.925122Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:50:03.925126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:03.925164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:03.925169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:03.925177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.925203Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:03.925211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:03.925215Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:03.925218Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:50:03.925221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.925227Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:50:03.925910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:03.925935Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:03.925947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:50:03.925986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:03.925992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:50:03.926052Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:03.926066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:03.926070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:327:2319] TestWaitNotification: OK eventTxId 102 
2024-11-21T17:50:03.926132Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:03.926153Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 32us result status StatusPathDoesNotExist 2024-11-21T17:50:03.926195Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |80.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup >> TExternalDataSourceTest::ReadOnlyMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:03.910828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.910868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.910872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.910876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.910889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.910894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.910902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.910977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.922101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.922120Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:03.926461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.927347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.927382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.928755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.928919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.929026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.929088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.929915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.930160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.930169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.930204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.930211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.930217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.930229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.931822Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.949108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.949184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.949249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.949302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.949311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.949944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.949967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.950009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.950019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.950024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.950029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.950391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.950401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.950405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.950736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.950746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.950751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.950758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.951328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.951704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.951751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.951924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.951947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.951958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.952006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.952013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T17:50:03.952041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.952053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.952448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.952456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.952486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.952492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:03.952559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.952566Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.952577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.952581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.952586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.952592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.952596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.952600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.952611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.952617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.952621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:03.952907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.952918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:03.952923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:03.952927Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:03.952932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.952945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
-21T17:50:04.218726Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:04.218730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:04.218736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:04.218741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:04.218745Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:04.218748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:04.218759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:04.218764Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:50:04.218768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:50:04.218773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T17:50:04.218938Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:04.218950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:04.218955Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:04.218960Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:04.218964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.219170Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:04.219179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:04.219185Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:04.219197Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:04.219204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:04.219213Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:04.219530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2024-11-21T17:50:04.219719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:50:04.219766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:50:04.219773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:50:04.219827Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:04.219842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:04.219848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:302:2294] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:04.219907Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:04.219934Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 35us result status StatusSuccess 2024-11-21T17:50:04.220001Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T17:50:04.220569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:04.220608Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 
102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2024-11-21T17:50:04.220618Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2024-11-21T17:50:04.220638Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, at schemeshard: 72057594046678944 2024-11-21T17:50:04.221116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:04.221143Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:50:04.221185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:04.221191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:50:04.221236Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:04.221250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:04.221254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:310:2302] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:04.221304Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:04.221323Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 23us result status StatusSuccess 2024-11-21T17:50:04.221376Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere [GOOD] Test command err: Trying to start YDB, gRPC: 5192, MsgBus: 20714 2024-11-21T17:49:44.107270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791248136556845:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:44.107461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a7e/r3tmp/tmpjuaDCG/pdisk_1.dat 2024-11-21T17:49:44.129917Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5192, node 1 2024-11-21T17:49:44.144444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:44.144454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:44.144455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:44.144483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20714 TClient is connected to server localhost:20714 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:44.207628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:44.207664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:44.208789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:44.209995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.423494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.484614Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 0'::bytea, 'bytea 0'::bytea ) 2024-11-21T17:49:44.486401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791248136557553:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.486404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791248136557542:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.486415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:44.487032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:49:44.491146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791248136557556:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 1'::bytea, 'bytea 1'::bytea ) --!syntax_pg INSERT INTO Pg17_b (key, value) VALUES ( 'bytea 2'::bytea, 'bytea 2'::bytea ) \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 2024-11-21T17:49:44.668323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.675517Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '0'::int2, '{a0, b10}'::_bytea ) --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '1'::int2, '{a1, b11}'::_bytea ) --!syntax_pg INSERT INTO Pg1001_b (key, value) VALUES ( '2'::int2, '{a2, b12}'::_bytea ) {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} 16 2024-11-21T17:49:44.767224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.774444Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg16_b (key, value) VALUES ( 'false'::bool, 'false'::bool ) --!syntax_pg INSERT INTO Pg16_b (key, value) VALUES ( 'true'::bool, 'true'::bool ) f f t t 18 2024-11-21T17:49:44.841827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.848351Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '0'::"char", '0'::"char" ) --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '1'::"char", '1'::"char" ) --!syntax_pg INSERT INTO Pg18_b (key, value) VALUES ( '2'::"char", '2'::"char" ) 0 0 1 1 2 2 21 2024-11-21T17:49:44.929084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T17:49:44.935806Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '0'::int2, '0'::int2 ) --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '1'::int2, '1'::int2 ) --!syntax_pg INSERT INTO Pg21_b (key, value) VALUES ( '2'::int2, '2'::int2 ) 0 0 1 1 2 2 23 2024-11-21T17:49:45.034285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2024-11-21T17:49:45.042042Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '0'::int4, '0'::int4 ) --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '1'::int4, '1'::int4 ) --!syntax_pg INSERT INTO Pg23_b (key, value) VALUES ( '2'::int4, '2'::int4 ) 0 0 1 1 2 2 20 2024-11-21T17:49:45.138555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715729:0, at schemeshard: 72057594046644480 2024-11-21T17:49:45.145723Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send 
TEvPoisonPill --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '0'::int8, '0'::int8 ) --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '1'::int8, '1'::int8 ) --!syntax_pg INSERT INTO Pg20_b (key, value) VALUES ( '2'::int8, '2'::int8 ) 0 0 1 1 2 2 700 2024-11-21T17:49:45.243889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715741:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '0.5'::float4, '0.5'::float4 ) 2024-11-21T17:49:45.252559Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '1.5'::float4, '1.5'::float4 ) --!syntax_pg INSERT INTO Pg700_b (key, value) VALUES ( '2.5'::float4, '2.5'::float4 ) 0.5 0.5 1.5 1.5 2.5 2.5 701 2024-11-21T17:49:45.365276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715753:0, at schemeshard: 72057594046644480 2024-11-21T17:49:45.372129Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '0.5'::float8, '0.5'::float8 ) --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '1.5'::float8, '1.5'::float8 ) --!syntax_pg INSERT INTO Pg701_b (key, value) VALUES ( '2.5'::float8, '2.5'::float8 ) 0.5 0.5 1.5 1.5 2.5 2.5 25 2024-11-21T17:49:45.467157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715765:0, at schemeshard: 72057594046644480 2024-11-21T17:49:45.474000Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 0'::text, 'text 0'::text ) --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 1'::text, 'text 1'::text ) --!syntax_pg INSERT INTO Pg25_b (key, value) VALUES ( 'text 2'::text, 'text 2'::text ) text 0 text 0 text 1 text 1 text 2 text 2 1042 2024-11-21T17:49:45.571713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715777:0, at schemeshard: 72057594046644480 2024-11-21T17:49:45.578122Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1042_b (key, value) VALUES ( 'bpchar 0'::bpchar, 'bpchar 0'::bpchar ) --!syntax_pg INSERT INTO Pg1042_b (key, value) VALUES ( 'bpchar 1'::bpchar, 'bpchar 1':: ... 
24-11-21T17:50:03.340893Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715829:0, at schemeshard: 72057594046644480 628 2024-11-21T17:50:03.352637Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.354543Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715831:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.369252Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.370993Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715833:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.429398Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 601 2024-11-21T17:50:03.431495Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715835:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.441166Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.443021Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715837:0, at schemeshard: 72057594046644480 603 2024-11-21T17:50:03.451350Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.453232Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715839:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.464563Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.466439Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715841:0, at schemeshard: 72057594046644480 602 2024-11-21T17:50:03.523914Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.525791Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715843:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.533646Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.535367Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715845:0, at schemeshard: 72057594046644480 604 2024-11-21T17:50:03.547820Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.549312Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715847:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.562821Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.564384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715849:0, at schemeshard: 72057594046644480 718 2024-11-21T17:50:03.577281Z node 6 :READ_TABLE_API WARN: 
ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.579018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715851:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.590589Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.592376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715853:0, at schemeshard: 72057594046644480 869 2024-11-21T17:50:03.605804Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.607670Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715855:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.618056Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.619633Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715857:0, at schemeshard: 72057594046644480 650 2024-11-21T17:50:03.633120Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.634916Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715859:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.647431Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.649581Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715861:0, at schemeshard: 72057594046644480 829 2024-11-21T17:50:03.664175Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.666195Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715863:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.723581Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.725700Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715865:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.738745Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 774 2024-11-21T17:50:03.741219Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715867:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.753569Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.756201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715869:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.768976Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2950 2024-11-21T17:50:03.771412Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715871:0, at schemeshard: 72057594046644480 
2024-11-21T17:50:03.792014Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.794348Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715873:0, at schemeshard: 72057594046644480 114 2024-11-21T17:50:03.806377Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.809721Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715875:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.821011Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.822875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715877:0, at schemeshard: 72057594046644480 3802 2024-11-21T17:50:03.890325Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.894248Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715879:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.960913Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.963717Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715881:0, at schemeshard: 72057594046644480 4072 2024-11-21T17:50:03.977912Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:03.980884Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715883:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.040995Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.044834Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715885:0, at schemeshard: 72057594046644480 142 2024-11-21T17:50:04.055214Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.057668Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715887:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.068180Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.070750Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715889:0, at schemeshard: 72057594046644480 3615 2024-11-21T17:50:04.081181Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.082987Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715891:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.103366Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.111508Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715893:0, 
at schemeshard: 72057594046644480 3614 2024-11-21T17:50:04.128465Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.130812Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715895:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.147142Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.149068Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715897:0, at schemeshard: 72057594046644480 22 2024-11-21T17:50:04.160117Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T17:50:04.162500Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715899:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.176376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715900:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.187203Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpPg::V1CreateTable [GOOD] >> KqpPg::TempTablesSessionsIsolation >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:04.573062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:04.573088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:04.573094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:04.573098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:04.573117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:04.573121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:04.573129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:04.573208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:04.583631Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2024-11-21T17:50:04.583652Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:04.587229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:04.587973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:04.588003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:04.589374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:04.589529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:04.589644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.589713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:04.590542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.590785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:04.590795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.590831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:04.590853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.590859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:04.590873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.592063Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:04.607935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:04.608022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.608091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:04.608140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:04.608147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.608864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.608899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2024-11-21T17:50:04.608944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.608954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:04.608959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:04.608964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:04.609319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.609331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:04.609336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:04.609615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.609622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.609628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.609634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.610154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:04.610486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:04.610534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:04.610693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.610713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:04.610721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.610767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:04.610774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.610800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.610812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:04.611161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:04.611168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.611202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.611207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:04.611280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.611287Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:04.611297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:04.611301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.611307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:04.611312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.611316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:04.611320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:04.611329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.611335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:04.611339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:04.611618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:04.611631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:04.611635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:04.611640Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:04.611645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.611660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:04.619346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:04.619350Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T17:50:04.619354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.619401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:04.619408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:04.619412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:04.619418Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:50:04.619421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:04.619428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:50:04.619902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:04.619926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T17:50:04.620006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.620022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:04.620029Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2024-11-21T17:50:04.620044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T17:50:04.620064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.620071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:04.620140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:04.620156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:04.620433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:04.620439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.620458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:04.620467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:04.620477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.620481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:50:04.620485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:04.620489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:04.620540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.620548Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:04.620557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:04.620561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:04.620567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:50:04.620571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:04.620578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:04.620581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:04.620591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:04.620596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:50:04.620600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:50:04.620604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:50:04.620692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:04.620700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:04.620704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:04.620708Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:50:04.620713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.620912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:04.620921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:04.620925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:04.620928Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:50:04.620931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:04.620940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:50:04.621260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:04.621438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:50:04.621476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:04.621484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:50:04.621537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:04.621551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:04.621555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:329:2321] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:04.621611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:04.621632Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 29us result status StatusSuccess 2024-11-21T17:50:04.621690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:04.920309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:04.920336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:04.920342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:04.920346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:04.920360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:04.920364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:04.920372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:04.920449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:04.930490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:04.930510Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:04.933970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T17:50:04.934868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:04.934904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:04.936053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:04.936215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:04.936340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.936413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:04.937674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.937843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:04.937849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.937871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:04.937875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.937879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:04.937888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.938688Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:04.955819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:04.955878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.955933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:04.955972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:04.955978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.957586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.957635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:04.957688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.957701Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:04.957705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:04.957710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:04.958340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.958355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:04.958361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:04.959034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.959079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.959087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.959097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.959821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:04.960513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:04.960581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:04.960808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.960841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:04.960854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.960923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:04.960931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.960971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.960988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:04.961759Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:04.961768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.961814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.961817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:04.961898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.961905Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:04.961915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:04.961919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.961923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:04.961927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.961931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:04.961933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:04.961944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.961949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:04.961952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:04.962241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:04.962253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:04.962256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:04.962260Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:04.962263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.962275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
0:04.980122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:04.980124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:04.980127Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T17:50:04.980129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.980188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:04.980193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:04.980195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:04.980197Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:04.980199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:04.980204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:50:04.980670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T17:50:04.980704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2024-11-21T17:50:04.980802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.980821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:04.980828Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2024-11-21T17:50:04.980845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:04.980858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T17:50:04.980879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.980886Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:04.980991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:50:04.981308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T17:50:04.981528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:04.981533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.981549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:04.981562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.981564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1 2024-11-21T17:50:04.981566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T17:50:04.981587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.981592Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:50:04.981600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:50:04.981602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:04.981606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T17:50:04.981609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:04.981613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:50:04.981615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:50:04.981624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:04.981627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T17:50:04.981629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:50:04.981631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:50:04.981677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:04.981682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 
72057594046678944, cookie: 104 2024-11-21T17:50:04.981685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:04.981687Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:50:04.981689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:04.981717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:04.981720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:04.981725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.981764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:04.981769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:04.981771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:04.981773Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:04.981775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.981779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T17:50:04.982456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:50:04.982491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:04.982502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:50:04.982555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:50:04.982565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:50:04.982700Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:04.982720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:04.982725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:388:2380] TestWaitNotification: OK eventTxId 104 
2024-11-21T17:50:04.982804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:04.982830Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 38us result status StatusPathDoesNotExist 2024-11-21T17:50:04.982879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2024-11-21T17:50:01.925903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:01.926341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:01.926368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e97/r3tmp/tmptK2P1v/pdisk_1.dat 2024-11-21T17:50:02.030043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:02.045990Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:02.088354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:02.088397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:02.099028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:02.202565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative Test command err: 2024-11-21T17:50:01.925859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:01.926179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:01.926193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ea6/r3tmp/tmp9VFpo7/pdisk_1.dat 2024-11-21T17:50:02.021664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:02.040116Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:02.082794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:02.082853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:02.093433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:02.197833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.872727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:04.872761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:04.872775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:04.873758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:50:04.886715Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T17:50:05.085239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:50:05.163056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xgw287aww494m1ybqvknd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJiODkxOGQtNWJkNmRmZDEtZTFhODE5NWEtZmIzOGRjMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> TDataShardTrace::TestTraceDistributedSelect [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:04.889583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:04.889605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:04.889609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:04.889612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:04.889624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:04.889627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:04.889633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:04.889691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:04.899675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:04.899696Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:04.902343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:04.903160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:04.903192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:04.904646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:04.904908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:04.905030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.905124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:04.906237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.906499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
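In the schemeshard bootstrap that follows, each operation advances through numbered states as its sub-steps finish: the log records "Change state for txid 1:0 2 -> 3" next to TCreateParts, "3 -> 128" next to NSubDomainState::TConfigureParts, and "128 -> 240" once TPropose has received the coordinator plan, after which TDone runs. A rough C++ sketch of that progression; the numeric values are taken from the log lines, while the enum and function names are guesses introduced here for readability:

#include <cstdint>

// State numbers as they appear in the log; the names mirror the progress
// handlers logged next to each transition and are descriptive guesses.
enum class EOpState : uint32_t {
    CreateParts    = 2,    // TCreateParts ProgressState ("2 -> 3" when there are no shards to create)
    ConfigureParts = 3,    // NSubDomainState::TConfigureParts ("3 -> 128")
    Propose        = 128,  // NSubDomainState::TPropose, waits for the coordinator plan ("128 -> 240")
    Done           = 240,  // TDone: the operation is finished and its paths are published
};

EOpState NextState(EOpState current) {
    switch (current) {
        case EOpState::CreateParts:    return EOpState::ConfigureParts;
        case EOpState::ConfigureParts: return EOpState::Propose;
        case EOpState::Propose:        return EOpState::Done;
        case EOpState::Done:           return EOpState::Done;
    }
    return current;  // not reached for the states listed above
}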
2024-11-21T17:50:04.906510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.906547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:04.906554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.906560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:04.906573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.907959Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:04.924846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:04.924915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.924977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:04.925023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:04.925031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.926506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.926541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:04.926584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.926594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:04.926598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:04.926603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:04.927448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.927460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:04.927465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:04.927816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.927825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.927831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.927838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.928471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:04.928846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:04.928890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:04.929049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:04.929071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:04.929079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.929125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:04.929131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:04.929158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.929169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:04.929541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:04.929548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:04.929584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:04.929589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:04.929662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:04.929668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:04.929678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:04.929683Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.929688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:04.929693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:04.929697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:04.929701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:04.929711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:04.929717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:04.929720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:04.930000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:04.930012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:04.930016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:04.930021Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:04.930025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:04.930039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
46678944 2024-11-21T17:50:05.041218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2024-11-21T17:50:05.041231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:05.041295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:05.041305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:05.048596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2024-11-21T17:50:05.048653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2024-11-21T17:50:05.048723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:05.048731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.048774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:05.048796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.048805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2024-11-21T17:50:05.048810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 2024-11-21T17:50:05.048822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.048831Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#128:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:05.048842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2024-11-21T17:50:05.048874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:05.049073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.049084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.049088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2024-11-21T17:50:05.049093Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:50:05.049098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:05.049266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.049278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.049281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2024-11-21T17:50:05.049284Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2024-11-21T17:50:05.049288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:05.049298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2024-11-21T17:50:05.056776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2024-11-21T17:50:05.056838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:05.056987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2024-11-21T17:50:05.057487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2024-11-21T17:50:05.057552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.057588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:05.057598Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:50:05.057645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2024-11-21T17:50:05.057689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:05.057703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 FAKE_COORDINATOR: Erasing txId 128 2024-11-21T17:50:05.058224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T17:50:05.058234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.058276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:05.058292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.058298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2024-11-21T17:50:05.058303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:449:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 2024-11-21T17:50:05.058373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.058382Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2024-11-21T17:50:05.058394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2024-11-21T17:50:05.058400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2024-11-21T17:50:05.058405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2024-11-21T17:50:05.058411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2024-11-21T17:50:05.058415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2024-11-21T17:50:05.058420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2024-11-21T17:50:05.058432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:05.058439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2024-11-21T17:50:05.058443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:50:05.058447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2024-11-21T17:50:05.058557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.058569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.058576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2024-11-21T17:50:05.058581Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:50:05.058585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:05.058689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.058699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2024-11-21T17:50:05.058702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2024-11-21T17:50:05.058706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T17:50:05.058709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:05.058718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2024-11-21T17:50:05.059401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2024-11-21T17:50:05.059425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 TestModificationResult got TxId: 128, wait until txId: 128 |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2024-11-21T17:50:01.956656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:01.957198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:01.957224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e8c/r3tmp/tmpbusEGH/pdisk_1.dat 2024-11-21T17:50:02.056221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:02.073141Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:02.115685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:02.115724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:02.126295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:02.230529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:04.927263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:04.927294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:04.927308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:04.928145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:50:04.940897Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T17:50:05.140134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:50:05.216843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xgw3yfxg36pn3caaznhdr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE0MzhiZDQtZWNlZjkxOWMtOTQ3MGM0ZGYtYzc1Zjc4MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.251205Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xgwdgf7ttevc9tbt0cms4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMyMDFlMmUtNzE4OTUxMWMtNDU5MGU0NTQtYTYzNmEzODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.461002Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xgwjmfhcb9hrapcwwewx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgxM2Q1OGItMzk4Njg4OWQtMTY1OTk1ZWYtMmI5MjZkZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache |80.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:05.891891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:05.891924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:05.891930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:05.891936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:05.891943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:05.891947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:05.891957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:05.892047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:05.902661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:05.902680Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2024-11-21T17:50:05.909173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:05.909971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:05.910009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:05.911601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:05.911990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:05.912097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.912175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:05.913104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.913361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:05.913372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.913405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:05.913411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.913418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:05.913430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.914880Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:05.931003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:05.931074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.931125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:05.931165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:05.931172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.931828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.931864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:05.931905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.931915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:05.931919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:05.931924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:05.932340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.932351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:05.932356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:05.932681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.932701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.932707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.932714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.933239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:05.933729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:05.933774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:05.933929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.933952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:05.933962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.934014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:05.934020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.934048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:05.934059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:05.934405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:05.934413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.934456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.934461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:05.934543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.934549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:05.934560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:05.934564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.934570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:05.934575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.934580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:05.934583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:05.934593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:05.934599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:05.934602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:05.934883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:05.934897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:05.934902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:05.934907Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:05.934911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:05.934924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:05.935453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:05.935547Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 
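The CreateTable proposal recorded in the next lines asks for TTL on a column declared as Type: "String" and is rejected with StatusSchemeError, reason "Unsupported column type": a TTL column has to carry a point in time, i.e. a date/time type or an integral column paired with an explicit value unit (the neighbouring CreateTableShouldFailOnWrongUnit tests exercise that second case). A hedged C++ sketch of such a check, for illustration only; it is not the real schemeshard validation and the names are invented:

#include <set>
#include <string>

enum class EStatus { Accepted, SchemeError };

struct TCheckResult {
    EStatus Status;
    std::string Reason;
};

// Accepts date/time columns outright; integral columns only when the request
// also says how to interpret the number (seconds, milliseconds, ...).
TCheckResult ValidateTtlColumn(const std::string& columnType, bool hasValueUnit) {
    static const std::set<std::string> dateTypes = {"Date", "Datetime", "Timestamp"};
    static const std::set<std::string> integralTypes = {"Uint32", "Uint64", "DyNumber"};

    if (dateTypes.count(columnType)) {
        return {EStatus::Accepted, ""};
    }
    if (integralTypes.count(columnType) && hasValueUnit) {
        return {EStatus::Accepted, ""};
    }
    return {EStatus::SchemeError, "Unsupported column type"};  // e.g. a String column with no unit
}

With a check along these lines, the "modified_at" column from the proposal below ("String", no unit) lands in the last branch, which matches the StatusSchemeError seen in the test output.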
2024-11-21T17:50:05.935807Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:05.936894Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:05.937380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:05.937452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.937465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2024-11-21T17:50:05.937560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2024-11-21T17:50:05.937687Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:05.938192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:05.938214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T17:50:05.938290Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable |80.8%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |80.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTestsWithReboots::AlterTable >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2024-11-21T17:50:03.270482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:03.270987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:03.271019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e6c/r3tmp/tmpAnekdw/pdisk_1.dat 2024-11-21T17:50:03.375377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:03.393643Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:03.436517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:03.436566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:03.447154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:03.551000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:06.280558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.280584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.280596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.281323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:50:06.293778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T17:50:06.490542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:50:06.563760Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xgxe8a089zbvxxyw8d4jy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZmMTBhYWEtMzliZDdjMDEtZDA3MjJmY2YtNGVjZjUzNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:06.589055Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xgxqhbjgmgv7dkqpqfyn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdhMGM3YzktZTMxNjBlNDQtMmE4MjE3NTYtYTk3YzUwN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:06.624547Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xgxr9ahxbsarr0z1b8bvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTliYTliN2QtNTZlNTNjMzUtMTkxMTI4MWUtYzc0ZGM3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:06.735372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:06.735402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:06.735407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:06.735414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:06.735420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:06.735424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:06.735433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:06.735519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:06.747092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:06.747118Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:06.751216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:06.752082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:06.752121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:06.753531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:06.753684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:06.753786Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.753860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:06.754689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:06.754989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:06.754999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:06.755039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:06.755047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:06.755053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:06.755071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.756341Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:06.774576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:06.774658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.774732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:06.774778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:06.774786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.775686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.775716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:06.775782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.775794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:06.775799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:06.775803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:06.776205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.776215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:06.776220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:06.776612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.776622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.776628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:06.776636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:06.777254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:06.777650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:06.777708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:06.777922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.777949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:06.777959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:06.778013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:06.778019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:06.778052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:06.778065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:06.778535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:06.778544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:06.778591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:06.778597Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:06.778686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.778694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:06.778706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:06.778711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:06.778718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:06.778724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:06.778729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:06.778733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:06.778744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:06.778751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:06.778755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:06.779052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:06.779066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:06.779072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:06.779078Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:06.779082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:06.779096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ndRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:06.855228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 176 } } 2024-11-21T17:50:06.855238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 176 } } 2024-11-21T17:50:06.855364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:06.855369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T17:50:06.855397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:06.855404Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:06.855413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:06.855426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.855430Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:06.855435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:06.855443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T17:50:06.855650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:06.855657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:06.855668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:06.855672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged 
at tablet: 72057594046678944 2024-11-21T17:50:06.855680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:06.855686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.855690Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.855693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:06.855697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T17:50:06.856554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:06.856574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:06.856584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:06.857478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:06.857514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:06.857541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.857567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:06.857634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.857670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:06.857680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T17:50:06.857697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T17:50:06.857703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T17:50:06.857710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T17:50:06.857755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.857762Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:06.857769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T17:50:06.857772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T17:50:06.857776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T17:50:06.857795Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T17:50:06.857800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T17:50:06.857806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:06.857810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:06.857838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:06.857843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T17:50:06.857846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T17:50:06.857849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:50:06.857852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T17:50:06.857855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T17:50:06.857861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:06.858355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:06.858365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:06.858498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:06.858559Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 71us result status StatusSuccess 2024-11-21T17:50:06.858692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> KqpPg::TempTablesWithCache [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:07.014908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:07.014942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.014947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:07.014952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:07.014959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:07.014963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:07.014972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.015067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:07.027931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:07.027959Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:07.030602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:07.031170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:07.031200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:07.032410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:07.033021Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:07.033148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.033237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:07.034125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.034397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.034407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.034445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:07.034451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.034458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:07.034473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.035754Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:07.052407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:07.052501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.052575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:07.052620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:07.052628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.053469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.053501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:07.053545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.053553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:07.053558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:07.053563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
1:0 2 -> 3 2024-11-21T17:50:07.053937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.053949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:07.053953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:07.054251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.054259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.054263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.054267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.054731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:07.055367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:07.055414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:07.055552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.055571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.055579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.055620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:07.055624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.055649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.055658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:07.056082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.056091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.056137Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.056142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:07.056213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.056221Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:07.056249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:07.056254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.056260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:07.056265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.056269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:07.056273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:07.056285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:07.056291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:07.056295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:07.056578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.056593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.056598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:07.056603Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:07.056609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.056623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:07.057051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:07.057131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:07.057365Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:07.058520Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:07.059220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } 
Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:07.059299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.059311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2024-11-21T17:50:07.059420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2024-11-21T17:50:07.059617Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:07.060380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.060411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T17:50:07.060530Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess |80.9%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |80.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> KqpQueryService::DdlColumnTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesWithCache [GOOD] Test command err: Trying to start YDB, gRPC: 25035, MsgBus: 8325 2024-11-21T17:49:43.792810Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791241266306976:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:43.792829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b4a/r3tmp/tmpXI9kXg/pdisk_1.dat 2024-11-21T17:49:43.853521Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25035, node 1 2024-11-21T17:49:43.869731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:43.869745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:43.869747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:43.869782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8325 2024-11-21T17:49:43.893267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:43.893297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:43.894357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:43.931554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:44.127582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character(2) 2024-11-21T17:49:44.198844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character(2) 2024-11-21T17:49:44.209772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 abcd 2024-11-21T17:49:44.237884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 {abcd,abcd} 2024-11-21T17:49:44.264152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 abcd 2024-11-21T17:49:44.293313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 {"abcd ","abcd "} 2024-11-21T17:49:44.322783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character varying(2) 2024-11-21T17:49:44.335991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: value too long for type character varying(2) 2024-11-21T17:49:44.350789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 abcd 2024-11-21T17:49:44.418498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 {abcd,abcd} 2024-11-21T17:49:44.446644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 abcd 2024-11-21T17:49:44.475099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 {abcd,abcd} 2024-11-21T17:49:44.502760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(2) 2024-11-21T17:49:44.518260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(2) 2024-11-21T17:49:44.531235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 1111 2024-11-21T17:49:44.558700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 {1111,1111} 2024-11-21T17:49:44.589361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(6) 2024-11-21T17:49:44.605730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string length 4 does not match type bit(6) 2024-11-21T17:49:44.618816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string too long for type bit varying(2) 2024-11-21T17:49:44.630272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: bit string too long for type bit varying(2) 2024-11-21T17:49:44.645253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 1111 2024-11-21T17:49:44.665240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 {1111,1111} 2024-11-21T17:49:44.691619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 1111 2024-11-21T17:49:44.720749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 {1111,1111} 2024-11-21T17:49:44.750466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgnumeric_17472595041006102391_7644398022171395976'Unable to coerce value for pgnumeric: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:41: ERROR: numeric field overflow DETAIL: A field with precision 2, scale 0 must round to an absolute value less than 10^2. 2024-11-21T17:49:44.762418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711 ... 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:05.664464Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:05.665595Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:50:05.669834Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:05.859218Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791336871542542:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:05.859239Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:05.859372Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791336871542554:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:05.860277Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:50:05.862840Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:50:05.862933Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791336871542556:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:05.937722Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T17:50:05.939307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480 2024-11-21T17:50:05.974880Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037888 not found 2024-11-21T17:50:05.976596Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439791336871542861:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:50:05.976675Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=YzNmY2YwZC01M2VmZDEzYS02NzQxZjYyZi0xMTRkNjEwMw==, ActorId: [6:7439791336871542538:2297], ActorState: ExecuteState, TraceId: 01jd7xgx4p6d79pb69cd1t48pq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:50:05.989548Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7439791336871542878:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:50:05.989647Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MmI1ZWQ1ZDctMzFhYmM1ZTAtZThmNzcxMTMtOGRhZDY1NGY=, ActorId: [6:7439791336871542874:2337], ActorState: ExecuteState, TraceId: 01jd7xgx52aghq2mw26vb6mtzt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 11324, MsgBus: 11281 2024-11-21T17:50:06.269965Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439791340022905914:2060];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b4a/r3tmp/tmpyErGKV/pdisk_1.dat 2024-11-21T17:50:06.273093Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:06.282608Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11324, node 7 2024-11-21T17:50:06.298844Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:06.298870Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:06.298872Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:06.298918Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11281 TClient is connected to server localhost:11281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:06.370090Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:06.370128Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:06.371209Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:06.371421Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:06.619661Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791340022906499:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.619714Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.619907Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791340022906511:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.623946Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:50:06.626612Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:50:06.626689Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439791340022906513:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:06.699897Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:50:06.795028Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2024-11-21T17:50:06.795199Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:06.894766Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:06.950211Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480 2024-11-21T17:50:07.050826Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2024-11-21T17:50:07.067503Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2024-11-21T17:50:07.071969Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7439791344317874582:2414], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:50:07.072438Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=N2QzZjAyOS0yNjQ5ZWQ3ZS03MGNiMmU2ZC1jMTkzMWEyMw==, ActorId: [7:7439791344317874580:2413], ActorState: ExecuteState, TraceId: 01jd7xgy6x2n5aj6972gqy5e6q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> KqpQueryService::DdlCache >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:05.863381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:05.863410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:05.863415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:05.863421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:05.863427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:05.863430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:05.863439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:05.863530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:05.871402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:05.871425Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:05.874147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:05.874842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:05.874890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:05.876154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:05.876331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:05.876413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.876467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:05.877496Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.877769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:05.877780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.877820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:05.877827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.877834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:05.877849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.879163Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:05.895272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:05.895350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.895413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:05.895455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:05.895463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.901060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.901115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:05.901200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.901213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:05.901218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:05.901224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:05.901895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.901910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:05.901916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:05.902242Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.902251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.902256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.902262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.902798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:05.903140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:05.903178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:05.903298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.903315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:05.903322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.903359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:05.903363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.903386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:05.903394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:05.903666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:05.903671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.903706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.903710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:05.903761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.903766Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:05.903774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:05.903777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.903780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:05.903784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.903787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:05.903790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:05.903797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:05.903801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:05.903804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:05.904002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:05.904011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:05.904014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:05.904017Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:05.904033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:05.904042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
530436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.530884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.532350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.532406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.532427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.532441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.532458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.532494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.532507Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:07.532532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:07.532537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:07.532545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T17:50:07.532572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2607:3874] message: TxId: 101 2024-11-21T17:50:07.532585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:07.532599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:07.532604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 
2024-11-21T17:50:07.532875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2024-11-21T17:50:07.533081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.533916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:07.533927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2608:3875] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:07.534099Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:07.534214Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 126us result status StatusSuccess 2024-11-21T17:50:07.534425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 
Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T17:50:07.535331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:07.535373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.535457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2024-11-21T17:50:07.540823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.540889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:07.600044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:07.600070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.600075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:07.600084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:07.600091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:07.600094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:07.600103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.600199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:07.611953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:07.611978Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:07.615063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:07.615772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:07.615814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:07.617005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:07.617188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:07.617298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.617379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:07.618293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.618560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T17:50:07.618574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.618612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:07.618621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.618627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:07.618643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.619922Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:07.635734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:07.635814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.635878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:07.635920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:07.635929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.636826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.636863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:07.636918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.636929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:07.636934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:07.636938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:07.637364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.637374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:07.637379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:07.637686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.637693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.637698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.637705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.638292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:07.638699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:07.638753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:07.638938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.638964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.638970Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.639025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:07.639032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.639062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.639074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:07.639444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.639452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.639496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.639501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:07.639577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.639584Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:07.639598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:07.639602Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.639608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:07.639613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.639617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:07.639621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:07.639631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:07.639636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:07.639641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:07.639903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.639913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.639918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:07.639923Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:07.639927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.639939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Builder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:50:07.850143Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:07.850195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T17:50:07.850222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:07.850252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T17:50:07.850258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T17:50:07.850263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T17:50:07.850318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.850339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.850345Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T17:50:07.850351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T17:50:07.850900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.850915Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T17:50:07.850927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T17:50:07.850930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready 
parts: 1/1 2024-11-21T17:50:07.850935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T17:50:07.850946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T17:50:07.850952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T17:50:07.850956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T17:50:07.850960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T17:50:07.850973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T17:50:07.851332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T17:50:07.851346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T17:50:07.851355Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T17:50:07.851367Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:07.855085Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:50:07.855116Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, 
read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:07.855127Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T17:50:07.855582Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:50:07.855606Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:07.855611Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T17:50:07.855631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:07.855636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:468:2432] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:07.855753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:07.855832Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 95us result status StatusSuccess 2024-11-21T17:50:07.855948Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 
2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:13.437958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:13.437977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:13.437981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:13.437985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:13.437997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:13.438001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:13.438009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:13.438084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:13.447195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:13.447211Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:13.448923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:13.449018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:13.449040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:13.451655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:13.451738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:13.451838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:13.452043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:13.452560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:13.452779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:13.452790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:13.452801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:13.452807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:13.452812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:13.452847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:13.454112Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:13.468030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:13.468096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.468138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:13.468169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , 
at schemeshard: 72057594046678944 2024-11-21T17:49:13.468174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.468725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:13.468748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:13.468786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.468792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:13.468795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:13.468798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:13.469131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.469139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:13.469141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:13.469427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.469437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.469442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:13.469448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:13.469995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:13.470422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:13.470479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:13.470662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:13.470683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:13.470690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:13.470746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:13.470754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:13.470781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:13.470792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:13.471246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:13.471256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:13.471281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:13.471286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:13.471340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:13.471346Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:13.471354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:13.471358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:13.471364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:13.471369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:13.471373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:13.471377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:13.471387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:13.471392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:13.471396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
16 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T17:50:07.286013Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:07.286076Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 76us result status StatusSuccess 2024-11-21T17:50:07.286254Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 
LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.296608Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:50:07.296636Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:50:07.296651Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T17:50:07.296666Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2024-11-21T17:50:07.296692Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732211407282751 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732211407282751 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:50:07.296735Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1732211407282751 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:50:07.301191Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2024-11-21T17:50:07.301959Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T17:50:07.302253Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T17:50:07.302271Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:07.774966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:07.774995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.775000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:07.775006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:07.775011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:07.775015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:07.775024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.775111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:07.789440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:07.789461Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:07.793928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:07.794788Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:07.794827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:07.796212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:07.796698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:07.796817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.796882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:07.797746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.798024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.798033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.798073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:07.798080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.798086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:07.798097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.799311Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:07.817860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:07.817941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.818012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:07.818062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:07.818071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.818852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.818905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:07.818957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.818968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:07.818973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:07.818978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:07.819366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.819376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:07.819380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:07.819636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.819643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.819648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.819655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.820369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:07.821080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:07.821157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:07.821428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.821454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.821503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.821566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:07.821572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.821600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.821613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:07.828469Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.828486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.828544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.828549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:07.828648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.828657Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:07.828674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:07.828683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.828690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:07.828697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.828702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:07.828706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:07.828727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:07.828733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:07.828737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:07.829173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.829184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.829189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:07.829194Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:07.829199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.829214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
t { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 5000004 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:07.981712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T17:50:07.981755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2024-11-21T17:50:07.981945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.981973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.981983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:50:07.982067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2024-11-21T17:50:07.982105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T17:50:07.983744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.983758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:07.983832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.983840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T17:50:07.983949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.983978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:07.984091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:07.984103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:07.984108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:07.984114Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:50:07.984119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:07.984134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:50:07.987252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:50:07.998330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 273 } } 2024-11-21T17:50:07.998357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:07.998389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 273 } } 2024-11-21T17:50:07.998405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 273 } } FAKE_COORDINATOR: Erasing txId 104 2024-11-21T17:50:07.998690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:50:07.998699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:07.998716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:50:07.998723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:07.998731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:50:07.998745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.998749Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.998754Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:07.998762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T17:50:07.999235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.999549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.999626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.999636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:50:07.999651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:50:07.999656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:07.999666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T17:50:07.999682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 104 2024-11-21T17:50:07.999690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:07.999696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:50:07.999700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:50:07.999725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:08.000154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:08.000165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:438:2412] TestWaitNotification: OK eventTxId 104 2024-11-21T17:50:08.000298Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:08.000377Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 87us result status StatusSuccess 2024-11-21T17:50:08.000500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { 
Disabled { } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.9%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream_reboots/unittest >> TCdcStreamWithRebootsTests::CreateStreamOnIndexTableExplicitReady[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:19.635752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:19.635779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:19.635784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:19.635789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:19.635805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:19.635809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2024-11-21T17:49:19.635818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:19.635898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:19.661959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:19.661987Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:19.665276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:19.665379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:19.665404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:19.672319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:19.672432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:19.672548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:19.674993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:19.681847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:19.682206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:19.682218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:19.682233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:19.682240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:19.682246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:19.682299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:19.687409Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:19.710602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:19.710679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.710736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T17:49:19.710772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:19.710777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.711465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:19.711485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:19.711527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.711535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:19.711538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:19.711541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:19.711804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.711810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:19.711813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:19.712001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.712007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.712013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:19.712019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.712524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:19.712846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:19.712898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:19.713063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:19.713081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:19.713086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:19.713129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:19.713135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:19.713167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:19.713177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:19.713502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:19.713510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:19.713548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:19.713554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:19.713620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:19.713626Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:19.713633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:19.713636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.713641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:19.713645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:19.713650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:19.713653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:19.713664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:19.713672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:19.713676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ablet: 72075186233409546, partId: 2 2024-11-21T17:50:07.804644Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:2, at schemeshard: 72057594046678944, message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:07.804651Z node 148 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:07.804658Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 339 RawX2: 635655162132 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:07.804670Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.804674Z node 148 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T17:50:07.804678Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:07.804684Z node 148 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:2 129 -> 240 2024-11-21T17:50:07.804781Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.804790Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.804794Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:50:07.804798Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:50:07.804801Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:07.804881Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.804890Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.804893Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:50:07.804897Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 4 2024-11-21T17:50:07.804903Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:50:07.804979Z node 148 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.804987Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.804990Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:50:07.804994Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T17:50:07.804998Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2024-11-21T17:50:07.805533Z node 148 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.805551Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.805555Z node 148 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:50:07.805559Z node 148 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2024-11-21T17:50:07.805564Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T17:50:07.805577Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/4, is published: true 2024-11-21T17:50:07.806347Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T17:50:07.807039Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.807070Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T17:50:07.807146Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T17:50:07.807153Z node 148 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:2 ProgressState 2024-11-21T17:50:07.807167Z node 148 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 4/4 2024-11-21T17:50:07.807171Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T17:50:07.807178Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2024-11-21T17:50:07.807193Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [148:465:2430] message: TxId: 1003 2024-11-21T17:50:07.807199Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2024-11-21T17:50:07.807206Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
1003:0 2024-11-21T17:50:07.807211Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:50:07.807226Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:07.807230Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T17:50:07.807233Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T17:50:07.807237Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:50:07.807240Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T17:50:07.807243Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T17:50:07.807257Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:50:07.807261Z node 148 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2024-11-21T17:50:07.807263Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2024-11-21T17:50:07.807271Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T17:50:07.807348Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.807385Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.807425Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.807745Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:07.808144Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:07.808155Z node 148 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [148:665:2587] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:50:07.808282Z node 148 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:07.808349Z node 148 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 76us result status StatusSuccess 2024-11-21T17:50:07.808512Z node 148 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathDescription { Self { Name: "Stream" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } } Children { Name: 
"streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409549 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } CdcStreamDescription { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:08.171207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:08.171233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:08.171237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:08.171241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:08.171245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:08.171248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:08.171254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:08.171331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:08.180435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:08.180455Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:08.183010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:08.183825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:08.183877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:08.186271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T17:50:08.186545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:08.186638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.186717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:08.187859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.188120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:08.188130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.188167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:08.188173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:08.188180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:08.188193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.189614Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:08.206958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:08.207039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.207107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:08.207152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:08.207161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.209131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.209173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:08.209228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.209237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:08.209242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:08.209247Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:08.212652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.212675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:08.212682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:08.213241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.213257Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.213263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.213270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.213928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:08.214679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:08.214735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:08.214930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.214956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:08.214965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.215021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:08.215028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.215057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:08.215067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:08.215507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:08.215515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:08.215562Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.215567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:08.215642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.215649Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:08.215660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:08.215664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.215670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:08.215674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.215679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:08.215683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:08.215693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:08.215698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:08.215702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:08.216015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:08.216028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:08.216033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:08.216038Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:08.216042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:08.216054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:08.218951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:08.219060Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:08.219328Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:08.220740Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:08.221396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { 
Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:08.221482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.221500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2024-11-21T17:50:08.221607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2024-11-21T17:50:08.221812Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:08.223419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:08.223456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T17:50:08.223598Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] |80.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpQueryService::DdlCache [GOOD] >> KqpQueryService::DdlExecuteScript ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2024-11-21T17:49:09.397605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791096713953222:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:09.397806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000655/r3tmp/tmpyKDHzT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18666, node 1 2024-11-21T17:49:09.456117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:49:09.456138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:09.460479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:09.460495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:09.460496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:09.460536Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:26599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:09.497881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:09.497909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:09.499413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:09.525133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.525931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.525946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.526372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:09.526432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:09.526440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:49:09.526760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:09.526768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:49:09.526801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:49:09.527025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.527632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349575, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.527641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:49:09.527682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:49:09.527970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.528011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.528023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:49:09.528030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:49:09.528044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:49:09.528056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:49:09.528411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:49:09.528430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:49:09.528433Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:09.528446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:26599 2024-11-21T17:49:09.665098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791096713954141:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.665137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.686144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.686266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:49:09.686458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.686477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.687257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T17:49:09.687319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.687364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.687386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:49:09.687590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:49:09.687674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:09.687685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49:09.687688Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:49:09.687725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:09.687732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49:09.687733Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:49:09.689546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:49:09.689575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:49:09.690260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:49:09.745434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:49:09.745446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:49:09.745474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:49:09.746245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:49:09.747131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349792, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.747145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211349792 2024-11-21T17:49:09.747174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:49:09.747559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.747668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.747689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:49:09.747962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:49:09.747978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:49 ... n/3?node_id=1&id=MmM1ODE2ZWUtY2I1MWIyMTAtOTUyMmM4MGQtYTM2ZDNiYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880290. Ctx: { TraceId: 01jd7xgwhq3q0hfq39h6cejw19, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJmMWJmNDgtMzc2NTg3ODAtZjYxYTVjYmMtZDU3ZDQ0NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369234Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880291. Ctx: { TraceId: 01jd7xgwhn8xf9m88s05r1cf4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJjM2JmYmMtNGUxNzVjMTctYzZkMDVkZmYtZWVlMTNlNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369299Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880292. Ctx: { TraceId: 01jd7xgwhqcv3pknvhsj3dda17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEzOTkzZTQtOTc2MzY4MTgtZDNmMmMwOGEtZmEzZDU4NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369364Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880293. Ctx: { TraceId: 01jd7xgwhq568nbcttzty3xt0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAyOGUwNzgtZjA5YzYyMTctNTAwNWQxMC1lODZiMDExZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880284. Ctx: { TraceId: 01jd7xgwhqajj81gbmd74fzqz6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkyN2E4ZWYtOGJiNjRkNmEtOGFlZjFjZGItNzdkODY5MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369488Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880285. 
Ctx: { TraceId: 01jd7xgwhq15fz3yc395nfxt79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThkZGM0YzktZDQ3YjI2ZDQtYjk2NmJkNDgtZDZhNjcxYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369577Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880286. Ctx: { TraceId: 01jd7xgwhq00v3gayy9wnd2vyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVkMTVhNjAtNjQ3ZWNmZjctZjE5NmNhYjQtYTY0ZWMzYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369643Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880287. Ctx: { TraceId: 01jd7xgwhq6j0p23x4tsp0yk1c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNlYTY4NTEtYTMzNjEzYTItZTUwZjRhZTQtOGY4MGI0MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.369704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880288. Ctx: { TraceId: 01jd7xgwhq7xqarre94x6bjy0s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJmZmZiOGItNzUzMTAxOGYtYWViMDlkYjAtYzQyNmI2YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.383870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880298. Ctx: { TraceId: 01jd7xgwj00cwc9r0p87th4h6j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJmZmZiOGItNzUzMTAxOGYtYWViMDlkYjAtYzQyNmI2YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880299. Ctx: { TraceId: 01jd7xgwhzb3gat7r50kp6mjbc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM1ODE2ZWUtY2I1MWIyMTAtOTUyMmM4MGQtYTM2ZDNiYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384338Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880300. Ctx: { TraceId: 01jd7xgwj0bj508tk3n4f53s7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJjM2JmYmMtNGUxNzVjMTctYzZkMDVkZmYtZWVlMTNlNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384436Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880302. Ctx: { TraceId: 01jd7xgwj05jhhwkh42rv09t1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkyN2E4ZWYtOGJiNjRkNmEtOGFlZjFjZGItNzdkODY5MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384531Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880303. Ctx: { TraceId: 01jd7xgwj0cb8psbvwtjsyn0v4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNlYTY4NTEtYTMzNjEzYTItZTUwZjRhZTQtOGY4MGI0MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384613Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880294. Ctx: { TraceId: 01jd7xgwj00jjgcqxwaa2fdea9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJmMWJmNDgtMzc2NTg3ODAtZjYxYTVjYmMtZDU3ZDQ0NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880295. 
Ctx: { TraceId: 01jd7xgwj08cgkqcrcwxjv0qw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAyOGUwNzgtZjA5YzYyMTctNTAwNWQxMC1lODZiMDExZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384767Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880296. Ctx: { TraceId: 01jd7xgwj02dj3np3gd37fhzj7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThkZGM0YzktZDQ3YjI2ZDQtYjk2NmJkNDgtZDZhNjcxYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880297. Ctx: { TraceId: 01jd7xgwj098kf92ddhnxrkbcp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVkMTVhNjAtNjQ3ZWNmZjctZjE5NmNhYjQtYTY0ZWMzYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.384923Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880301. Ctx: { TraceId: 01jd7xgwj08bqym82hwgvax0db, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEzOTkzZTQtOTc2MzY4MTgtZDNmMmMwOGEtZmEzZDU4NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211349792 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) 2024-11-21T17:50:05.394112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880309. Ctx: { TraceId: 01jd7xgwjg8gqf4475kmwtfnm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJjM2JmYmMtNGUxNzVjMTctYzZkMDVkZmYtZWVlMTNlNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.394391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880310. Ctx: { TraceId: 01jd7xgwjgdsttertnjhwrms5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJmZmZiOGItNzUzMTAxOGYtYWViMDlkYjAtYzQyNmI2YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.394476Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880304. Ctx: { TraceId: 01jd7xgwjg0pw2jhd6dv9pdmhk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM1ODE2ZWUtY2I1MWIyMTAtOTUyMmM4MGQtYTM2ZDNiYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.394542Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880305. Ctx: { TraceId: 01jd7xgwjge64tsk1b9y81r46j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJmMWJmNDgtMzc2NTg3ODAtZjYxYTVjYmMtZDU3ZDQ0NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.394602Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880306. 
Ctx: { TraceId: 01jd7xgwjg86w6emrzmwhktbzd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkyN2E4ZWYtOGJiNjRkNmEtOGFlZjFjZGItNzdkODY5MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.394655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880307. Ctx: { TraceId: 01jd7xgwjgdeweabaqqbhwz43p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNlYTY4NTEtYTMzNjEzYTItZTUwZjRhZTQtOGY4MGI0MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.394709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880308. Ctx: { TraceId: 01jd7xgwjgdw8ng02qpjz82f46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAyOGUwNzgtZjA5YzYyMTctNTAwNWQxMC1lODZiMDExZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.402327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880311. Ctx: { TraceId: 01jd7xgwjr466rj1emk752vab6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThkZGM0YzktZDQ3YjI2ZDQtYjk2NmJkNDgtZDZhNjcxYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.402620Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880312. Ctx: { TraceId: 01jd7xgwjrc986x127rf3rzfs4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEzOTkzZTQtOTc2MzY4MTgtZDNmMmMwOGEtZmEzZDU4NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:05.402706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976880313. Ctx: { TraceId: 01jd7xgwjr8extey1q61c0nkt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVkMTVhNjAtNjQ3ZWNmZjctZjE5NmNhYjQtYTY0ZWMzYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211349792 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) Table has 4 shards |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:08.761373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:08.761402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:08.761407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:08.761414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:08.761420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:08.761424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:08.761433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:08.761537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:08.772529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:08.772552Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:08.775597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:08.776175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:08.776207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:08.777431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:08.777559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:08.777641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.777698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:08.778297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.778518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:08.778528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.778557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:08.778563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:08.778567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:08.778577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.779533Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:08.795586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:08.795670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.795738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:08.795781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:08.795790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.796605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.796635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:08.796695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.796707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:08.796712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:08.796717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:08.797109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.797121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:08.797126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:08.797450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.797460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.797466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.797474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.798079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:08.798488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:08.798541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:08.798728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.798755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:08.798763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.798815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:08.798822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.798852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:08.798865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:08.799251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:08.799260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:08.799302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.799309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:08.799390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.799397Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:08.799409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:08.799414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.799421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:08.799427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.799431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all 
the parts is done, operation id: 1:0 2024-11-21T17:50:08.799435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:08.799446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:08.799452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:08.799456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:08.799744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:08.799758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:08.799763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:08.799769Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:08.799775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:08.799788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... end tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:08.913873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2024-11-21T17:50:08.913959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.913978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:08.913986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:50:08.914065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:50:08.914096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:50:08.914970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:08.914982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: 
[OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:08.915055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.915062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:08.915186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.915196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:08.915305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:08.915321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:08.915326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:08.915332Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:50:08.915339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:50:08.915353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:50:08.916092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:08.932269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 264 } } 2024-11-21T17:50:08.932296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:08.932331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 264 } } 2024-11-21T17:50:08.932348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 264 } } 2024-11-21T17:50:08.932604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:08.932615Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:08.932631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:08.932640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:08.932648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:08.932663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.932671Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.932676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:08.932684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:50:08.936953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.937068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.937173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.937185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:08.937205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:08.937210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:08.937219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:50:08.937239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 102 2024-11-21T17:50:08.937249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:08.937254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:08.937259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:08.937284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:08.937789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:08.937801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:498:2426] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:08.937927Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:08.937994Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 75us result status StatusSuccess 2024-11-21T17:50:08.938128Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> QueryStats::Ranges [GOOD] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:09.416205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:09.416251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:09.416256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:09.416266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:09.416273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:09.416277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:09.416286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:09.416378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:09.428791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:09.428823Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:09.432341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:09.433367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:09.433417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:09.435105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:09.435315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:09.435447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:09.435537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:09.436523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:09.436823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:09.436834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:09.436881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:09.436890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:09.436898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T17:50:09.436914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.445657Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:09.481651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:09.481736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.481803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:09.481850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:09.481858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.488661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:09.488719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:09.488791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.488805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:09.488810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:09.488816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:09.489503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.489524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:09.489531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:09.489945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.489955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.489961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:09.489968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:09.490689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 
18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:09.491138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:09.491191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:09.491375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:09.491403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:09.491409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:09.491467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:09.491475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:09.491505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:09.491517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:09.491885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:09.491892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:09.491936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:09.491941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:09.492021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.492028Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:09.492039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:09.492046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:09.492053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:09.492058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:09.492063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:09.492067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:09.492077Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:09.492083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:09.492087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:09.492421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:09.492435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:09.492440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:09.492446Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:09.492450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:09.492462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... HEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:09.675265Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:50:09.675269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:09.675286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:50:09.676848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:09.676863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:09.680682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 362 } } 2024-11-21T17:50:09.680700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:09.680728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 362 } } 2024-11-21T17:50:09.680743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME 
Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 362 } } 2024-11-21T17:50:09.680986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 405 RawX2: 4294969672 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:09.680993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:09.681007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 405 RawX2: 4294969672 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:09.681013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:09.681021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 405 RawX2: 4294969672 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:09.681035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:09.681039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T17:50:09.687794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.687916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.704681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:09.704714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:09.704743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:09.704753Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:09.704762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:09.704777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, 
txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:09.704782Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.704786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:09.704792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:09.704798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:50:09.712685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.712845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.712859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2024-11-21T17:50:09.712871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:50:09.712876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2024-11-21T17:50:09.712890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T17:50:09.712898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2024-11-21T17:50:09.720622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:09.720653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:09.720674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:09.720679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:09.720687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:50:09.720711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T17:50:09.720724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:09.720729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:09.720733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:09.720781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:50:09.720785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:09.727486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:09.727510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:433:2397] 
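Note: the describe output earlier in this log (from the preceding ut_ttl test) reports a table /MyRoot/TTLEnabledTable with TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } and a global async index UserDefinedIndexByExpireAt on the same column. A YQL declaration matching those reported settings would look roughly like the sketch below; this is a reconstruction from the describe output for illustration only, not the DDL actually issued by the unit test, which drives the schemeshard with protobuf transactions as shown above.

    CREATE TABLE `/MyRoot/TTLEnabledTable` (
        key Uint64,
        modified_at Timestamp,
        INDEX UserDefinedIndexByExpireAt GLOBAL ASYNC ON (modified_at),
        PRIMARY KEY (key)
    )
    WITH (
        TTL = Interval("PT1H") ON modified_at  -- PT1H = 3600 seconds, matching ExpireAfterSeconds: 3600
    );

By contrast, the describe of /MyRoot/TTLEnabledTableCopy that follows reports IsBackup: true and no TTLSettings block at all, which appears to be the property TSchemeShardTTLTests::BackupCopyHasNoTtlSettings verifies.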
TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:09.727661Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:09.727731Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 82us result status StatusSuccess 2024-11-21T17:50:09.727930Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> KqpQueryService::DdlExecuteScript [GOOD] |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |81.0%| [TA] {RESULT} 
$(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |81.0%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TReplicationWithRebootsTests::CreateInParallelWithInitialController [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 10620, MsgBus: 16271 2024-11-21T17:48:26.477980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790912110591886:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:48:26.477999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001288/r3tmp/tmpowjGNQ/pdisk_1.dat 2024-11-21T17:48:26.532766Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10620, node 1 2024-11-21T17:48:26.549585Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:48:26.549599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:48:26.549600Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:48:26.549638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16271 2024-11-21T17:48:26.577942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:48:26.577967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:48:26.579067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
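At this point the test server for KqpQueryService::DdlExecuteScript is up (gRPC port 10620, MsgBus 16271, root path /Root confirmed). The captured stderr below records only the resulting schemeshard suboperations (ESchemeOpCreateTable, ESchemeOpCreateResourcePool) and workload-service warnings; the YQL text executed through the query service is not echoed in this log. Purely as an illustration of the general shape of a DDL-plus-DML script run through the query service, under the assumption of a generic script (the table name and values below are hypothetical, not taken from the test):

    CREATE TABLE test_ddl (
        id Uint64,
        value String,
        PRIMARY KEY (id)
    );
    UPSERT INTO test_ddl (id, value) VALUES (1u, "a"), (2u, "b");
    SELECT COUNT(*) AS cnt FROM test_ddl;
    DROP TABLE test_ddl;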
2024-11-21T17:48:26.609058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.619535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:48:26.635588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.652647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.716948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:48:26.774920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790912110593425:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.774959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.819194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.828767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.843111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.856132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.862312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.869663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:48:26.885049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790912110593929:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.885066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790912110593934:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.885082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:48:26.885762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:48:26.889955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790912110593936:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:48:27.127895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:48:27.158847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:48:27.158898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:48:27.158949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:48:27.158968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:27.158985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:48:27.159002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:48:27.159017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:48:27.159037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:48:27.159055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:48:27.159070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:48:27.159084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:48:27.159101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7439790916405561635:2462];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:48:27.159994Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439790916405561658:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:48:27.160016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439790916405561658:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:48:27.160052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439790916405561658:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:48:27.160080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439790916405561658:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:48:27.160108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7439790916405561658:2469];tablet_id=72075186224037923;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:08.443971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:08.450861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:08.457626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:08.515145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:08.530955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:08.543440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:08.554598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:08.573684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791348259056593:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:08.573721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:08.574836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791348259056598:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:08.575689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:08.579126Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:50:08.579238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791348259056600:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:08.833913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:08.867600Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037919 not found 2024-11-21T17:50:08.874252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64966, MsgBus: 8500 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001288/r3tmp/tmpbQjhmd/pdisk_1.dat 2024-11-21T17:50:09.332409Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:09.344302Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64966, node 3 2024-11-21T17:50:09.376895Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:09.376914Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:09.376916Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:09.376967Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8500 2024-11-21T17:50:09.416284Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:09.416316Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:09.420672Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
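The node 3 instance above is listening on gRPC port 64966, with the TClient endpoint on port 8500, per the bootstrap lines just before this point. For inspecting such a test server by hand, a rough CLI equivalent of the TClient::Ls / WaitRootIsUp probe would be something like the following (assuming the stock ydb CLI; the endpoint and database flags shown are the usual ones and are not taken from this log):

    ydb -e grpc://localhost:64966 -d /Root scheme ls
    ydb -e grpc://localhost:64966 -d /Root scheme describe /Root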
2024-11-21T17:50:09.489620Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:09.492708Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:09.509221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:09.538126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:09.570221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:09.586469Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:09.816562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439791353584350921:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:09.816613Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:09.821065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:09.840547Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:09.851440Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:09.864293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:09.888805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:09.904112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:09.919273Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439791353584351423:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:09.919321Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:09.919391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439791353584351431:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:09.920210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:09.922626Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439791353584351433:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:10.190046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:10.190407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:50:10.190734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:50:10.369927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> TExportToS3Tests::CheckItemProgress |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::RebootDuringCompletion >> TExportToS3Tests::UidAsIdempotencyKey |81.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.0%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateInParallelWithInitialController [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:31.161677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:31.161697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:31.161701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:31.161705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:31.161710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:31.161712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:31.161719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:31.161794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:31.170879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:31.170900Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:31.172760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:31.172831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:31.172856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:31.175276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:31.175342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:31.175447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:31.175605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T17:49:31.176258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:31.176521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:31.176532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:31.176544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:31.176552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:31.176559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:31.176601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:31.177837Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:31.192599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:31.192715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.192797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:31.192854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:31.192863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.193820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:31.193852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:31.193910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.193922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:31.193928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:31.193934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:31.194388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.194401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:31.194407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:31.194778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.194790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.194797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:31.194804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:31.195493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:31.195933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:31.195991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:31.196215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:31.196263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:31.196272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:31.196333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:31.196340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:31.196375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:31.196389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:31.197579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:31.197594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:31.197646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:31.197653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:31.197745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:31.197753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:31.197768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:31.197773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:31.197780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:31.197785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:31.197790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:31.197795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:31.197821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:31.197829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:31.197833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 1003 2024-11-21T17:50:11.325763Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:50:11.325771Z node 119 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:50:11.325805Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:11.325809Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [119:720:2611] 2024-11-21T17:50:11.325834Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [119:725:2616], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:11.325838Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:11.325842Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:50:11.325848Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [119:726:2617], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:11.325852Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:11.325856Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:50:11.325864Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [119:724:2615], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:11.325867Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:11.325875Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T17:50:11.325882Z node 119 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 271124996, Sender [119:416:2373], Recipient [119:129:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1005 2024-11-21T17:50:11.325885Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:50:11.325891Z node 119 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:50:11.325902Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [119:416:2373], Recipient [119:129:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2024-11-21T17:50:11.325906Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:50:11.325910Z node 119 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:50:11.325916Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:50:11.325919Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [119:720:2611] 2024-11-21T17:50:11.325932Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [119:725:2616], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:11.325939Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:11.325943Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2024-11-21T17:50:11.325946Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:50:11.325950Z node 119 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [119:720:2611] 2024-11-21T17:50:11.325961Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [119:726:2617], Recipient [119:129:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:11.325964Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:11.325967Z node 119 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1005 TestWaitNotification: OK eventTxId 1004 2024-11-21T17:50:11.326035Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [119:727:2618], Recipient [119:129:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:50:11.326039Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:50:11.326050Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:11.326096Z node 119 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication1" took 45us result status StatusSuccess 2024-11-21T17:50:11.326162Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/Replication1" PathDescription { Self { Name: "Replication1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication1" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:11.326244Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [119:728:2619], Recipient [119:129:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:50:11.326248Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:50:11.326257Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:11.326273Z node 119 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication2" took 16us result status StatusSuccess 2024-11-21T17:50:11.326302Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication2" PathDescription { Self { Name: "Replication2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication2" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 ControllerId: 72075186233409548 State { StandBy { } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:11.326359Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [119:729:2620], Recipient [119:129:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:50:11.326365Z node 119 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:50:11.326371Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:11.326384Z node 119 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication3" took 13us result status StatusSuccess 2024-11-21T17:50:11.326411Z node 119 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication3" PathDescription { Self { Name: "Replication3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1005 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Replication3" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 6 } Version: 1 ControllerId: 72075186233409549 State { StandBy { } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TExportToS3Tests::DropCopiesBeforeTransferring1 |81.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |81.0%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json 
... results_accumulator.log} |81.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2024-11-21T17:46:10.263415Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.263423Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.263428Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:10.263533Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:46:10.263548Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.263550Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.264096Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007548s 2024-11-21T17:46:10.264266Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:46:10.264276Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.264279Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.264297Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005184s 2024-11-21T17:46:10.264362Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T17:46:10.264368Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.264370Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:10.264381Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008805s 2024-11-21T17:46:10.315809Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1732211170315800 2024-11-21T17:46:10.443159Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790327375987411:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:10.443482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:10.445776Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790327914706290:2243];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e80/r3tmp/tmpr62CQO/pdisk_1.dat 2024-11-21T17:46:10.478641Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:10.480007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:10.481136Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:10.511551Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21983, node 1 2024-11-21T17:46:10.540204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e80/r3tmp/yandex88g77S.tmp 2024-11-21T17:46:10.540225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e80/r3tmp/yandex88g77S.tmp 2024-11-21T17:46:10.540306Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e80/r3tmp/yandex88g77S.tmp 2024-11-21T17:46:10.540338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:10.541919Z INFO: TTestServer started on Port 5998 GrpcPort 21983 2024-11-21T17:46:10.544477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:10.544505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:10.550416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5998 PQClient connected to localhost:21983 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:46:10.575898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:10.575929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:10.578050Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:10.578423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:10.579698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T17:46:10.627132Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T17:46:10.759708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790327375988289:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.759733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.759772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790327375988316:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:10.760466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:10.764346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790327375988318:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T17:46:10.784598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.789392Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790327914706403:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:10.789515Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTQ0MGIwOS03ZjNhMDViMC00ZTZjYjA5Yi01NjAyOWZhYg==, ActorId: [2:7439790327914706377:2277], ActorState: ExecuteState, TraceId: 01jd7x9qez27dt3qy20jc9s6ke, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:10.790400Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:10.844375Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790327375988503:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:10.844431Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjkwODZhNTItNGNhZTc0Y2YtNTdmYjRhNWItMzc3N2MzZmM=, ActorId: [1:7439790327375988286:2299], ActorState: ExecuteState, TraceId: 01jd7x9qe79agng8d6vhmrypth, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:10.844535Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:10.849364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:10.913679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:21983", true, true, 1000); 2024-11-21T17:46:10.943849Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd7x9qkf0ewawx17f37btaay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZiODY2NjMtNDFiM2E3NzktZDdmZGVkLWJjYThmN2Yy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439790327375988804:2943] 2024-11-21T17:46:15.443371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout; ... T17:50:10.531913Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2024-11-21T17:50:10.531916Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2024-11-21T17:50:10.531920Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2024-11-21T17:50:10.531971Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1732211410531 2024-11-21T17:50:10.531994Z node 17 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:50:10.536938Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 2024-11-21T17:50:10.536960Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.536971Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:50:10.536978Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.536983Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T17:50:10.536986Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.536990Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T17:50:10.536993Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.536998Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T17:50:10.537000Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.537005Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T17:50:10.537007Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.537016Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T17:50:10.537021Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.537026Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2024-11-21T17:50:10.537028Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:50:10.537033Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2024-11-21T17:50:10.537036Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.537040Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2024-11-21T17:50:10.537042Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:10.537047Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2024-11-21T17:50:10.537078Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:50:10.537082Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:50:10.537113Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:50:10.537143Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:50:10.537335Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2024-11-21T17:50:10.537353Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:50:10.537392Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T17:50:10.537395Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:50:10.537413Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732211410531 queuesize 0 startOffset 0 2024-11-21T17:50:10.540358Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 5 } 2024-11-21T17:50:10.540374Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 1 2024-11-21T17:50:10.540380Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 2 2024-11-21T17:50:10.540383Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 3 2024-11-21T17:50:10.540386Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 4 2024-11-21T17:50:10.540389Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 5 2024-11-21T17:50:10.540392Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 6 2024-11-21T17:50:10.540396Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 7 2024-11-21T17:50:10.540399Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 8 2024-11-21T17:50:10.540405Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 9 2024-11-21T17:50:10.540408Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: acknoledged message 10 2024-11-21T17:50:10.540865Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:50:10.540875Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session will now close 2024-11-21T17:50:10.540880Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: aborting 2024-11-21T17:50:10.541006Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:50:10.541011Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0] Write session: destroy 2024-11-21T17:50:10.541422Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0 grpc read done: success: 0 data: 2024-11-21T17:50:10.541436Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0 grpc read failed 2024-11-21T17:50:10.541443Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0 grpc closed 2024-11-21T17:50:10.541448Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|e95fecdc-a252fdc7-b843064f-f7dc4b09_0 is DEAD 2024-11-21T17:50:10.541934Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:50:10.545377Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:10.545419Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7439791358260103837:2621] destroyed 2024-11-21T17:50:10.545441Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> TSchemeShardTTLTests::CheckCounters >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::RebootDuringAbortion >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TExportToS3Tests::TablePermissions [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ExportStartTime >> TSchemeShardTTLTestsWithReboots::CopyTable >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:13.550466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:13.550487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:13.550491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:13.550495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:13.550499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:13.550501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:13.550507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:13.550579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:13.557675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:13.557694Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:13.560517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:13.561292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:13.561334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:13.562718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:13.562899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:13.563018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.563099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:13.564009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.564298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:13.564310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.564351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:13.564359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:13.564365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:13.564376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.565552Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:13.584246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:13.584324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.584386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:13.584430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:13.584438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:50:13.585214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.585235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:13.585276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.585283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:13.585285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:13.585289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:13.585660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.585671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:13.585675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:13.585959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.585965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.585970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:13.585974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:13.586379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:13.586671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:13.586708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:13.586829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.586846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:13.586850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:13.586887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:13.586892Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:13.586928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:13.586937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:13.587218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:13.587224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:13.587251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.587254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:13.587303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.587307Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:13.587316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:13.587319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:13.587323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:13.587326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:13.587329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:13.587332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:13.587339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:13.587343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:13.587346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:13.587530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:13.587538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:13.587542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:13.587545Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:13.587548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
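(Context for the dump above and below: this block belongs to TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn, and the entries that follow show the CreateColumnTable proposal for /MyRoot/TTLEnabledTable being rejected with StatusSchemeError "Incorrect ttl column - not found in scheme" — the declared schema has only `key` and `modified_at`, while TtlSettings reference `created_at`. The exact DDL issued by the unit test is not shown in this log; the following is only an illustrative YQL sketch of the same mismatch, with table and column names taken from the log:

CREATE TABLE `/MyRoot/TTLEnabledTable` (
    key Uint64 NOT NULL,
    modified_at Timestamp,
    PRIMARY KEY (key)
)
PARTITION BY HASH(key)
WITH (
    STORE = COLUMN,
    -- rejected at propose time: `created_at` is not a column of this table
    TTL = Interval("PT0S") ON created_at
);

Schemeshard validates the TTL column against the table schema when the operation is proposed, which is why the dump ends with StatusSchemeError before any shards are created.)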
2024-11-21T17:50:13.587557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:13.588116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:13.588194Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:13.588479Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:13.589346Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:13.589848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:13.589900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.590017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2024-11-21T17:50:13.590162Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:13.590733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:13.590757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2024-11-21T17:50:13.590835Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:11.696937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:11.696967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:11.696972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2024-11-21T17:50:11.696976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:11.696982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:11.696986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:11.697006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:11.697100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:11.708127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:11.708154Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:11.711285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:11.712374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:11.712413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:11.713653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:11.713808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:11.713898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:11.713962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:11.714745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:11.715006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:11.715017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:11.715053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:11.715059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:11.715065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:11.715077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.716157Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:11.733344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:11.733429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.733497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:11.733542Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:11.733549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.737292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:11.737342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:11.737404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.737417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:11.737422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:11.737427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:11.745493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.745521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:11.745529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:11.753565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.753595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.753602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:11.753611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:11.754302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:11.761353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:11.761467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:11.761721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:11.761768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:11.761779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:11.761853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:11.761861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:11.761896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:11.761908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:11.770203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:11.770225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:11.770279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:11.770285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:11.770359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.770368Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:11.770383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:11.770388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:11.770394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:11.770400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:11.770406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:11.770409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:11.770432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:11.770438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:11.770441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:11.770838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:11.770854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:11.770860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:11.770865Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:11.770869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:11.770883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.362686Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T17:50:13.363359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.363400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.363407Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:13.363421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:13.363462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:13.369130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T17:50:13.369191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2024-11-21T17:50:13.369368Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.369395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:13.369405Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:50:13.369430Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T17:50:13.369461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:25445 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: 
h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 812A9EF0-89FE-40FC-816A-7A69EBACD4BF amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2024-11-21T17:50:13.381277Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:13.381293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:25445 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4489781D-7FCC-4CAC-8511-866ED6475CC9 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T17:50:13.381374Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.381380Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2024-11-21T17:50:13.381658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.381677Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:13.381888Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T17:50:13.381901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T17:50:13.381905Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T17:50:13.381911Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:50:13.381918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:50:13.381938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:25445 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D5ECB68F-62DE-42C1-86B6-5BB19F5BA468 amz-sdk-request: attempt=1 content-length: 602 content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 
602 2024-11-21T17:50:13.383484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:25445 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C100BD02-5360-4D96-8ECD-97D1D8C47D5E amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2024-11-21T17:50:13.396001Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 500 RawX2: 12884904349 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.396029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2024-11-21T17:50:13.396054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 500 RawX2: 12884904349 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.396069Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 500 RawX2: 12884904349 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.396084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.396087Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.396092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:50:13.396099Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T17:50:13.396149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:13.396650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.396747Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.396756Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T17:50:13.396768Z node 
3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T17:50:13.396772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:13.396777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T17:50:13.396790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T17:50:13.396795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:13.396801Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T17:50:13.396805Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T17:50:13.396832Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:50:13.397255Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T17:50:13.397272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T17:50:13.397673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:13.397684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:526:2480] TestWaitNotification: OK eventTxId 102 |81.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] |81.1%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:12.734209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.734235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.734240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.734246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
using default configuration 2024-11-21T17:50:12.734253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.734256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.734276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.734355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.744966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.744986Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.747690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.748581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.748615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.749761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.749913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.750037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.750125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.751159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.751429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.751440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.751479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.751485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.751492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.751503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.752751Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.771521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.771597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.771667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.771707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: 
, at schemeshard: 72057594046678944 2024-11-21T17:50:12.771715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.772431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.772455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.772493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.772502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.772506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.772511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.772834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.772842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.772847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.773112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.773119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.773125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.773131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.773771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.774093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.774136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.774297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.774318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.774328Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.774377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.774383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.774414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.774425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.774744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.774750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.774790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.774796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.774873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.774879Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.774890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.774895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.774913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.774918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.774923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.774927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.774937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.774943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.774948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.775249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.775261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.775266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.775271Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.775275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.775291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... lectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.791739Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T17:50:13.792301Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.792341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.792349Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:13.792361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:13.792393Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:13.792790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T17:50:13.792818Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2024-11-21T17:50:13.793060Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.793081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:13.793088Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:50:13.793108Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T17:50:13.793131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2024-11-21T17:50:13.798651Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:13.798666Z 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:13.798762Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.798768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:64929 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0F8F01B1-20C3-4CF1-947B-DC5077627F34 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2024-11-21T17:50:13.798878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.798889Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T17:50:13.799088Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T17:50:13.799099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T17:50:13.799103Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T17:50:13.799108Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T17:50:13.799115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:50:13.799134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:64929 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 83171339-A356-4EFB-9B13-7D398A1FBDB2 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 2024-11-21T17:50:13.799847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:64929 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9E644505-85D3-478A-B239-93FC24FD5414 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic 
x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:64929 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9DAB27C0-3531-4245-9FE2-8210A9AB05B0 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2024-11-21T17:50:13.802979Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.803003Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:13.803025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.803038Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.803051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.803054Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.803059Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:13.803065Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T17:50:13.803102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:13.803526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.803597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.803606Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T17:50:13.803619Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 
2024-11-21T17:50:13.803623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:13.803629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T17:50:13.803641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T17:50:13.803647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:13.803652Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T17:50:13.803659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T17:50:13.803688Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:13.804120Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T17:50:13.804139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T17:50:13.804616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:13.804631Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:469:2433] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:11.647015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:11.647038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:11.647042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:11.647046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:11.647050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:11.647053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:11.647069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:11.647128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:11.654873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:11.654908Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:11.657778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T17:50:11.658463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:11.658501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:11.659951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:11.660106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:11.660205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:11.660298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:11.661121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:11.661434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:11.661445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:11.661485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:11.661492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:11.661497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:11.661511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.662607Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:11.678047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:11.678131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.678231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:11.678288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:11.678299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.680121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:11.680170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:11.680272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.680291Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:11.680300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:11.680308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:11.681237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.681258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:11.681267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:11.681885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.681900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.681907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:11.681915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:11.682629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:11.683137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:11.683198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:11.683397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:11.683424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:11.683434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:11.683492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:11.683499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:11.683531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:11.683544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:11.683963Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:11.683972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:11.684020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:11.684027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:11.684113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:11.684121Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:11.684133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:11.684138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:11.684145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:11.684150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:11.684155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:11.684159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:11.684171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:11.684178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:11.684182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:11.684531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:11.684549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:11.684555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:11.684561Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:11.684566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:11.684583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
lt, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.732052Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2024-11-21T17:50:13.732642Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.732687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.732695Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:13.732707Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2024-11-21T17:50:13.732743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:13.733199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2024-11-21T17:50:13.733234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2024-11-21T17:50:13.733321Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.733343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:13.733350Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2024-11-21T17:50:13.733377Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2024-11-21T17:50:13.733401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 HEADERS: Host: localhost:15494 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3E677AA7-FFC7-42E9-AA2B-358BA023C6E4 amz-sdk-request: attempt=1 content-length: 73 content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 2024-11-21T17:50:13.737708Z node 3 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:13.737720Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T17:50:13.737774Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.737780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2024-11-21T17:50:13.737912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.737922Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710765 2024-11-21T17:50:13.738072Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2024-11-21T17:50:13.738083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2024-11-21T17:50:13.738087Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2024-11-21T17:50:13.738092Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2024-11-21T17:50:13.738097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T17:50:13.738114Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true REQUEST: PUT /Backup2/permissions.pb HTTP/1.1 HEADERS: Host: localhost:15494 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 00521BD5-B3E1-40F9-A68A-980169DB8C1F amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/permissions.pb / / 43 2024-11-21T17:50:13.738771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:15494 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 71972421-0539-4A64-B7D6-9BB51285727B amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:15494 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CBA748C7-3C9F-4667-9F7D-381C3598A867 amz-sdk-request: attempt=1 content-length: 
0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 2024-11-21T17:50:13.762553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 793 RawX2: 12884904623 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.762576Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2024-11-21T17:50:13.762599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 793 RawX2: 12884904623 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.762612Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 793 RawX2: 12884904623 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:13.762626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.762630Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.762634Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:50:13.762641Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2024-11-21T17:50:13.762682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:13.763132Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.763198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.763205Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2024-11-21T17:50:13.763215Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2024-11-21T17:50:13.763219Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2024-11-21T17:50:13.763225Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2024-11-21T17:50:13.763239Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710765 2024-11-21T17:50:13.763244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2024-11-21T17:50:13.763251Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2024-11-21T17:50:13.763255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2024-11-21T17:50:13.763280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T17:50:13.763622Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2024-11-21T17:50:13.763635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2024-11-21T17:50:13.763952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:13.763960Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:824:2759] TestWaitNotification: OK eventTxId 104 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:14.167800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:14.167825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:14.167830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:14.167835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:14.167841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:14.167844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:14.167852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:14.167924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:14.175304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:14.175319Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:14.177844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:14.178375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:14.178398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:14.179823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:14.180013Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:14.180093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.180151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:14.181008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.181197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.181204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.181227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:14.181231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.181236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:14.181244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.182137Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:14.194860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.194937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.194992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:14.195024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:14.195029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.195564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.195582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:14.195623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.195630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:14.195633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:14.195636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
1:0 2 -> 3 2024-11-21T17:50:14.195950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.195959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:14.195963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:14.196282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.196290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.196294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.196298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.196765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:14.197128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:14.197164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:14.197277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.197292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.197298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.197335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:14.197339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.197359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:14.197367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:14.197644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.197648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.197679Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.197682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:14.197730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.197734Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:14.197741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:14.197744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.197747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:14.197750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.197753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:14.197756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:14.197762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:14.197766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:14.197768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:14.198027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:14.198040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:14.198044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:14.198049Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:14.198054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:14.198067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:14.198608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:14.198676Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:14.198895Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:14.199996Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:14.200568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } 
Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.200636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.200648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 } }, at schemeshard: 72057594046678944 2024-11-21T17:50:14.200735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1732211414 seconds (20048 days, 54 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2024-11-21T17:50:14.200856Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:14.201409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1732211414 seconds (20048 days, 54 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.201431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1732211414 seconds (20048 days, 54 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T17:50:14.201505Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TExportToS3Tests::CancelledExportEndTime [GOOD] |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:06.571181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:06.571215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:06.571221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:06.571228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:06.571235Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:06.571239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:06.571248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:06.571357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:06.582845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:06.582891Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:06.591945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:06.592950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:06.593002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:06.601544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:06.601760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:06.601878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.601976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:06.602938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:06.603286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:06.603308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:06.603361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:06.603374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:06.603383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:06.603402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.604891Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:06.630200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:06.630289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.630364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:06.630411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:06.630420Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.631601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.631649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:06.631721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.631734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:06.631738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:06.631743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:06.632254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.632267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:06.632273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:06.632601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.632612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.632618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:06.632626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:06.633223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:06.633608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:06.633663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:06.633844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:06.633869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:06.633877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
1:0, at tablet 72057594046678944 2024-11-21T17:50:06.633931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:06.633937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:06.633969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:06.633980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:06.634428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:06.634438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:06.634500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:06.634506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:06.634590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:06.634598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:06.634610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:06.634614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:06.634621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:06.634626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:06.634631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:06.634635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:06.634647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:06.634653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:06.634657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:06.634948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:06.634962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:06.634969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:06.634974Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:06.634980Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:06.634996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... schemeshard: 72057594046678944 2024-11-21T17:50:14.127971Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:14.127977Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.127982Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:14.127993Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.129286Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:123:2149] sender: [27:234:2058] recipient: [27:15:2062] 2024-11-21T17:50:14.131543Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.131586Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.131634Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:14.131672Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:14.131678Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.132101Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.132123Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:14.132154Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.132162Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:14.132167Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:14.132176Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:14.132603Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.132615Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:14.132620Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:14.132984Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.133012Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.133017Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.133024Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.133053Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:14.133340Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:14.133373Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:14.133519Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.133541Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 115964119145 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.133548Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.133601Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:14.133608Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.133637Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:14.133649Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:14.134071Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.134080Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.134119Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.134123Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:14.134188Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.134197Z node 27 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:14.134211Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:14.134215Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.134220Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:14.134241Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.134245Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:14.134248Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:14.134260Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:14.134265Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:14.134268Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:14.134346Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:14.134356Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:14.134360Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:14.134364Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:14.134368Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:14.134378Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:14.135033Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:14.135105Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:14.135260Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] Bootstrap 2024-11-21T17:50:14.136599Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] Become StateWork (SchemeCache [27:269:2261]) 2024-11-21T17:50:14.137296Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.137356Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.137372Z node 27 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2024-11-21T17:50:14.137443Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2024-11-21T17:50:14.137583Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:14.138104Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.138134Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T17:50:14.138826Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |81.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> KqpWorkload::KV [GOOD] |81.1%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |81.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |81.1%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportStartTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:12.727664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.727691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.727695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.727699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.727704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.727708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.727728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.727801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.738027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.738048Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.740546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.741316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.741347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.742560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.742739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.742830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.742893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.743704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.743959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.743970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2024-11-21T17:50:12.744006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.744013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.744019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.744033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.745126Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.762796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.762866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.762942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.762982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.762989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.763607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.763634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.763674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.763684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.763688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.763693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.764057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.764069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.764074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.764454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.764466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.764471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.764477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.765077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.765425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.765464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.765620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.765642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.765648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.765700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.765707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.765728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.765738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.766101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.766109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.766149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.766155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.766229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.766236Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.766246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.766252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.766258Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.766263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.766268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.766273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.766285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.766290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.766294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.766559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.766574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.766578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.766583Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.766587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.766599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
17:50:14.344522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710757 2024-11-21T17:50:14.344791Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2024-11-21T17:50:14.345438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpCreateConsistentCopyTables CreateConsistentCopyTables { CopyTableDescriptions { SrcPath: "/MyRoot/Table" DstPath: "/MyRoot/export-102/0" OmitIndexes: true OmitFollowers: true IsBackup: true } } Internal: true } TxId: 281474976710758 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.345480Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/export-102/0, opId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.345572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: export-102, child name: 0, child id: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:14.345581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2024-11-21T17:50:14.345588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:14.345595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:14.345610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:14.345627Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710758:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:14.345742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:14.345750Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:14.346148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710758, response: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2024-11-21T17:50:14.346165Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710758, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /MyRoot/export-102/0 2024-11-21T17:50:14.346188Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710758, status# StatusAccepted 2024-11-21T17:50:14.346194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4 2024-11-21T17:50:14.346207Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.346212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:14.346230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:14.346241Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.346246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 3 2024-11-21T17:50:14.346250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2024-11-21T17:50:14.346343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.346352Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 ProgressState, operation type: TxCopyTable, at tablet72057594046678944 2024-11-21T17:50:14.346396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:50:14.346452Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T17:50:14.346464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T17:50:14.346468Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2024-11-21T17:50:14.346472Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:50:14.346477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:14.346514Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T17:50:14.346521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T17:50:14.346525Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2024-11-21T17:50:14.346528Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 1 2024-11-21T17:50:14.346532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:50:14.346539Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2024-11-21T17:50:14.347002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2024-11-21T17:50:14.347011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2024-11-21T17:50:14.347018Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2024-11-21T17:50:14.347184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2024-11-21T17:50:14.347206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72057594037968897 2024-11-21T17:50:14.347211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T17:50:14.347267Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T17:50:14.347318Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2024-11-21T17:50:14.347348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T17:50:14.347353Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T17:50:14.347367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T17:50:14.347374Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:50:14.347379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T17:50:14.347396Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 2 -> 3 2024-11-21T17:50:14.347646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2024-11-21T17:50:14.347681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2024-11-21T17:50:14.348265Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.348306Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:14.348314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2024-11-21T17:50:14.348321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:2 at tablet# 72057594046678944 2024-11-21T17:50:14.349100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T17:50:14.349128Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2024-11-21T17:50:14.349142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409547 2024-11-21T17:50:14.349146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDyNumber [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:12.792030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.792060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.792065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.792070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.792075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.792079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.792098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.792168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.802339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.802368Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.805295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.806049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.806087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.807442Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.807611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.807711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.807787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.808781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.809049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.809060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.809098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.809105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.809111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.809123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.810360Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.831948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.832033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.832097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.832137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.832145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.832833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.832868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.832902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.832911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.832915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T17:50:12.832919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.833319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.833333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.833338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.833682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.833693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.833699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.833705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.834287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.834673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.834727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.834916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.834944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.834954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.835007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.835014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.835040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.835050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.835442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.835451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.835487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.835493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.835556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.835564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.835574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.835578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.835584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.835589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.835594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.835598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.835609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.835614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.835618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.835926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.835944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.835949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.835953Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.835959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.835975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
tabletId# 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: PREPARED TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 PrepareArriveTime: 97000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 46 } } 2024-11-21T17:50:13.965010Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:50:13.965044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.965050Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T17:50:13.965503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.965539Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.965545Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:13.965556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:13.965588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:13.965893Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T17:50:13.965922Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2024-11-21T17:50:13.966055Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.966073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:13.966079Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:50:13.966100Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T17:50:13.966124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: 
minStep5000005 State->FrontStep: 5000005 2024-11-21T17:50:14.123378Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.123396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:14.123473Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.123480Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2024-11-21T17:50:14.123581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.123591Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T17:50:14.123951Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T17:50:14.123964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T17:50:14.123969Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T17:50:14.123974Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T17:50:14.123981Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:50:14.123998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2024-11-21T17:50:14.124114Z node 3 :DATASHARD_BACKUP ERROR: [Export] [scanner] Error read data from table: Invalid DyNumber binary representation REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:17371 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A0B43BE8-8FC5-4D5A-97A4-A2923B190C33 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2024-11-21T17:50:14.126145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2024-11-21T17:50:14.139138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:14.139167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 
72075186233409547, partId: 0 2024-11-21T17:50:14.139196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:14.139211Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T17:50:14.139227Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.139231Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.139236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:14.139242Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T17:50:14.139289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.139757Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.139832Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.139840Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T17:50:14.139851Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T17:50:14.139855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:14.139861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T17:50:14.139875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T17:50:14.139881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:14.139886Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T17:50:14.139890Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T17:50:14.139917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:14.140330Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T17:50:14.140345Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T17:50:14.140385Z node 3 :EXPORT NOTICE: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid DyNumber binary representation' } 2024-11-21T17:50:14.140778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:14.140791Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:460:2424] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:12.739195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.739222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.739228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.739233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.739238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.739242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.739251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.739323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.750637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.750661Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.753357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.754196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.754230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.755427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T17:50:12.755565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.755652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.755718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.756491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.756725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.756735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.756771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.756779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.756784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.756797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.757953Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.776127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.776194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.776269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.776312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.776320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.776893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.776915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.776946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.776956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.776959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.776964Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.777323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.777336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.777341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.777649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.777659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.777664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.777670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.778261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.778604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.778644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.778800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.778825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.778831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.778882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.778889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.778926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.778937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.779290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.779298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.779327Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.779332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.779388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.779395Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.779403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.779409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.779414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.779418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.779422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.779425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.779435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.779440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.779444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.779718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.779735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.779740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.779744Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.779749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.779762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
3 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2024-11-21T17:50:14.361186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.361191Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.361198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:14.361223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:14.361288Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.361302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.361306Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:14.361310Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T17:50:14.361315Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:14.361362Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.361367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.361369Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:14.361371Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T17:50:14.361373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:50:14.361377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:50:14.361826Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T17:50:14.361863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:50:14.361867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:50:14.361871Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction 
is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:50:14.361973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T17:50:14.361993Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2024-11-21T17:50:14.362060Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.362074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.362079Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2024-11-21T17:50:14.362095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.362102Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T17:50:14.362104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:14.362110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:14.362117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:14.362120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T17:50:14.362124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:14.362127Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T17:50:14.362129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T17:50:14.362134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:50:14.362137Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T17:50:14.362140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:50:14.362141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:50:14.362204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.362214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 281474976710763 2024-11-21T17:50:14.362428Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.362434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.362453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:14.362466Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.362469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T17:50:14.362472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T17:50:14.362556Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.362563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.362565Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:14.362568Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:14.362574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:14.362633Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.362638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.362640Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:14.362642Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:50:14.362644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:14.362650Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T17:50:14.362653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:123:2149] 2024-11-21T17:50:14.363067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.363106Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:14.363115Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T17:50:14.363123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T17:50:14.363127Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T17:50:14.363129Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2024-11-21T17:50:14.363133Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2024-11-21T17:50:14.363333Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T17:50:14.363343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:14.363347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:827:2764] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 64688, MsgBus: 19027 2024-11-21T17:49:58.919625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791307386984944:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:58.919830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001bed/r3tmp/tmpFcqX5x/pdisk_1.dat 2024-11-21T17:49:58.975205Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64688, node 1 2024-11-21T17:49:58.983733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:58.983747Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:58.983749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:58.983782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19027 2024-11-21T17:49:59.021033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:59.021065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:59.022162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:59.036425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:49:59.200827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311681952848:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.200852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.225716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.296419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311681954374:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.296447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.296545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791311681954379:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:59.297372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:49:59.299034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791311681954381:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:50:03.919725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791307386984944:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:03.919778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:50:13.969259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:50:13.969279Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.041040s took: 0.042185s took: 0.042189s took: 0.042206s took: 0.042170s took: 0.042240s took: 0.042186s took: 0.042161s took: 0.042077s took: 0.042561s took: 0.133974s took: 0.136535s took: 0.136683s took: 0.136902s took: 0.137157s took: 0.137394s took: 0.137617s took: 0.137825s took: 0.137956s took: 0.142923s took: 0.059703s took: 0.060186s took: 0.060390s took: 0.061488s took: 0.061273s took: 0.061656s took: 0.062460s took: 0.063112s took: 0.064557s took: 0.083253s took: 0.017530s took: 0.017891s took: 0.017870s took: 0.017824s took: 0.018172s took: 0.018381s took: 0.018475s took: 0.018914s took: 0.021559s took: 0.022088s took: 0.032226s took: 0.032450s took: 0.045015s took: 0.050747s took: 0.050822s took: 0.058423s took: 0.058717s took: 0.058546s took: 0.059235s took: 0.063291s 2024-11-21T17:50:14.751120Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2024-11-21T17:50:14.751139Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2024-11-21T17:50:14.751142Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2024-11-21T17:50:14.751144Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2024-11-21T17:50:14.751146Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2024-11-21T17:50:14.751148Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2024-11-21T17:50:14.751150Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2024-11-21T17:50:14.751152Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2024-11-21T17:50:14.751154Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2024-11-21T17:50:14.751156Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2024-11-21T17:50:14.751638Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2024-11-21T17:50:14.751650Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2024-11-21T17:50:14.751653Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2024-11-21T17:50:14.751655Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037916 not found 2024-11-21T17:50:14.751658Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2024-11-21T17:50:14.751660Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2024-11-21T17:50:14.751663Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2024-11-21T17:50:14.751665Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2024-11-21T17:50:14.751667Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2024-11-21T17:50:14.751669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2024-11-21T17:50:14.751672Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2024-11-21T17:50:14.751674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2024-11-21T17:50:14.751676Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2024-11-21T17:50:14.751678Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T17:50:14.751680Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2024-11-21T17:50:14.751682Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2024-11-21T17:50:14.751684Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2024-11-21T17:50:14.751685Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T17:50:14.751688Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2024-11-21T17:50:14.751690Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2024-11-21T17:50:14.751693Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T17:50:14.751695Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2024-11-21T17:50:14.751698Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2024-11-21T17:50:14.751700Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2024-11-21T17:50:14.751703Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2024-11-21T17:50:14.751705Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2024-11-21T17:50:14.751706Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 
72075186224037913 not found 2024-11-21T17:50:14.751709Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T17:50:14.751711Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2024-11-21T17:50:14.751713Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found |81.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:08.245699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:08.245725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:08.245730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:08.245734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:08.245748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:08.245751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:08.245760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:08.245840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:08.256700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:08.256725Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:08.259129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:08.259230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:08.259260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:08.261793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:08.261869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:08.261978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:08.262149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:08.262686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:08.262996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:08.263007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:08.263019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:08.263026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:08.263032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:08.263075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:08.264299Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:08.279951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:08.280052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.280122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:08.280167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:08.280175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.280998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:08.281024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:08.281090Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.281099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:08.281103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:08.281108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:08.281453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.281462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:08.281466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:08.281736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.281744Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.281749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:08.281757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:08.282283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:08.282660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:08.282713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:08.282891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:08.282913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:08.282920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:08.282980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:08.282987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:08.283021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:08.283035Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:08.283392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:08.283401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:08.283443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:08.283448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:08.283523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:08.283529Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:08.283539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:08.283543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:08.283549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:08.283553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:08.283558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:08.283561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:08.283571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:08.283577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:08.283581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto 
VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.091650Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:14.091717Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 75us result status StatusSuccess 2024-11-21T17:50:14.091870Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 
0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.112456Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1167:2910] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:50:14.112503Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1104:2910] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T17:50:14.112542Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1167:2910] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732211414084384 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732211414084384 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1732211414084384 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:50:14.114052Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1167:2910] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2024-11-21T17:50:14.114072Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1104:2910] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TSchemeShardTTLTests::ShouldCheckQuotas >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:12.181254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.181279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.181284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.181288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.181294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.181297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.181316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.181394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.193439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.193462Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T17:50:12.203539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.204604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.204648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.210580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.210833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.210965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.211095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.213676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.214071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.214085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.214130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.214137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.214143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.214158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.215402Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.234968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.235058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.235124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.235169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.235176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.235979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.236008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.236049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.236060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.236065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.236070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.236445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.236455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.236459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.236754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.236763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.236769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.236776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.237394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.237780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.237826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.237994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.238015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.238024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.238076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.238093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.238132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.238151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.238730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.238744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.238802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.238813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.238919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.238931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.238945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.238949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.238954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.238965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.238970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.238974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.238989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.238997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.239001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.239355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.239374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.239381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.239386Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.239430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.239449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
rrier }, at tablet# 72057594046678944 2024-11-21T17:50:14.562580Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 240 -> 240 2024-11-21T17:50:14.562932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.562942Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2024-11-21T17:50:14.562956Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2024-11-21T17:50:14.562963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T17:50:14.562969Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2024-11-21T17:50:14.562986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710758 2024-11-21T17:50:14.562992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T17:50:14.562999Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2024-11-21T17:50:14.563002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2024-11-21T17:50:14.563034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:14.563039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:14.563947Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2024-11-21T17:50:14.563964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2024-11-21T17:50:14.564320Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2024-11-21T17:50:14.580841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:14.580864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:50:14.581856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:22881" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 
4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.581955Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, 
at schemeshard: 72057594046678944 2024-11-21T17:50:14.581996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:14.582121Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:14.582129Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.582470Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:14.582482Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:14.584906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.584953Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2024-11-21T17:50:14.585033Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2024-11-21T17:50:14.585040Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2024-11-21T17:50:14.585151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.585163Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet72057594046678944 2024-11-21T17:50:14.585169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2024-11-21T17:50:14.585175Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2024-11-21T17:50:14.593851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2024-11-21T17:50:14.593876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2024-11-21T17:50:14.594035Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.594044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.594084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T17:50:14.597172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.597242Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.597251Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.597287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose 
backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T17:50:14.597440Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:14.597472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2024-11-21T17:50:14.597575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T17:50:14.597611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2024-11-21T17:50:14.599442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T17:50:14.599640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:14.599650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:543:2505] TestWaitNotification: OK eventTxId 102 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> TSchemeShardAuditSettings::CreateSubdomain >> TSchemeShardAuditSettings::CreateExtSubdomain >> TWebLoginService::AuditLogLdapLoginBadPassword >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> TPQCachingProxyTest::MultipleSessions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:15.722542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:15.722573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:15.722579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:15.722585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:15.722592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:15.722595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:15.722605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:15.722712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:15.734768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:15.734791Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:15.738266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:15.739158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:15.739223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:15.740560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:15.740740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:15.740866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:15.740954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:15.741832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:15.742125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:15.742135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:15.742178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:15.742185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:15.742191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:15.742209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.743534Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:15.763826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:15.763915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.764001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:15.764050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:15.764059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:50:15.772631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:15.772689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:15.772767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.772782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:15.772788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:15.772794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:15.780475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.780509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:15.780520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:15.781175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.781189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.781196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:15.781206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:15.781943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:15.782478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:15.782535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:15.782735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:15.782765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:15.782776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:15.782841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T17:50:15.782850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:15.782888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:15.782903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:15.783338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:15.783346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:15.783397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:15.783402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:15.783488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.783494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:15.783507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:15.783512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:15.783519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:15.783525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:15.783529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:15.783533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:15.783546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:15.783552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:15.783556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:15.783910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:15.783923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:15.783929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:15.783935Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:15.783939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T17:50:15.783954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 5186233409546 2024-11-21T17:50:16.030050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.030098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 378 RawX2: 4294969643 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.030111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#103:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2024-11-21T17:50:16.030156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2024-11-21T17:50:16.030189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:50:16.030204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:16.033278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.033298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:16.033350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:16.033408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.033415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T17:50:16.033422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 4 2024-11-21T17:50:16.033658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.033671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:16.034153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:16.034170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:16.034174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:50:16.034180Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T17:50:16.034186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:50:16.034350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:16.034360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:16.034364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:50:16.034367Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T17:50:16.034371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:50:16.034380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T17:50:16.034551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 272 } } 2024-11-21T17:50:16.034558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2024-11-21T17:50:16.034573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 272 } } 2024-11-21T17:50:16.034602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 272 } } 2024-11-21T17:50:16.034689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 535 RawX2: 4294969777 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:50:16.034695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2024-11-21T17:50:16.034706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 535 RawX2: 4294969777 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:50:16.034712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:16.034720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 535 RawX2: 4294969777 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:50:16.034731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept 
TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.034734Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.034738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:50:16.034745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T17:50:16.036102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:50:16.036127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:50:16.036262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.036284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.036303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.036310Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T17:50:16.036323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:50:16.036328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:16.036334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:50:16.036347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2024-11-21T17:50:16.036357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:16.036362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:50:16.036366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:50:16.036392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:16.039125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:16.039143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:566:2505] TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2024-11-21T17:50:16.039803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:16.039867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 
104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.039879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } } }, at schemeshard: 72057594046678944 2024-11-21T17:50:16.039965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2024-11-21T17:50:16.040534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.040571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:16.132265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:16.132294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.132300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:16.132306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:16.132320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:16.132324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:16.132334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.132425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:16.144107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:16.144130Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:16.147359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:16.148223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:16.148288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:16.149961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:16.150199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:16.150321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.150410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:16.151630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.151916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.151924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.151974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:16.151979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.151985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:16.152002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.153503Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:16.168887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:16.168973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.169031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:16.169096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:16.169103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.169803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.169838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:16.169879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.169889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:16.169893Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:16.169897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:16.170343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.170357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:16.170363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:16.170763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.170774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.170780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.170787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.171423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:16.171849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:16.171904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:16.172083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.172112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.172123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.172179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:16.172186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.172301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.172319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:16.172786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.172794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.172834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.172839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:16.172913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.172918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:16.172926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:16.172929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.172933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:16.172936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.172940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:16.172942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:16.172951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.172955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:16.172958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:16.173195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.173204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.173207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:16.173210Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:16.173213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.173224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
94046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.246957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.246962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.246969Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2024-11-21T17:50:16.246980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.247143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.247160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.247166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.247172Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2024-11-21T17:50:16.247204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T17:50:16.247217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T17:50:16.247876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2024-11-21T17:50:16.247919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2024-11-21T17:50:16.248250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.248281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.248292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2024-11-21T17:50:16.248313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.248320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: 
[OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T17:50:16.248328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2024-11-21T17:50:16.248832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:16.248940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:16.249421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.249441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:16.249478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2024-11-21T17:50:16.249511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.249529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 FAKE_COORDINATOR: Erasing txId 112 2024-11-21T17:50:16.249969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.249979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.250012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T17:50:16.250034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.250038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2024-11-21T17:50:16.250043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2024-11-21T17:50:16.250052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.250058Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2024-11-21T17:50:16.250062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2024-11-21T17:50:16.250221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.250232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.250236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.250240Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2024-11-21T17:50:16.250246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.250380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.250392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.250396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.250400Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T17:50:16.250403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T17:50:16.250413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T17:50:16.250864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.250877Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2024-11-21T17:50:16.250890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2024-11-21T17:50:16.250895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T17:50:16.250900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2024-11-21T17:50:16.250905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T17:50:16.250910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2024-11-21T17:50:16.250914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2024-11-21T17:50:16.250923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T17:50:16.250999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:16.251004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:50:16.251015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T17:50:16.251067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:16.251073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:50:16.251082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.251140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:16.251320Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:16.251671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:16.251684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2024-11-21T17:50:16.251768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2024-11-21T17:50:16.251775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2024-11-21T17:50:16.251871Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2024-11-21T17:50:16.251887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2024-11-21T17:50:16.251892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:660:2652] TestWaitNotification: OK eventTxId 112 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:14.023275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:14.023303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:14.023308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:14.023313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:14.023318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:14.023321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:14.023329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:14.023425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:14.033682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:14.033703Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:14.036408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:14.037242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:14.037284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T17:50:14.039466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:14.039681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:14.039795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.039865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:14.041071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.041314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.041324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.041357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:14.041364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.041371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:14.041384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.042721Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:14.058329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.058406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.058467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:14.058515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:14.058525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.059247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.059292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:14.059356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.059373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:14.059380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:14.059387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:14.060005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.060027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:14.060035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:14.060455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.060471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.060478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.060488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.061146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:14.061569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:14.061626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:14.061817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.061844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.061853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.061911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:14.061918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.061954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:14.061967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:14.062452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.062465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.062511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.062517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:14.062600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.062607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:14.062620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:14.062625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.062632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:14.062638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.062643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:14.062646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:14.062657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:14.062663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:14.062668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:14.062975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:14.062988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:14.062993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:14.062997Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:14.063002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:14.063015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
LAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409593 TxId: 103 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T17:50:16.076126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409594 TxId: 103 MinStep: 0 Step: 5000004 2024-11-21T17:50:16.076131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409594, partId: 0 2024-11-21T17:50:16.076142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409594 TxId: 103 MinStep: 0 Step: 5000004 2024-11-21T17:50:16.076149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409594 TxId: 103 MinStep: 0 Step: 5000004 2024-11-21T17:50:16.076195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409594 TxId: 103 2024-11-21T17:50:16.076200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409594, partId: 0 2024-11-21T17:50:16.076210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409594 TxId: 103 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T17:50:16.076846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.077541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.077585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.077629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.077657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.077668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T17:50:16.077696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:50:16.077702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:16.077709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:50:16.077730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2615:3881] message: TxId: 103 2024-11-21T17:50:16.077738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:16.077759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:50:16.077763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:50:16.078045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2024-11-21T17:50:16.079529Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:16.079547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:3960:5160] TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:16.079714Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:16.079802Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 100us result status StatusSuccess 2024-11-21T17:50:16.080004Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: 
ColumnCodecPlain } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2024-11-21T17:50:16.080820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:16.080870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.081021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2024-11-21T17:50:16.081651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.081684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: 
StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:50:16.081775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:50:16.081782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:50:16.081876Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:16.081900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:16.081905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4393:5592] TestWaitNotification: OK eventTxId 104 |81.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:07.495776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:07.495806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.495812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:07.495826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:07.495832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:07.495836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:07.495844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.495983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:07.507566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:07.507590Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:07.511225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:07.512175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:07.512219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:07.513900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:07.514182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:07.514288Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.514363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:07.515971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.516270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.516281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.516320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:07.516327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.516333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:07.516346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.521644Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:07.542098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:07.542170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.542239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:07.542284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:07.542291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.543112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.543147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:07.543200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.543211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:07.543216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:07.543222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:07.543541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.543548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:07.543552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:07.543795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.543803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.543809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.543815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.544472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:07.544934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:07.544986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:07.545172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.545198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.545206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.545262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:07.545269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.545299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.545310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:07.545644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.545651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.545693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.545699Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:07.545776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.545783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:07.545796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:07.545801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.545808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:07.545813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.545818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:07.545822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:07.545831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:07.545837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:07.545841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:07.546136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.546148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:07.546153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:07.546158Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:07.546162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.546174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Id: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T17:50:16.114639Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.114649Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.114673Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:16.114713Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.114718Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:16.114723Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:50:16.114837Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.114846Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:50:16.115306Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:16.115326Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:16.115330Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:16.115335Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:16.115340Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.115468Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:16.115479Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:16.115483Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:16.115486Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:50:16.115491Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:16.115501Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is 
published: true 2024-11-21T17:50:16.115685Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 324 } } 2024-11-21T17:50:16.115696Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:16.115715Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 324 } } 2024-11-21T17:50:16.115730Z node 28 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 324 } } 2024-11-21T17:50:16.115864Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:16.115872Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:16.115885Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:16.115892Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:16.115898Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:16.115909Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.115912Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.115916Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:16.115921Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T17:50:16.116401Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:16.116698Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
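The "Change state for txid" lines trace each sub-operation through numeric states: 2 -> 3 -> 128 -> 240 for the subdomain alter, 129 -> 240 once all shard schema changes are acknowledged, and 128 -> 134 -> 135 -> 240 for the extended-subdomain drop earlier in the trace. A small sketch that tabulates those observed chains as a reading aid; the enum and label names are guesses taken from the adjacent log text, not YDB's actual state definitions:

```cpp
// Illustrative sketch only: tabulates the state codes observed in the
// "Change state for txid" log lines above. Stage names are editorial guesses
// based on the surrounding trace text, not YDB headers.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

enum class EOpState : uint32_t {
    CreateParts          = 2,    // after "TCreateParts ... ProgressState"
    ConfigureParts       = 3,    // after "NSubDomainState::TConfigureParts"
    Propose              = 128,  // "TPropose" (subdomain alter / ext-subdomain drop)
    ProposedWaitParts    = 129,  // "NTableState::TProposedWaitParts"
    DeleteExternalShards = 134,  // "TDropExtSubdomain TDeleteExternalShards"
    DeleteParts          = 135,  // "TDeleteParts"
    Done                 = 240,  // "TDone ... ProgressState"
};

int main() {
    // Transition chains reconstructed from the trace, keyed by the operation
    // they belong to (labels are editorial).
    const std::map<std::string, std::vector<EOpState>> observed = {
        {"AlterSubDomain txId 1",
         {EOpState::CreateParts, EOpState::ConfigureParts, EOpState::Propose, EOpState::Done}},
        {"DropExtSubdomain txId 112",
         {EOpState::Propose, EOpState::DeleteExternalShards, EOpState::DeleteParts, EOpState::Done}},
        {"Table create txId 101 (tail of trace)",
         {EOpState::ProposedWaitParts, EOpState::Done}},
    };
    for (const auto& [label, chain] : observed) {
        std::cout << label << ":";
        for (size_t i = 0; i < chain.size(); ++i) {
            std::cout << (i ? " -> " : " ") << static_cast<uint32_t>(chain[i]);
        }
        std::cout << "\n";
    }
    return 0;
}
```

Running it simply prints the three chains; it exists only to make the numeric transitions in the surrounding log easier to follow.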
2024-11-21T17:50:16.116717Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.116731Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.116785Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.116792Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:16.116804Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:16.116808Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:16.116813Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T17:50:16.116824Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:342:2317] message: TxId: 101 2024-11-21T17:50:16.116833Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:16.116837Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:16.116841Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:16.116864Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:16.117264Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:16.117274Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:343:2318] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:16.117374Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:16.117419Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 55us result status StatusSuccess 2024-11-21T17:50:16.117536Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: 
UNIT_NANOSECONDS } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:16.125579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:16.125608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.125613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:16.125620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:16.125634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:16.125638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:16.125647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.125738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:16.137148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:16.137175Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:16.140461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:16.141274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:16.141317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:16.143226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:16.143499Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:16.143613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.143703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:16.145134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.145503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.145520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.145565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:16.145575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.145581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:16.145603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.147228Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:16.165107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:16.165206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.165275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:16.165352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:16.165359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.166249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.166274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:16.166324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.166334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:16.166339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:16.166345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T17:50:16.166772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.166782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:16.166786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:16.167104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.167113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.167119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.167126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.167726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:16.168136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:16.168191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:16.168398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.168423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.168434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.168491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:16.168499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.168531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.168543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:16.168941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.168948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.168994Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.168999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:16.169079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.169086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:16.169097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:16.169102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.169108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:16.169113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.169118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:16.169122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:16.169132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.169138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:16.169143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:16.169436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.169446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.169451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:16.169457Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:16.169462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.169477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
etId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 112 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:16.241042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.241051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.241055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.241060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2024-11-21T17:50:16.241064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.241196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.241207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.241210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.241213Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2024-11-21T17:50:16.241218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T17:50:16.241232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T17:50:16.241625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2024-11-21T17:50:16.241656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2024-11-21T17:50:16.241951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.241973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.241981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2024-11-21T17:50:16.241988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit 
path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.241992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T17:50:16.242013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2024-11-21T17:50:16.242041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.242050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T17:50:16.242249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:16.242299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2024-11-21T17:50:16.242603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.242610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.242629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T17:50:16.242649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.242653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2024-11-21T17:50:16.242657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2024-11-21T17:50:16.242699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.242705Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2024-11-21T17:50:16.242712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2024-11-21T17:50:16.242719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T17:50:16.242723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2024-11-21T17:50:16.242727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T17:50:16.242730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2024-11-21T17:50:16.242733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2024-11-21T17:50:16.242744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T17:50:16.242748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2024-11-21T17:50:16.242751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
2024-11-21T17:50:16.242755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2024-11-21T17:50:16.242845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.242854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.242858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.242862Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2024-11-21T17:50:16.242865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.243183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.243196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:16.243200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:16.243205Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T17:50:16.243209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T17:50:16.243224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2024-11-21T17:50:16.243257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:16.243262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:50:16.243278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T17:50:16.243325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:16.243333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:50:16.243342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.243878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:16.244374Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:16.244394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:16.244404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2024-11-21T17:50:16.244468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2024-11-21T17:50:16.244472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2024-11-21T17:50:16.244544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2024-11-21T17:50:16.244564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2024-11-21T17:50:16.244567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:652:2644] TestWaitNotification: OK eventTxId 112 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:16.162429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:16.162452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.162455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:16.162458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:16.162474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:16.162477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:16.162484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.162586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:16.169717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:16.169741Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:16.171899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:16.171928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:16.171954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T17:50:16.174448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:16.174503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:16.174590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.174640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:16.175258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.175500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.175507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.175537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:16.175543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.175547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:16.175557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.177033Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:16.193163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:16.193272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.193356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:16.193413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:16.193421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.194384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.194434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:16.194500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.194512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:16.194516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:16.194521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:16.195007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.195019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:16.195024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:16.195376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.195385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.195390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.195398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.195982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:16.196427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:16.196491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:16.196684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.196708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.196719Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.196774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:16.196781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.196821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.196833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:16.197238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.197247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.197298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.197303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:16.197377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.197385Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:16.197397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:16.197402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.197408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:16.197413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.197418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:16.197421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:16.197432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.197438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:16.197442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:16.197748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.197762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.197767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:16.197773Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:16.197778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.197793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:16.198958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:16.199078Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.199728Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:16.200977Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T17:50:16.201041Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T17:50:16.201124Z node 1 :LDAP_AUTH_PROVIDER DEBUG: 
init: scheme: ldap, uris: ldap://localhost:1786, port: 1786 2024-11-21T17:50:16.201622Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:50:16.216575Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2024-11-21T17:50:16.264402Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2024-11-21T17:50:16.264636Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:1786. Invalid credentials 2024-11-21T17:50:16.265169Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2024-11-21T17:50:16.265349Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:16.266559Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T17:50:16.194425Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:16.265092Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:1786. Invalid credentials, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T17:50:16.265092Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:1786. 
Invalid credentials, login_user=user1@ldap |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport >> BsControllerConfig::ExtendByCreatingSeparateBox >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2024-11-21T17:50:16.517898Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:16.517932Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:16.521736Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:16.521763Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T17:50:16.521779Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T17:50:16.521785Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2024-11-21T17:50:16.521790Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T17:50:16.521798Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1 2024-11-21T17:50:16.521803Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2024-11-21T17:50:16.521809Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2024-11-21T17:50:16.521812Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2 >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2024-11-21T17:50:16.839000Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:16.839026Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:16.842744Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:16.842771Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2024-11-21T17:50:16.842786Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T17:50:16.842791Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T17:50:16.842802Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2024-11-21T17:50:16.842809Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
Data ignored 2024-11-21T17:50:16.842817Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T17:50:16.842831Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> TExportToS3Tests::AuditCancelledExport [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::OverlayMap [GOOD] >> BsControllerConfig::ManyPDisksRestarts >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:16.276087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:16.276106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.276109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:16.276113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:16.276123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:16.276125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:16.276131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.276194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:16.284725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:16.284746Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:16.287573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:16.288190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:16.288216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:16.289377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:16.289582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:16.289652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.289698Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:16.290568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.290773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.290781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.290808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:16.290814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.290818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:16.290828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.291910Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:16.308405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:16.308486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.308547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:16.308610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:16.308618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.309423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.309448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:16.309499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.309507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:16.309510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:16.309513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:16.309866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.309876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:16.309880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:16.310135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.310142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.310146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.310151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.310583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:16.310904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:16.310958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:16.311101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.311118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.311126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.311164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:16.311169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.311190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.311198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:16.311648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.311660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.311699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.311705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2024-11-21T17:50:16.311780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.311789Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:16.311800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:16.311805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.311810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:16.311814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.311819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:16.311822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:16.311835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.311841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:16.311846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:16.312159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.312178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.312183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:16.312189Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:16.312194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.312212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 0, tablet: 72057594037968897 2024-11-21T17:50:17.275741Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteOwnerTablets, msg: { Owner: 72075186233409618 TxId: 175 } 2024-11-21T17:50:17.275785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2024-11-21T17:50:17.275795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2024-11-21T17:50:17.275801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2024-11-21T17:50:17.275820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2024-11-21T17:50:17.275837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.275844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2024-11-21T17:50:17.276343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.276392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.276397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.276421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T17:50:17.276443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.276448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2024-11-21T17:50:17.276453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2024-11-21T17:50:17.276526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.276533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2024-11-21T17:50:17.276539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2024-11-21T17:50:17.276691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:17.276704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:17.276708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 
2024-11-21T17:50:17.276712Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2024-11-21T17:50:17.276717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:17.276882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:17.276894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:17.276898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:17.276902Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2024-11-21T17:50:17.276906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2024-11-21T17:50:17.276917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T17:50:17.277440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:50:17.277447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:50:17.277449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:50:17.277486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.277491Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2024-11-21T17:50:17.277498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2024-11-21T17:50:17.277500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T17:50:17.277517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2024-11-21T17:50:17.277521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T17:50:17.277524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2024-11-21T17:50:17.277527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2024-11-21T17:50:17.277558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2024-11-21T17:50:17.277664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:17.277780Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 TabletID: 72075186233409619 2024-11-21T17:50:17.278391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2024-11-21T17:50:17.278535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 Forgetting tablet 72075186233409619 2024-11-21T17:50:17.278788Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 2024-11-21T17:50:17.278909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2024-11-21T17:50:17.278951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T17:50:17.278988Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 2024-11-21T17:50:17.279194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:17.279732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2024-11-21T17:50:17.279772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 Forgetting tablet 72075186233409618 2024-11-21T17:50:17.280005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:17.280010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T17:50:17.280032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 Forgetting tablet 72075186233409620 2024-11-21T17:50:17.280338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:17.280346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T17:50:17.280361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.280718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:17.280984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2024-11-21T17:50:17.280995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2024-11-21T17:50:17.281724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2024-11-21T17:50:17.281738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2024-11-21T17:50:17.281765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:75 2024-11-21T17:50:17.281772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:75 tabletId 72075186233409620 2024-11-21T17:50:17.281789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:17.281839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2024-11-21T17:50:17.282167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2024-11-21T17:50:17.282174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2024-11-21T17:50:17.282381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2024-11-21T17:50:17.282406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2024-11-21T17:50:17.282411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6713:7724] TestWaitNotification: OK eventTxId 175 >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> ClientStatsCollector::PrepareQuery >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:12.122857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.122882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.122887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.122891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.122913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.122917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.122925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.123007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.135581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.135610Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.142888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.142936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.142965Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.145448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.145503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.145598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.145651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.146254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.146532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.146544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.146588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.146596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.146602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.146615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.147790Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.165234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.165313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.165395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.165447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.165455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.168577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.168628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.168689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.168703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet72057594046678944 2024-11-21T17:50:12.168708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.168713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.169271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.169285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.169290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.169601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.169611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.169618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.169625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.170222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.170618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.170667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.170834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.170857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.170864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.170934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.170948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.170991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.171010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.171399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T17:50:12.171406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.171444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.171449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.171517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.171524Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.171536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.171540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.171545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.171550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.171554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.171558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.171569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.171575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.171578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.171889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.171903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.171907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.171912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.171916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.171930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
pose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:11744" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:17.395918Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.395953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:17.396044Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:17.396051Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.396965Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:17.396980Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:17.397129Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:17.397170Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2024-11-21T17:50:17.397230Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2024-11-21T17:50:17.397237Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2024-11-21T17:50:17.397301Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.397311Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet72057594046678944 2024-11-21T17:50:17.397316Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2024-11-21T17:50:17.397321Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2024-11-21T17:50:17.397979Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2024-11-21T17:50:17.397993Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2024-11-21T17:50:17.398068Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.398074Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.398099Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T17:50:17.398684Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.398751Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.398764Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.398781Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T17:50:17.398910Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:17.398928Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2024-11-21T17:50:17.399058Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T17:50:17.399083Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2024-11-21T17:50:17.399652Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T17:50:17.399700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:17.399706Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:549:2511] TestWaitNotification: OK eventTxId 102 AUDIT LOG buffer(7): 2024-11-21T17:50:16.878952Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:16.882952Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:17.168691Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none} 2024-11-21T17:50:17.170326Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:17.175658Z: component=schemeshard, 
tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:17.397162Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:17.398996Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2024-11-21T17:50:16.904899Z, end_time=2024-11-21T17:50:46.952899Z AUDIT LOG checked line: 2024-11-21T17:50:17.398996Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2024-11-21T17:50:16.904899Z, end_time=2024-11-21T17:50:46.952899Z >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:17.972352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:17.972373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.972378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:17.972382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:17.972386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:17.972390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:17.972397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.972479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:17.983262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:17.983279Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:17.985790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:17.986516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:17.986538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:17.987759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:17.987947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:17.988010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.988066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:17.989402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.989665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.989677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.989712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:17.989719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.989726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:17.989738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.990855Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:18.006106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.006173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.006210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:18.006242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:18.006249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.006812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.006834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:18.006883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.006893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:18.006898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:18.006902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:18.007302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.007318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:18.007324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:18.007725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.007736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.007742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.007749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.008349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:18.008741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:18.008796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:18.008948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.008973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.008984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.009028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:18.009034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.009057Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:18.009068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:18.009510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.009518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:18.009543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.009546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:18.009599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.009604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:18.009612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:18.009615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.009618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:18.009622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.009625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:18.009627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:18.009637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:18.009640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:18.009643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:18.009869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:18.009880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:18.009883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:18.009888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:18.009892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:18.009905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
: 72075186233409548 TxId: 104 Status: OK 2024-11-21T17:50:18.052604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2024-11-21T17:50:18.052611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T17:50:18.052615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T17:50:18.053247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:50:18.053333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:50:18.053340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:50:18.053407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:18.053414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:50:18.053418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:18.084823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.084868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 378 RawX2: 4294969643 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.084881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2024-11-21T17:50:18.084899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T17:50:18.121970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2024-11-21T17:50:18.122012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T17:50:18.122020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T17:50:18.122030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.122036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T17:50:18.122075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
104:0 128 -> 240 2024-11-21T17:50:18.122110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:18.122121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:18.122743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.123021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.123030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:18.123072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:18.123111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.123118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T17:50:18.123124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T17:50:18.123214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.123223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:50:18.123237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:50:18.123242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:18.123249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T17:50:18.123255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:18.123262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:50:18.123266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:50:18.123298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:18.123304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2024-11-21T17:50:18.123309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:50:18.123313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:50:18.123543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:18.123557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2024-11-21T17:50:18.123564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:18.123570Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:50:18.123575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:50:18.123690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:18.123700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:18.123704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:18.123708Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:50:18.123713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:18.123721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T17:50:18.123726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:404:2371] 2024-11-21T17:50:18.124537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:50:18.124784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:50:18.124804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.124809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:541:2475] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2024-11-21T17:50:18.127418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.127460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.127489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2024-11-21T17:50:18.127937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.127960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:18.128013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:18.128020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:18.128089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:18.128107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.128111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:641:2564] TestWaitNotification: OK eventTxId 105 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:17.865042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:17.865066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.865071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:17.865075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:17.865080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:17.865084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:17.865091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.865182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:17.875179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:17.875205Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:17.879008Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:17.879983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:17.880026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:17.885832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:17.886111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:17.886234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.886358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:17.887634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.887941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.887954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.888000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:17.888010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.888017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:17.888034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.889433Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:17.906545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:17.906625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.906697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:17.906752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:17.906761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.907530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.907560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:17.907624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:50:17.907638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:17.907642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:17.907647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:17.908108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.908118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:17.908121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:17.910066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.910080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.910087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.910094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.910690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:17.911224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:17.911298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:17.911492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.911539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:17.911551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.911611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:17.911620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.911655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.911671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T17:50:17.912165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.912174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.912213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.912218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:17.912314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.912322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:17.912335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:17.912339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.912344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:17.912349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.912354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:17.912358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:17.912371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:17.912377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:17.912380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:17.912622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:17.912631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:17.912634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:17.912637Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:17.912640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.912651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
548 TxId: 104 Status: OK 2024-11-21T17:50:18.405510Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2024-11-21T17:50:18.405517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T17:50:18.405523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T17:50:18.405986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:50:18.406063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:50:18.406073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:50:18.406162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:18.406172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:50:18.406178Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:18.437530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.437574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 373 RawX2: 8589936935 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.437586Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2024-11-21T17:50:18.437602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T17:50:18.467063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2024-11-21T17:50:18.467123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T17:50:18.467135Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T17:50:18.467148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.467152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T17:50:18.467206Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 
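The sequence just above shows the alter operation parked in its Propose state until every affected PQ shard reports COMPLETE: "can't persist state: ShardsInProgress is not empty, remain: 1", then "CollectPQConfigChanged: true" and the 128 -> 240 transition. A minimal sketch of that bookkeeping pattern is below; the class and member names are hypothetical illustrations, not the actual schemeshard types.

```cpp
#include <cstddef>
#include <cstdint>
#include <unordered_set>
#include <utility>

// Hypothetical sketch, not YDB internals: the operation stays in its Propose
// state while any shard is still in progress, and only advances (the
// "128 -> 240" transition in the log) once the last shard reports COMPLETE.
class AlterTopicOperation {
public:
    explicit AlterTopicOperation(std::unordered_set<uint64_t> shards)
        : ShardsInProgress(std::move(shards)) {}

    // Handle a COMPLETE report from one PQ tablet.
    // Returns true when this was the last outstanding shard.
    bool OnShardComplete(uint64_t tabletId) {
        ShardsInProgress.erase(tabletId);
        if (!ShardsInProgress.empty()) {
            return false;   // "can't persist state: ShardsInProgress is not empty"
        }
        State = 240;        // mirrors "Change state for txid ...: 128 -> 240"
        return true;
    }

    int CurrentState() const { return State; }
    std::size_t Remaining() const { return ShardsInProgress.size(); }

private:
    std::unordered_set<uint64_t> ShardsInProgress;
    int State = 128;        // Propose
};
```

Keeping the pending-shard set explicit is what lets the log report how many shards remain before the operation state can be persisted.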
2024-11-21T17:50:18.467246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:18.467257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:18.467873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.467933Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.467939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:18.467980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:18.468015Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.468020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T17:50:18.468025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T17:50:18.468102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.468110Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:50:18.468123Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:50:18.468126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:18.468133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T17:50:18.468139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:18.468144Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:50:18.468148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:50:18.468180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:18.468186Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2024-11-21T17:50:18.468190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:50:18.468196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:50:18.468529Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:18.468550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
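The entries above, and the acknowledgements that follow, show the schemeshard counting outstanding scheme-board publications for txId 104 ("Publication still in progress, tx: 104, publications: 2, subscribers: 1") and releasing the waiter only after the last TEvUpdateAck. A rough sketch of that counting pattern, using hypothetical names rather than YDB's own types:

```cpp
#include <cstdint>
#include <unordered_map>

// Hypothetical sketch, not YDB code: count outstanding scheme-board
// publications per transaction and notify the waiting subscriber only when
// the count drops to zero ("Publication complete, notify & remove").
class PublicationTracker {
public:
    // Register how many path publications txId is waiting for,
    // e.g. "Publication still in progress, tx: 104, publications: 2".
    void Expect(uint64_t txId, int publications) {
        Pending[txId] = publications;
    }

    // Handle one TEvUpdateAck-style acknowledgement.
    // Returns true exactly once per txId, when the last ack arrives.
    bool Ack(uint64_t txId) {
        auto it = Pending.find(txId);
        if (it == Pending.end()) {
            return false;            // unknown or already completed transaction
        }
        if (--it->second > 0) {
            return false;            // "Publication in-flight, count: N"
        }
        Pending.erase(it);           // complete: notify subscribers, drop record
        return true;
    }

private:
    std::unordered_map<uint64_t, int> Pending;
};
```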
2024-11-21T17:50:18.468556Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:18.468561Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:50:18.468567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:50:18.468661Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:18.468669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:18.468672Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:18.468676Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:50:18.468680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:18.468688Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T17:50:18.468693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T17:50:18.471155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:50:18.471189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:50:18.471206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.471212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:540:2477] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2024-11-21T17:50:18.473949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.474009Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.474052Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2024-11-21T17:50:18.474543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous 
partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.474573Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:18.474625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:18.474631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:18.474702Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:18.474717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.474722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:638:2564] TestWaitNotification: OK eventTxId 105 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:18.177286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:18.177314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:18.177319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:18.177323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:18.177329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:18.177333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:18.177341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:18.177441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:18.187112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:18.187133Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:18.189720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:18.190540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:18.190580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:18.192649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:18.192909Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:18.193024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.193121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:18.194526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.194824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.194837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.194879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:18.194888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:18.194894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:18.194910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.196343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:18.213777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.213873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.213938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:18.213982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:18.213990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.215346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.215371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:18.215430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.215440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:18.215445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:18.215449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
1:0 2 -> 3 2024-11-21T17:50:18.215844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.215852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:18.215856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:18.216152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.216159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.216165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.216171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.216801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:18.217447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:18.217503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:18.217691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.217717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.217728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.217781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:18.217788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.217820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:18.217833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:18.218362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.218371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:18.218411Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.218416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:18.218506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.218514Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:18.218525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:18.218529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.218535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:18.218540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.218545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:18.218549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:18.218560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:18.218567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:18.218571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:18.218866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:18.218879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:18.218884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:18.218890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:18.218894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:18.218908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.355614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:18.355675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:18.355682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:18.355752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:18.355769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.355773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:641:2564] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2024-11-21T17:50:18.356325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.356355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.356401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2024-11-21T17:50:18.356887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.356908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:50:18.356951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:50:18.356956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:50:18.357010Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:50:18.357024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.357027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:648:2571] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2024-11-21T17:50:18.357513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.357536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.357564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2024-11-21T17:50:18.357945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.357964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T17:50:18.358006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T17:50:18.358010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T17:50:18.358056Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T17:50:18.358069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.358072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:655:2578] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2024-11-21T17:50:18.358500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.358524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.358548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2024-11-21T17:50:18.358963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.358987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T17:50:18.359033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T17:50:18.359037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T17:50:18.359085Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T17:50:18.359098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.359102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:662:2585] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2024-11-21T17:50:18.359447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.359464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.359489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2024-11-21T17:50:18.359927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.359950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2024-11-21T17:50:18.360001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2024-11-21T17:50:18.360006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2024-11-21T17:50:18.360063Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2024-11-21T17:50:18.360077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.360081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:669:2592] TestWaitNotification: OK eventTxId 109 >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:17.945447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:17.945474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.945479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:17.945483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:17.945489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:17.945492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:17.945501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.945607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:17.954287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
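The StatusInvalidParameter reasons in the SplitWithWrongBoundary output above spell out the boundary rules for splitting a topic partition: the split boundary must be non-empty, strictly greater than the partition's FromBound, and strictly less than its ToBound, with bounds compared as byte strings. The following is a minimal sketch of those checks under assumed names (PartitionRange, ValidateSplitBoundary); it is not the schemeshard's actual validation code.

```cpp
#include <optional>
#include <string>

// Hypothetical sketch of the checks implied by the test output. Note that
// std::string comparison is unsigned-byte lexicographic (char_traits<char>),
// so it matches the hex bound values shown in the log.
struct PartitionRange {
    std::string FromBound;               // empty string = open lower bound
    std::optional<std::string> ToBound;  // unset = open upper bound
};

// Returns an error text, or an empty string when the boundary is acceptable.
std::string ValidateSplitBoundary(const PartitionRange& range,
                                  const std::string& boundary) {
    if (boundary.empty()) {
        return "split boundary is empty";
    }
    if (!range.FromBound.empty() && boundary <= range.FromBound) {
        return "split boundary must be greater than the partition FromBound";
    }
    if (range.ToBound && boundary >= *range.ToBound) {
        return "split boundary must be less than the partition ToBound";
    }
    return {};
}
```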
2024-11-21T17:50:17.954304Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:17.957123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:17.957941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:17.957971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:17.964432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:17.964908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:17.965015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.965114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:17.966780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.967051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.967061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.967097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:17.967104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.967110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:17.967125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.969703Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:17.986481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:17.986591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.986655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:17.986706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:17.986713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.987931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.987959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T17:50:17.988021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.988033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:17.988038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:17.988043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:17.988816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.988828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:17.988833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:17.989834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.989843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.989848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.989854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.990425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:17.991553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:17.991605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:17.991777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.991806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:17.991815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.991891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:17.991898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.991924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.991936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:17.993146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.993157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.993194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.993198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:17.993280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.993287Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:17.993298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:17.993302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.993308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:17.993313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.993318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:17.993322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:17.993334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:17.993341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:17.993344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:17.993693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:17.993708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:17.993713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:17.993718Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:17.993723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.993740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
EvProposeTransactionResult> complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.515743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.515749Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.515764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T17:50:18.515790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2024-11-21T17:50:18.516250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2024-11-21T17:50:18.516269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2024-11-21T17:50:18.516286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2024-11-21T17:50:18.516337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T17:50:18.516356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2024-11-21T17:50:18.516363Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2024-11-21T17:50:18.516368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T17:50:18.516372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T17:50:18.516640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:18.516693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:18.516700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:18.516763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:18.516768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T17:50:18.516772Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:18.548495Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T17:50:18.548556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 378 RawX2: 8589936941 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.548571Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2024-11-21T17:50:18.548580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T17:50:18.566315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T17:50:18.566385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T17:50:18.566397Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T17:50:18.566410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.566415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T17:50:18.566457Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T17:50:18.566489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:18.566934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.567154Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.567160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:18.567215Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.567219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T17:50:18.567281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.567288Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T17:50:18.567297Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T17:50:18.567300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:18.567305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T17:50:18.567308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:18.567312Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:50:18.567316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:50:18.567337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:18.567354Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T17:50:18.567358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:50:18.567488Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:18.567498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:18.567501Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:50:18.567505Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:50:18.567507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:18.567516Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T17:50:18.567519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:409:2378] 2024-11-21T17:50:18.567966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:50:18.568081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.568087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:683:2609] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2024-11-21T17:50:18.571095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.571143Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.571179Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2024-11-21T17:50:18.571609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.571636Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:50:18.571691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:50:18.571697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:50:18.571760Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:50:18.571777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:50:18.571781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:776:2690] TestWaitNotification: OK eventTxId 106 >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> ClientStatsCollector::PrepareQuery [GOOD] >> ClientStatsCollector::CounterCacheMiss >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:18.607075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:18.607101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:18.607106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:18.607111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:18.607117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:18.607121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:18.607130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:18.607211Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:18.617731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:18.617751Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:18.620766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:18.621626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:18.621664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:18.623292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:18.623483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:18.623580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.623668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:18.624796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.625066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.625077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.625115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:18.625122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:18.625128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:18.625147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.626506Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:18.644706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:18.644802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.644860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:18.644906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:18.644914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.645692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.645715Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:18.645773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.645784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:18.645788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:18.645794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:18.646214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.646227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:18.646232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:18.646600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.646612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.646619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.646627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.647272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:18.647702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:18.647753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:18.647929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:18.647953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.647963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.648010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:18.648018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:18.648048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:18.648062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:18.648545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.648554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:18.648593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.648598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:18.648689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.648696Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:18.648709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:18.648713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.648719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:18.648724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:18.648729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:18.648733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:18.648747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:18.648754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:18.648758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:18.649070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:18.649087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:18.649093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:18.649099Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:18.649104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:18.649119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
es { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:18.985714Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:636:2058] recipient: [2:100:2135] Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:639:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:640:2058] recipient: [2:638:2562] Leader for TabletID 72057594046678944 is [2:641:2563] sender: [2:642:2058] recipient: [2:638:2562] 2024-11-21T17:50:18.992883Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:18.992913Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:18.992919Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:18.992924Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:18.992930Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:18.992934Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:18.992942Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:18.993009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:18.994190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:18.994569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:18.994612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:18.994648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:18.994654Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:18.994675Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:18.994779Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:50:18.994798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:18.994804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:18.994812Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.994820Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.994876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:18.994913Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.994963Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.994975Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.994989Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T17:50:18.994995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:18.994999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:18.995002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T17:50:18.995005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:18.995022Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995031Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995068Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:18.995165Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995179Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995247Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995282Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995292Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995302Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995326Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995336Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995370Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995406Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
2024-11-21T17:50:18.995422Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995428Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.995434Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:18.997573Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:18.997597Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:18.997858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:18.997871Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:18.997879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:18.998351Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:641:2563] sender: [2:700:2058] recipient: [2:15:2062] 2024-11-21T17:50:19.049899Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:19.050000Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 122us result status StatusSuccess 2024-11-21T17:50:19.050176Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 
72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTenatsTest::TestTenantLimit >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TCmsTest::WalleRebootDownNode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:19.171057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:19.171079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.171082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:19.171085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:19.171089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:19.171092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:19.171098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.171182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:19.179660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2024-11-21T17:50:19.179680Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:19.183062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:19.183918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:19.183958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:19.185519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:19.185689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:19.185766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.185827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:19.187063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.187332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.187340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.187377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:19.187383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:19.187389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:19.187404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.188854Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:19.201465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:19.201545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.201596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:19.201640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:19.201646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.202278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.202296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T17:50:19.202338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.202345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:19.202347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:19.202351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:19.202804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.202822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:19.202827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:19.203241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.203250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.203255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.203261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.203777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:19.204164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:19.204209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:19.204402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.204424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:19.204433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.204476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:19.204482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.204508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:19.204518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:19.205117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.205128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:19.205167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.205172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:19.205249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.205256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:19.205268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:19.205271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.205276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:19.205281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.205286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:19.205289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:19.205300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:19.205306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:19.205310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:19.205596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:19.205607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:19.205611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:19.205616Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:19.205621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:19.205635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
solution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:19.372694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:749:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:752:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:753:2058] recipient: [1:751:2665] Leader for TabletID 72057594046678944 is [1:754:2666] sender: [1:755:2058] recipient: [1:751:2665] 2024-11-21T17:50:19.379823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:19.379851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.379856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:19.379861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:19.379867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:19.379873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:19.379882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.379951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:19.381180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:19.381508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:19.381554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:19.381590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:19.381596Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:19.381619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:19.381734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:19.381761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: 
Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:19.381770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:19.381856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:19.381949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:19.381952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T17:50:19.381955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:19.381969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.381978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:19.382105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read 
records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.382375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.383824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.383843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.384134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:19.384155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:19.384166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:19.384719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:754:2666] sender: [1:812:2058] recipient: [1:15:2062] 2024-11-21T17:50:19.446286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:19.446333Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 58us result status StatusSuccess 2024-11-21T17:50:19.446425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2024-11-21T17:49:10.038647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791102007852596:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:10.038835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0005d5/r3tmp/tmpb8xym5/pdisk_1.dat 2024-11-21T17:49:10.093688Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30991, node 1 2024-11-21T17:49:10.105535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:10.105547Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:10.105548Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:10.105584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:10.130439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.131510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:10.131527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.132137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 2024-11-21T17:49:10.132251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:10.132264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:49:10.132674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:10.132819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:10.132835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:49:10.134816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.135927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211350184, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:10.135942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:49:10.136010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:49:10.136593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:10.136650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:10.136667Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:49:10.136683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:49:10.136698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:49:10.136713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:49:10.137181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:49:10.137197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:49:10.137201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T17:49:10.137284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:49:10.139299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:10.139325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:10.140614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Triggering split by load TClient is connected to server localhost:23234 2024-11-21T17:49:10.371595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791102007853520:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:10.371814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:10.374089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.374255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:49:10.374446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:10.374452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:10.376750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T17:49:10.376813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:10.376877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:10.376891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:49:10.377342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:10.377347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:10.377351Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:49:10.377390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:10.377391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:10.377393Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:49:10.378045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:49:10.382180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:49:10.382213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:49:10.383457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:49:10.391425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:49:10.391438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:49:10.391460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T17:49:10.391802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:49:10.392457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211350436, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:10.392477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211350436 2024-11-21T17:49:10.392501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:49:10.394910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:10.394991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:10.395008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:49:10.395481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:49:10.395497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:49:10.395501Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPubli ... de: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211350436 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... (TRUNCATED) Table has 2 shards Fast forward > 10h to trigger the merge TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211350436 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) 2024-11-21T17:50:15.512521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.7256 2024-11-21T17:50:15.512545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.6996 2024-11-21T17:50:15.616361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T17:50:15.616454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:15.616575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:50:15.616736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:15.617135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:15.617788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T17:50:15.620787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet72057594046644480 2024-11-21T17:50:15.623641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:50:15.623678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:50:15.624781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:50:15.628871Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7439791381181855360:8412] 2024-11-21T17:50:15.631133Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2024-11-21T17:50:15.631170Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2024-11-21T17:50:15.631204Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T17:50:15.632774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId#281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2024-11-21T17:50:15.632799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 131 2024-11-21T17:50:15.633248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:15.637441Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2024-11-21T17:50:15.637480Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:50:15.637499Z node 1 
:TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2024-11-21T17:50:15.637509Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2024-11-21T17:50:15.637610Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T17:50:15.638383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:50:15.638848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037889 2024-11-21T17:50:15.638934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2024-11-21T17:50:15.639049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 131 -> 132 2024-11-21T17:50:15.639574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:50:15.639657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:50:15.639673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:15.640031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:15.640055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:15.640060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2024-11-21T17:50:15.641327Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T17:50:15.641581Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2024-11-21T17:50:15.641598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2024-11-21T17:50:15.641646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2024-11-21T17:50:15.641662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:50:15.641671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:50:15.642327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715658:0 2024-11-21T17:50:15.642335Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:50:15.642419Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:50:15.642521Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:15.642614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:15.643824Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2024-11-21T17:50:15.643840Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2024-11-21T17:50:15.643945Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2024-11-21T17:50:15.643951Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2024-11-21T17:50:15.643954Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2024-11-21T17:50:15.644058Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2024-11-21T17:50:15.644075Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2024-11-21T17:50:15.644124Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T17:50:15.738910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T17:50:15.739022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:15.739333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732211350436 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Version" Type: "Uint32" ... 
(TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:19.189186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:19.189210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.189215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:19.189220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:19.189226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:19.189230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:19.189238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.189323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:19.199309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:19.199327Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:19.202885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:19.203769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:19.203802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:19.205327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:19.205551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:19.205647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.205738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:19.206622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.206845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.206853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.206886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:19.206894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:19.206900Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:19.206912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.207985Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:19.224475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:19.224549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.224595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:19.224627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:19.224632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.225109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.225124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:19.225159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.225165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:19.225167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:19.225170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:19.225504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.225511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:19.225514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:19.225755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.225760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.225765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.225769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.226227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:19.226576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:19.226606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:19.226719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.226735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:19.226740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.226774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:19.226778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.226799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:19.226807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:19.227089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.227094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:19.227116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.227119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:19.227165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.227170Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:19.227177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:19.227179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.227183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:19.227186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.227189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:19.227191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2024-11-21T17:50:19.227198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:19.227201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:19.227203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:19.227387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:19.227396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:19.227399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:19.227402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:19.227404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:19.227413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... perationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:19.750918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:19.750925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:19.751015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:19.751020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T17:50:19.751023Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:19.782287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.782318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 373 RawX2: 8589936935 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:19.782328Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2024-11-21T17:50:19.782333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T17:50:19.798213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T17:50:19.798257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 
2024-11-21T17:50:19.798267Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T17:50:19.798277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.798280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T17:50:19.798315Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T17:50:19.798340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:19.798832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.798925Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.798931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:19.798995Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.799001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T17:50:19.799062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.799071Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T17:50:19.799084Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T17:50:19.799087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:19.799095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T17:50:19.799100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:19.799105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:50:19.799109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:50:19.799140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:19.799146Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T17:50:19.799149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:50:19.799398Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:19.799419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:19.799422Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:50:19.799424Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:50:19.799427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:19.799438Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T17:50:19.799441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T17:50:19.800266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:50:19.800297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:19.800305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:683:2606] TestWaitNotification: OK eventTxId 105 2024-11-21T17:50:19.803137Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:19.803193Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 67us result status StatusSuccess 2024-11-21T17:50:19.803374Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: 
"\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTest::RequestRestartServicesMultipleNodes >> ClientStatsCollector::CounterCacheMiss [GOOD] >> ClientStatsCollector::CounterRetryOperation >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> DataShardSnapshots::MvccSnapshotReadLockedWrites [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> TCmsTest::TestKeepAvailableMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:17.935494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:17.935516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.935521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:17.935525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:17.935530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:17.935533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:17.935541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:17.935632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:17.946362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:17.946383Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:17.949253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:17.949852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:17.949890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:17.951117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:17.951326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:17.951427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.951512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:17.952497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.952758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.952768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.952806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:17.952813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.952819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:17.952833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.954115Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:17.970049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:17.970128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.970190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:17.970234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:17.970241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.970833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.970865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:17.970915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.970926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:17.970930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:17.970935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:17.971317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.971327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:17.971332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:17.971647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.971656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.971663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.971670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.972327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:17.972719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:17.972765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:17.972933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:17.972959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:17.972969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.973018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:17.973024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:17.973054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.973066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:17.973450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:17.973459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:17.973495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:17.973500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:17.973591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:17.973599Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:17.973610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:17.973614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.973619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:17.973625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:17.973629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:17.973633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:17.973643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:17.973649Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:17.973654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:17.973964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:17.973981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:17.973986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:17.973992Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:17.973997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:17.974012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.986387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T17:50:19.986435Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T17:50:19.986469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:19.987084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.987149Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.987155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:19.987223Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.987229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T17:50:19.987331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.987338Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T17:50:19.987347Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T17:50:19.987349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:19.987353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T17:50:19.987356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:19.987360Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:50:19.987362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:50:19.987385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:19.987389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2024-11-21T17:50:19.987392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:50:19.987501Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:19.987509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:19.987514Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:50:19.987517Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:50:19.987520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:19.987530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2024-11-21T17:50:19.988054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:19.990051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:19.990061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:19.990145Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:19.990163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:19.990168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:771:2684] TestWaitNotification: OK eventTxId 105 2024-11-21T17:50:20.497487Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:20.497590Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 115us result status StatusSuccess 2024-11-21T17:50:20.497787Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:20.558954Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:20.559048Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 112us result status StatusSuccess 2024-11-21T17:50:20.559176Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:16.048898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:16.048924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T17:50:16.048930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:16.048936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:16.048941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:16.048945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:16.048954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:16.049057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:16.059492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:16.059513Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:16.065083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:16.065718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:16.065753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:16.067331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:16.067609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:16.067750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.067833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:16.068969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.069286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.069296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.069348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:16.069355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.069362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:16.069378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.070991Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:16.095104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:16.095183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.095243Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:16.095289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:16.095298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.096627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.096671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:16.096741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.096753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:16.096758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:16.096763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:16.100866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.100896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:16.100904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:16.101493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.101508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.101515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.101522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.102230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:16.102704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:16.102756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:16.102947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:16.102975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:16.102982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.103042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:16.103051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:16.103081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.103093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:16.103545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:16.103554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:16.103585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:16.103589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:16.103648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:16.103655Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:16.103665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:16.103669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.103674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:16.103678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:16.103682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:16.103685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:16.103698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:16.103703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:16.103706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:16.104031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:16.104044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2024-11-21T17:50:16.104049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:16.104053Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:16.104057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:16.104069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:20.460256Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:20.460270Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:20.460319Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:20.460363Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:20.460370Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:20.460376Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T17:50:20.460497Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:20.460507Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:20.460917Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:20.460932Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:20.460937Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:20.460944Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:20.460949Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:20.461047Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:20.461057Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:20.461060Z node 18 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:20.461064Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:50:20.461068Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:20.461078Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T17:50:20.461300Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 269 } } 2024-11-21T17:50:20.461308Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:20.461324Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 269 } } 2024-11-21T17:50:20.461339Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 269 } } FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:50:20.461461Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:20.461466Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:20.461478Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:20.461482Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:20.461490Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:20.461501Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:20.461505Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 
72057594046678944 2024-11-21T17:50:20.461509Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:20.461514Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T17:50:20.461995Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:20.462275Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:20.462293Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:20.462310Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:20.462325Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:20.462331Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:20.462344Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:20.462348Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:20.462354Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T17:50:20.462367Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:339:2314] message: TxId: 101 2024-11-21T17:50:20.462373Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:20.462378Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:20.462383Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:20.462406Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:20.462831Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:20.462841Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:340:2315] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:20.462951Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:20.463012Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 70us result status StatusSuccess 2024-11-21T17:50:20.463129Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive >> TCmsTest::ManagePermissions >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> TCmsTest::ManageRequestsWrong >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:19.324384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:19.324407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.324411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:19.324415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:19.324421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:19.324426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2024-11-21T17:50:19.324433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:19.324516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:19.331813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:19.331828Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:19.334483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:19.335065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:19.335089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:19.336284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:19.336518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:19.336614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.336710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:19.337848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.338051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.338058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.338085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:19.338090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:19.338094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:19.338106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.339132Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:19.353302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:19.353386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.353442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:19.353487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:19.353494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.354194Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.354215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:19.354269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.354280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:19.354285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:19.354289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:19.354625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.354634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:19.354638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:19.354976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.354984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.354991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.354997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.355517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:19.355874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:19.355919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:19.356083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:19.356106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:19.356115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.356164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:19.356171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:19.356200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:19.356213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:19.356643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:19.356651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:19.356689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:19.356696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:19.356773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:19.356781Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:19.356793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:19.356797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.356803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:19.356807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:19.356811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:19.356816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:19.356826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:19.356832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:19.356836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:19.357125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:19.357138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:19.357143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:19.357148Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:19.357152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:19.357166Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & r ... ishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T17:50:20.447849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:20.447856Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T17:50:20.447865Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T17:50:20.447868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:20.447872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T17:50:20.447875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:20.447879Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:50:20.447882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:50:20.447905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:20.447909Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T17:50:20.447912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:50:20.448018Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:20.448026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:20.448029Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:50:20.448033Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:50:20.448035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:20.448043Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T17:50:20.448046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T17:50:20.448997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:50:20.449041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:20.449046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:714:2635] TestWaitNotification: OK eventTxId 105 2024-11-21T17:50:20.922479Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T17:50:20.922582Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 113us result status StatusSuccess 2024-11-21T17:50:20.922731Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:20.994021Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:20.994116Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 113us result status StatusSuccess 2024-11-21T17:50:20.994267Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2024-11-21T17:50:20.994788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:20.994817Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:50:20.994853Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto 
partitioning., at schemeshard: 72057594046678944 2024-11-21T17:50:20.995604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:20.995636Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:50:20.995702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:50:20.995707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:50:20.995766Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:50:20.995781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:50:20.995784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:780:2694] TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:05.311012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:05.311034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:05.311040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:05.311044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:05.311050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:05.311054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:05.311062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:05.311140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:05.321474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:05.321494Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:05.337851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:05.338779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:05.338819Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:05.340330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:05.340528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:05.340636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.340708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:05.341557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.341829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:05.341838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.341875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:05.341881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.341887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:05.341902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.343481Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:05.359596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:05.359676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.359739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:05.359780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:05.359786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.360647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.360681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:05.360733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.360741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T17:50:05.360746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:05.360750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:05.361140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.361149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:05.361154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:05.361730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.361739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.361744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.361751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.362321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:05.363214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:05.363270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:05.363447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:05.363499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:05.363507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.363558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:05.363564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:05.363595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:05.363605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:05.364016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:05.364023Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:05.364065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:05.364070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:05.364136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.364143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:05.364153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:05.364157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.364163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:05.364169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:05.364173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:05.364177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:05.364187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:05.364192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:05.364196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:05.364494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:05.364506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:05.364511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:05.364516Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:05.364521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:05.364533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ard::TEvSchemaChanged> complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.499155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:05.499162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T17:50:05.499175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:50:05.499179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:05.499185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:50:05.499199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 103 2024-11-21T17:50:05.499205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:05.499210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:50:05.499214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:50:05.499237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:05.500667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:05.500681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:496:2460] TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:09.529133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:50:09.529156Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:10.936419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0075 2024-11-21T17:50:10.936458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0127 2024-11-21T17:50:10.972362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T17:50:10.972452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:10.972490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T17:50:10.972500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:10.972544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T17:50:10.972551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T17:50:10.972555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:10.983869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:13.567401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0036 2024-11-21T17:50:13.567424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0023 2024-11-21T17:50:13.607981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T17:50:13.608052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T17:50:13.608070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T17:50:13.608077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:13.608114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:13.608119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T17:50:13.608122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:13.618292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:16.187938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0023 2024-11-21T17:50:16.187975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0036 2024-11-21T17:50:16.218425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T17:50:16.218508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:16.218527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 
2024-11-21T17:50:16.218535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:16.218574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T17:50:16.218581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T17:50:16.218585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:16.228783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:18.661693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0011 2024-11-21T17:50:18.661719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0013 2024-11-21T17:50:18.692272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T17:50:18.692326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:18.692344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T17:50:18.692352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:18.692388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T17:50:18.692394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T17:50:18.692398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:18.702568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:21.111729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T17:50:21.111766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:21.111830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:21.111873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 
WallClockTimestamp: 60025000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:21.112060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:21.112140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:21.112145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:21.112956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:21.112991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:21.112996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.025000Z, at schemeshard: 72057594046678944 2024-11-21T17:50:21.113004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> TCmsTest::WalleTasks >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:07.192905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:07.192933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.192939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:07.192945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:07.192952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:07.192956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:07.192966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:07.193064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:07.203520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:07.203547Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:07.205704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:07.205775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:07.205799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:07.209817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:07.209905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:07.210006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.210222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:07.210977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.211301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.211313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.211325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:07.211331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.211338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:07.211381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:07.212772Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:07.230175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:07.230260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.230328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:07.230375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:07.230382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.231258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.231284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:07.231340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.231351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:07.231355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:07.231360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:07.231755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.231764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:07.231769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:07.232065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.232073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.232078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.232085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.232662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:07.233079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:07.233131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:07.233316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:07.233343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:07.233350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.233405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:07.233412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:07.233445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:07.233456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:07.233841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:07.233852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:07.233897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:07.233901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:07.233984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:07.233990Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:07.234000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:07.234005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.234011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:07.234016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:07.234020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:07.234025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:07.234034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T17:50:07.234040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:07.234044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... terRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1003 at step: 5000004 2024-11-21T17:50:21.435676Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:21.435689Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 219043334250 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:21.435693Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:50:21.435746Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2024-11-21T17:50:21.435764Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T17:50:21.436700Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:21.436709Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:21.436742Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:21.436745Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:50:21.436833Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:21.436838Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:21.436903Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 146 } } 2024-11-21T17:50:21.436909Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:21.436919Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 146 } } 
2024-11-21T17:50:21.436928Z node 51 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 146 } } 2024-11-21T17:50:21.436975Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:21.436984Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:21.436988Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:50:21.436992Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:50:21.436996Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:50:21.437008Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:50:21.437076Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:21.437081Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:21.437092Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:21.437097Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:21.437103Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:21.437111Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:21.437114Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:21.437117Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:21.437120Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:50:21.437678Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:21.437697Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:21.437705Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:21.437715Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:21.437718Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:50:21.437725Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:50:21.437727Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:50:21.437730Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:50:21.437733Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:50:21.437735Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:50:21.437737Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:50:21.437752Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:50:21.438078Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:50:21.438083Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:50:21.438112Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:50:21.438121Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:21.438123Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:444:2419] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:50:21.438161Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:21.438181Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 27us result status StatusSuccess 2024-11-21T17:50:21.438253Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: 
"Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> TCmsTest::ActionIssuePartialPermissions >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects |81.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:12.122824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.122853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.122859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.122865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.122871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.122874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.122885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.122994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.135946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.135977Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.138731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.138764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.138798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.141720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.141781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.141890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.141948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.142595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.142912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.142928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.142980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.142988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.142995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.143009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.144347Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.162811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.162923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.163003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.163061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.163070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:12.163892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.163943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.164007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.164018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.164023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.164028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.164436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.164448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.164453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.164759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.164771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.164778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.164785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.165428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.166078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.166141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.166387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.166419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.166436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.166494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.166501Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.166542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.166555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.167063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.167072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.167117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.167123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.167209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.167216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.167228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.167232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.167238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.167243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.167248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.167251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.167263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.167270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.167274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.167604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.167623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.167628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.167632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.167637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.167653Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... d manually' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2024-11-21T17:50:22.237893Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:50:22.237897Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:50:22.237902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:22.237919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:22.237992Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.237998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.238001Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:22.238003Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T17:50:22.238006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:22.238076Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.238081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.238084Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:22.238086Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T17:50:22.238088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:22.238095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:50:22.238388Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T17:50:22.238404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:50:22.238407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 
2024-11-21T17:50:22.238410Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:50:22.238585Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T17:50:22.238601Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:22.238623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2024-11-21T17:50:22.238660Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:22.238674Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 12884904042 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:22.238677Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2024-11-21T17:50:22.238691Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:50:22.238696Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T17:50:22.238698Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:22.238703Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:22.238708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:22.238712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T17:50:22.238715Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:22.238718Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T17:50:22.238719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T17:50:22.238724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:22.238727Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T17:50:22.238730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:50:22.238732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:50:22.238990Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.239162Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:22.239167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:22.239184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:22.239196Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:22.239198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T17:50:22.239201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T17:50:22.239280Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.239285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.239288Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:22.239290Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:22.239292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:22.239340Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.239345Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.239347Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:22.239349Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:50:22.239351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:22.239356Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 
2024-11-21T17:50:22.239358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:124:2150] 2024-11-21T17:50:22.239703Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.239728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:22.239738Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T17:50:22.239744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T17:50:22.239748Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T17:50:22.239750Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2024-11-21T17:50:22.239753Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2024-11-21T17:50:22.239928Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T17:50:22.239938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:22.239942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:753:2693] TestWaitNotification: OK eventTxId 103 >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TCmsTenatsTest::TestNoneTenantPolicy >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] |81.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |81.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2024-11-21T17:50:18.146527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791393700818714:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:18.146712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001225/r3tmp/tmpgnXTQ5/pdisk_1.dat 2024-11-21T17:50:18.216962Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15367, node 1 2024-11-21T17:50:18.247384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:18.247410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:18.248579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2024-11-21T17:50:18.256472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:18.256500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:18.256504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:18.256554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:18.285567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:18.286859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:18.286878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:18.288066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:50:18.288151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:50:18.288161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:50:18.288646Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:18.288654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:50:18.288846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:18.289057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:18.289778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211418336, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:50:18.289785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:50:18.289848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:50:18.290278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:50:18.290334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:50:18.290344Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:50:18.290356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:50:18.290365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:50:18.290380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:50:18.290866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:50:18.290887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:50:18.290891Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:50:18.290905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:50:18.508319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791393700819637:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:18.508338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791393700819645:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:18.508341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:18.508803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:50:18.508842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:18.508850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715658:1, at schemeshard: 72057594046644480 2024-11-21T17:50:18.508857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:18.508858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:50:18.508868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:18.508881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:50:18.508919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 1 -> 128 2024-11-21T17:50:18.508980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:18.508990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:50:18.509744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:50:18.509809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:50:18.509894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:50:18.509920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T17:50:18.509955Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:18.509971Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:18.509986Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:18.510012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:50:18.510176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 
2024-11-21T17:50:18.510192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:18.510195Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:50:18.510241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:18.510248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:18.510249Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:50:18.510259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:18.510260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:18.510261Z node 1 :FLAT ... :50:22.564769Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:50:22.564787Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T17:50:22.564807Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:22.564817Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:22.564828Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:22.564848Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:50:22.565029Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.565039Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.565041Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:50:22.565065Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.565070Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.565071Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:50:22.565079Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, 
msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.565085Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.565086Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:50:22.565094Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.565100Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.565100Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:50:22.565108Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.565115Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.565116Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T17:50:22.565746Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211422613, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:50:22.565758Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211422613, at schemeshard: 72057594046644480 2024-11-21T17:50:22.565775Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:50:22.565793Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211422613, at schemeshard: 72057594046644480 2024-11-21T17:50:22.565802Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T17:50:22.565808Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211422613, at schemeshard: 72057594046644480 2024-11-21T17:50:22.565814Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T17:50:22.565820Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732211422613 2024-11-21T17:50:22.565827Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T17:50:22.566124Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:50:22.566181Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:50:22.566191Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T17:50:22.566198Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#281474976715658:3 progress is 1/4 2024-11-21T17:50:22.566214Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T17:50:22.566221Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T17:50:22.566227Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:50:22.566229Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T17:50:22.566235Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T17:50:22.566242Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T17:50:22.566245Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:50:22.566249Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T17:50:22.566255Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T17:50:22.566257Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T17:50:22.566258Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T17:50:22.566455Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.566473Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.566479Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:50:22.566500Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.566506Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.566507Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T17:50:22.566516Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.566521Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.566522Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:50:22.566530Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.566531Z node 13 
:FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.566532Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:50:22.566539Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:50:22.566545Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:50:22.566546Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T17:50:22.566549Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:50:22.566916Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439791411073683933:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:22.654295Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:50:22.654333Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:50:22.655067Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TCmsTest::TestForceRestartMode >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> TCmsTest::StateRequest >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplaceDevices >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:50:01.581654Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:01.581678Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.585832Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:01.587774Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:50:01.588001Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:50:01.588522Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:50:01.588943Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:50:01.589351Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.591008Z node 1 :PERSQUEUE INFO: new Cookie default|966b7a9-be7d481d-ea56fdf9-2650253b_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.650489Z node 1 :PERSQUEUE INFO: new Cookie default|67a42995-4220cac3-18e19eea-c5d12071_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.730447Z node 1 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.739252Z node 1 :PERSQUEUE INFO: new Cookie default|a86a4b1c-cbec313-35e293d1-deb049e1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.809567Z node 1 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:286:2057] recipient: [1:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:289:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:290:2057] recipient: [1:288:2284] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:291:2285] sender: [1:292:2057] recipient: [1:288:2284] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.819328Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:01.819349Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:01.819458Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:342:2328] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:50:01.820013Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:343:2329] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:50:01.823881Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:342:2328] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:50:01.824472Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:343:2329] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:291:2285] sender: [1:373:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T17:50:02.022713Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:02.022740Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: 
[2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:02.026881Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:02.027118Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T17:50:02.027253Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T17:50:02.027860Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T17:50:02.028137Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T17:50:02.028557Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:02.030392Z node 2 :PERSQUEUE INFO: new Cookie default|e21d8e97-aa779b84-caab5945-cb72fd67_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:02.082603Z node 2 :PERSQUEUE INFO: new Cookie default|25bbebab-f91d2019-ad19a137-6ed71d49_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:02.164142Z node 2 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:02.173016Z node 2 :PERSQUEUE INFO: new Cookie default|5b94e5a5-323d9e2d-ee7f0a8c-2dc8fb2f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TS ... 
up to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.155720Z node 54 :PERSQUEUE INFO: new Cookie default|9e354279-c7691911-60db92c8-4ef2b7a9_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.168129Z node 54 :PERSQUEUE INFO: new Cookie default|18c1bcd8-906d694c-c7f9bb53-bf20273_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.204659Z node 54 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.213224Z node 54 :PERSQUEUE INFO: new Cookie default|63aaa37-f472cd15-b2063c64-9d111a61_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.251309Z node 54 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:286:2057] recipient: [54:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:289:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:290:2057] recipient: [54:288:2284] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:291:2285] sender: [54:292:2057] recipient: [54:288:2284] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.258376Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:23.258392Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:23.258475Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:342:2328] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:50:23.258846Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:343:2329] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:50:23.261686Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:342:2328] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:50:23.262272Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:343:2329] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:291:2285] sender: [54:375:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:106:2057] recipient: [55:99:2133] 2024-11-21T17:50:23.390872Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:23.390888Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] Leader for TabletID 72057594037927938 is [55:151:2172] sender: [55:152:2057] recipient: [55:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is 
[55:105:2137] sender: [55:177:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.393724Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:23.393859Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2024-11-21T17:50:23.393941Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:184:2197] 2024-11-21T17:50:23.394334Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:50:23.394574Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:185:2198] 2024-11-21T17:50:23.394832Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.395799Z node 55 :PERSQUEUE INFO: new Cookie default|7f2cc90c-5686c810-f013cd77-4209164f_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.407464Z node 55 :PERSQUEUE INFO: new Cookie default|4f7b4cf9-30856020-4314b4f1-e0467254_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.440026Z node 55 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.447446Z node 55 :PERSQUEUE INFO: new Cookie default|13117bb6-7ad62824-394f2aaa-57b29eac_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.479078Z node 55 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:286:2057] recipient: [55:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:289:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:290:2057] recipient: [55:288:2284] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:291:2285] sender: [55:292:2057] recipient: [55:288:2284] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:23.485611Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:23.485625Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:23.485709Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:342:2328] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:50:23.486101Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:343:2329] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:50:23.488689Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:342:2328] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:50:23.489197Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:343:2329] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:291:2285] sender: [55:373:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:229:2060] recipient: [1:212:2140] 2024-11-21T17:49:43.800735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:43.800760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:43.800765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:43.800771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:43.800776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:43.800779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:43.800789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:43.800874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:43.811331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:43.811353Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:43.814365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:43.814407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:43.814434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:43.816385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:43.816439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:43.816535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:43.816588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:43.817159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:43.817444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:43.817457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:43.817489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:43.817497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:43.817503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2024-11-21T17:49:43.817522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.818936Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:339:2060] recipient: [1:17:2064] 2024-11-21T17:49:43.837209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:43.837287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.837356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:43.837401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:43.837408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.838201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:43.838223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:43.838279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.838297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:43.838301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:43.838306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:43.838656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.838665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:43.838669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:43.838975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.838982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.838986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:43.838992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:43.839563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:43.839873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:43.839915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:43.840078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:43.840096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:43.840104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:43.840151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:43.840158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:43.840181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:43.840190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:43.840526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:43.840535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:43.840574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:43.840579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:306:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:43.840665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.840671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:43.840680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:43.840684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:43.840689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:43.840694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:43.840698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:43.840702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T17:49:43.840711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:43.840717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:43.840721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:43.840973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:43.840984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:43.840988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:43.840992Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:43.840996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:43.841007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ode 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T17:50:23.411444Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:50:23.411446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2024-11-21T17:50:23.411457Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:952:2730] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T17:50:23.411469Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:952:2730]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2024-11-21T17:50:23.411473Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T17:50:23.411476Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2024-11-21T17:50:23.411482Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2024-11-21T17:50:23.411504Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:50:23.411507Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:50:23.411510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T17:50:23.411514Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2024-11-21T17:50:23.411521Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:50:23.411523Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2024-11-21T17:50:23.411525Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 
2/3 2024-11-21T17:50:23.411530Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T17:50:23.411553Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:23.411555Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:50:23.411557Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2024-11-21T17:50:23.411561Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:962:2738] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T17:50:23.411568Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:962:2738]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2024-11-21T17:50:23.411570Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T17:50:23.411571Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2024-11-21T17:50:23.411574Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2024-11-21T17:50:23.411592Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:50:23.411595Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:50:23.411597Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:23.411599Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:50:23.411602Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:50:23.411604Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2024-11-21T17:50:23.411606Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T17:50:23.411608Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T17:50:23.411614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:571:2402] message: TxId: 104 2024-11-21T17:50:23.411616Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T17:50:23.411621Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:50:23.411624Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:50:23.411639Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2024-11-21T17:50:23.411643Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T17:50:23.411644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2024-11-21T17:50:23.411647Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2024-11-21T17:50:23.411649Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T17:50:23.411652Z node 
7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T17:50:23.411658Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2024-11-21T17:50:23.411960Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:50:23.411975Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:50:23.411986Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:571:2402] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T17:50:23.412006Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:23.412009Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1013:2777] 2024-11-21T17:50:23.412038Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1015:2779], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:23.412042Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:23.412045Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T17:50:23.412179Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:545:2100], Recipient [7:229:2151] 2024-11-21T17:50:23.412182Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:50:23.412651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:23.412723Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42, at schemeshard: 72057594046678944 2024-11-21T17:50:23.412728Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42, at schemeshard: 72057594046678944 2024-11-21T17:50:23.412773Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:50:23.413103Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:23.413133Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp:42, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2024-11-21T17:50:23.413139Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:23.413224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:23.413232Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:23.413280Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1077:2841], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:23.413285Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:23.413288Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:50:23.413306Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:571:2402], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2024-11-21T17:50:23.413310Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:50:23.413320Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:23.413334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:23.413338Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1075:2839] 2024-11-21T17:50:23.413358Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1077:2841], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:23.413362Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:23.413365Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TCmsTest::TestOutdatedState >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestKeepAvailableModeScheduled >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TCmsTest::CollectInfo >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::Notifications >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> 
TCmsTenatsTest::TestClusterLimit >> BsControllerConfig::AddDriveSerialMassive [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T17:50:17.626794Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:17.627602Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:17.627999Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:17.628090Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:17.628256Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:17.628264Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:17.628291Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:17.629149Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:17.629174Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:17.629200Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:17.629213Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:17.629224Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:17.629232Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2024-11-21T17:50:17.639685Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:17.639726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:17.650049Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:17.650098Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:17.650118Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:17.650128Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:17.650153Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:17.650160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:17.650167Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:17.650175Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:17.660437Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:17.660496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:17.660676Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:17.660683Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:17.660707Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:17.662845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T17:50:17.663176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T17:50:17.663276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T17:50:19.028347Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:19.028453Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:19.028600Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:19.028654Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:19.028725Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:19.028728Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:19.028749Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:19.029219Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:19.029233Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:19.029250Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:19.029258Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:19.029266Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:19.029271Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2024-11-21T17:50:19.039584Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:19.039625Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:19.049902Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:19.049942Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:19.049953Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:19.049961Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:19.049978Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:19.049983Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:19.049987Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:19.049992Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:19.060298Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:19.060344Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:19.060483Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:19.060488Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:19.060511Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} 
TTxLoadEverything InitQueue processed 2024-11-21T17:50:19.060613Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T17:50:19.060785Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T17:50:19.060851Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:183:2075] Leader for TabletID 72057594037932033 is [21:206:2077] sender: [21:207:2066] recipient: [21:183:2075] 2024-11-21T17:50:21.038827Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:21.038938Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:21.039099Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:21.039149Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:21.039216Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NAct ... 01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2024-11-21T17:50:21.071628Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2024-11-21T17:50:21.071694Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2024-11-21T17:50:21.071761Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2024-11-21T17:50:21.071829Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2024-11-21T17:50:21.071924Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2024-11-21T17:50:21.072009Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2024-11-21T17:50:21.072084Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2024-11-21T17:50:21.072160Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2024-11-21T17:50:21.072255Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2024-11-21T17:50:21.072332Z node 21 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2024-11-21T17:50:21.072414Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2024-11-21T17:50:21.072488Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2024-11-21T17:50:21.072568Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2024-11-21T17:50:21.072644Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2024-11-21T17:50:21.072724Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2024-11-21T17:50:21.072805Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2024-11-21T17:50:21.072883Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:188:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:188:2075] Leader for TabletID 72057594037932033 is [31:206:2077] sender: [31:207:2066] recipient: [31:188:2075] 2024-11-21T17:50:23.056318Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:23.056423Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:23.056575Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:23.056643Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:23.056714Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:23.056717Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:23.056739Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:23.057250Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:23.057264Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:23.057281Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:23.057289Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:23.057297Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:23.057303Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:206:2077] sender: [31:229:2066] recipient: [31:20:2067] 2024-11-21T17:50:23.067623Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:23.067669Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:23.078019Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:23.078078Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:23.078124Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:23.078142Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:23.078173Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:23.078185Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:23.078191Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:23.078198Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:23.088489Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:23.088532Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:23.088645Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:23.088649Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:23.088666Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:23.088750Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2024-11-21T17:50:23.088888Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2024-11-21T17:50:23.088935Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2024-11-21T17:50:23.088978Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { 
Serial: "SN_3" BoxId: 1 } } } 2024-11-21T17:50:23.089039Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2024-11-21T17:50:23.089085Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2024-11-21T17:50:23.089132Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2024-11-21T17:50:23.089178Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2024-11-21T17:50:23.089225Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2024-11-21T17:50:23.089269Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2024-11-21T17:50:23.089330Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2024-11-21T17:50:23.089377Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2024-11-21T17:50:23.089425Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2024-11-21T17:50:23.089469Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2024-11-21T17:50:23.089525Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2024-11-21T17:50:23.089576Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2024-11-21T17:50:23.089621Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2024-11-21T17:50:23.089669Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2024-11-21T17:50:23.089715Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2024-11-21T17:50:23.089762Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] Test command err: 2024-11-21T17:50:19.956027Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:19.957030Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:19.958131Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:19.958743Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:19.958779Z node 1 
:CMS DEBUG: Using default config. 2024-11-21T17:50:19.958841Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:19.959178Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:19.959229Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:19.959453Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:19.959759Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:19.960661Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:19.960694Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:19.960715Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:19.960739Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:19.978085Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:19.999580Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:19.999689Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.000810Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.000985Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:20.000992Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:20.001003Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:20.001007Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:20.001064Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:20.001078Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:20.001146Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.002701Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId 
{ NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { 
NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:20.023607Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.023648Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:21.629866Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:21.630501Z node 9 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:21.631333Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:21.631407Z node 9 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:21.631433Z node 9 :CMS DEBUG: Using default config. 2024-11-21T17:50:21.631499Z node 9 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:21.632332Z node 9 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:21.632386Z node 9 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:21.632699Z node 9 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:21.632826Z node 9 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:21.633991Z node 9 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:21.634012Z node 9 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:21.634073Z node 9 :CMS DEBUG: Using default config 2024-11-21T17:50:21.634093Z node 9 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:21.645660Z node 9 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:21.666681Z node 9 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:21.666796Z node 9 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.666811Z node 9 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:21.666896Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:21.666901Z node 9 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:21.666909Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:21.666913Z node 9 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:21.666923Z node 9 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.666947Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:21.666957Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:21.667191Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 9 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 
10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1000 } GroupGeneration: 1 ... h state: Up, with limit: 0, with ratio limit: 0, locked nodes: 4, down nodes: 0 2024-11-21T17:50:21.885100Z node 9 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:21.885106Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 2024-11-21T17:50:21.885109Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2024-11-21T17:50:21.885111Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2024-11-21T17:50:21.885113Z node 9 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:21.885117Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 2024-11-21T17:50:21.885119Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 6, down nodes: 0 2024-11-21T17:50:21.885121Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 6, down nodes: 0 2024-11-21T17:50:21.885124Z node 9 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:21.885127Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 2024-11-21T17:50:21.885130Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 7, down nodes: 0 2024-11-21T17:50:21.885132Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 7, down nodes: 0 2024-11-21T17:50:21.885134Z node 9 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:21.885146Z node 9 :CMS DEBUG: Accepting permission: id# user-p-5, requestId# user-r-4, owner# user 2024-11-21T17:50:21.885149Z node 9 :CMS INFO: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.885152Z node 9 :CMS DEBUG: Accepting permission: id# user-p-6, requestId# user-r-4, owner# user 2024-11-21T17:50:21.885155Z node 9 :CMS INFO: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.885157Z node 9 :CMS DEBUG: Accepting permission: id# user-p-7, requestId# user-r-4, owner# user 2024-11-21T17:50:21.885160Z node 9 :CMS INFO: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.885163Z node 9 :CMS DEBUG: Accepting permission: id# user-p-8, requestId# user-r-4, owner# user 
2024-11-21T17:50:21.885165Z node 9 :CMS INFO: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.885171Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:21.885201Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-5, validity# 1970-01-01T00:03:00.336560Z, action# Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 2024-11-21T17:50:21.885207Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-6, validity# 1970-01-01T00:03:00.336560Z, action# Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 2024-11-21T17:50:21.885211Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-7, validity# 1970-01-01T00:03:00.336560Z, action# Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 2024-11-21T17:50:21.885215Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-8, validity# 1970-01-01T00:03:00.336560Z, action# Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 2024-11-21T17:50:21.895921Z node 9 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:21.896007Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-4" Permissions { Id: "user-p-5" Action { Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 } Deadline: 180336560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12013 } } } Permissions { Id: "user-p-6" Action { Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 } Deadline: 180336560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12014 } } } Permissions { Id: "user-p-7" Action { Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 } Deadline: 180336560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12015 } } } Permissions { Id: "user-p-8" Action { Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 } Deadline: 180336560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12016 } } } } 2024-11-21T17:50:21.907572Z node 9 :CMS INFO: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907590Z node 9 :CMS INFO: Adding lock for Host ::1:12009 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907594Z node 9 :CMS INFO: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907614Z node 9 :CMS INFO: Adding lock for Host ::1:12010 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907618Z node 9 :CMS INFO: Adding lock for Host ::1:12012 (20) (permission user-p-4 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907622Z node 9 :CMS INFO: Adding lock for Host ::1:12011 
(19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907625Z node 9 :CMS INFO: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907628Z node 9 :CMS INFO: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907671Z node 9 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:21.907686Z node 9 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:21.907695Z node 9 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:21.907784Z node 9 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:21.907789Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 2024-11-21T17:50:21.907795Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 8, down nodes: 0 2024-11-21T17:50:21.907820Z node 9 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:21.907823Z node 9 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:21.907824Z node 9 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:21.907827Z node 9 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:21.907838Z node 9 :CMS DEBUG: Accepting permission: id# user-p-9, requestId# user-r-5, owner# user 2024-11-21T17:50:21.907841Z node 9 :CMS INFO: Adding lock for Host ::1:12001 (9) (permission user-p-9 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.907847Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:21.907881Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-9, validity# 1970-01-01T00:03:00.438072Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 2024-11-21T17:50:21.918543Z node 9 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:21.918615Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-5" Permissions { Id: "user-p-9" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } Deadline: 180438072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2024-11-21T17:50:21.930006Z node 9 :CMS INFO: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930025Z node 9 :CMS INFO: Adding lock for Host ::1:12009 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930029Z node 9 :CMS INFO: Adding lock for Host ::1:12001 (9) (permission user-p-9 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930032Z node 9 :CMS INFO: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930036Z node 9 :CMS INFO: Adding lock for Host ::1:12010 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930039Z node 9 :CMS INFO: Adding lock for Host ::1:12012 (20) (permission user-p-4 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930042Z node 9 :CMS INFO: Adding lock for Host ::1:12011 
(19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930046Z node 9 :CMS INFO: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930052Z node 9 :CMS INFO: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:21.930095Z node 9 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:21.930110Z node 9 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:21.930120Z node 9 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:21.930204Z node 9 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:21.930209Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 2024-11-21T17:50:21.930216Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 9, down nodes: 0 2024-11-21T17:50:21.930234Z node 9 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission user-p-9 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T17:50:21.930242Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:21.941004Z node 9 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:21.941071Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission user-p-9 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } RequestId: "user-r-6" Deadline: 420539584 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:15.826590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:15.826614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:15.826619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:15.826624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:15.826628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:15.826632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:15.826639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:15.826716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:15.837270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:15.837290Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:15.839923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:15.840687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:15.840719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:15.844714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:15.844940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:15.845040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:15.845102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:15.846861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:15.847165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:15.847198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:15.847232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:15.847240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T17:50:15.847247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:15.847260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.849890Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:15.866152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:15.866209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.866251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:15.866281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:15.866287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.867184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:15.867211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:15.867250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.867259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:15.867263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:15.867267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:15.867625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.867636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:15.867641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:15.867914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.867922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.867927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:15.867933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:15.868474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:15.869674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:15.869726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:15.869883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:15.869908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:15.869914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:15.869961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:15.869966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:15.869995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:15.870006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:15.870402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:15.870410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:15.870448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:15.870453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:15.870524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:15.870530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:15.870540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:15.870543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:15.870548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:15.870553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:15.870558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T17:50:15.870561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:15.870572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:15.870577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:15.870580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:15.870821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:15.870832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:15.870836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:15.870841Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:15.870847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:15.870857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... eshard: 72057594046678944 2024-11-21T17:50:24.793938Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:24.793942Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:24.793945Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:24.793959Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.794905Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:123:2149] sender: [37:235:2058] recipient: [37:15:2062] 2024-11-21T17:50:24.796549Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:24.796580Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.796607Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:24.796634Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:24.796638Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.796990Z node 37 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797003Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:24.797023Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797028Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:24.797030Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:24.797033Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:24.797280Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797286Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797288Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:24.797527Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797533Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797535Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:24.797539Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:24.797556Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:24.797773Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:24.797793Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:24.797883Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797896Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 158913792106 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:24.797902Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:24.797940Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:24.797947Z node 37 :FLAT_TX_SCHEMESHARD 
DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:24.797969Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:24.797980Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:24.798256Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:24.798260Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:24.798280Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:24.798282Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:202:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:24.798322Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.798326Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:24.798334Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:24.798336Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:24.798340Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:24.798343Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:24.798346Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:24.798348Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:24.798354Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:24.798357Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:24.798359Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:24.798399Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:24.798407Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:24.798411Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:24.798415Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:24.798419Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:24.798434Z 
node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:24.798837Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:24.798885Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:24.799010Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] Bootstrap 2024-11-21T17:50:24.799800Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] Become StateWork (SchemeCache [37:270:2262]) 2024-11-21T17:50:24.800259Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:24.800296Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:24.800306Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2024-11-21T17:50:24.800373Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2024-11-21T17:50:24.800502Z node 37 :TX_PROXY DEBUG: actor# [37:265:2257] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:24.800847Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:24.800863Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T17:50:24.801040Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::StateStorageTwoRings >> TMaintenanceApiTest::ActionReason [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> 
TCmsTest::WalleRequestDuringRollingRestart [GOOD] Test command err: 2024-11-21T17:50:19.995018Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:19.997603Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:19.997694Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:19.998013Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:19.998185Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:19.998355Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:19.999847Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:19.999875Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:19.999922Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:19.999942Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:20.000446Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:20.000472Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:20.000500Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:20.000549Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:20.020203Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:20.031133Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:20.031208Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.032441Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.032530Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:20.032536Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:20.032543Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:20.032547Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:20.032572Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:20.032596Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:20.034249Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 
VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:20.044542Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.078644Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.078694Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:20.078843Z node 1 :CMS INFO: Processing Wall-E request: TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "1" DryRun: false 2024-11-21T17:50:20.105683Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:20.105723Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:20.105788Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:20.105999Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } 
Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5 ... 1, validity# 1970-01-01T00:03:00.028000Z, action# Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:23.774950Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:23.805509Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:23.846813Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:23.846889Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180028000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T17:50:23.846898Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.028000Z 2024-11-21T17:50:23.847968Z node 17 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:23.847986Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:23.847994Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:23.848007Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:23.869639Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:23.869686Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:23.869782Z node 17 :CMS INFO: Processing Wall-E request: TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "18" DryRun: false 2024-11-21T17:50:23.880976Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.881011Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.881026Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.892512Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.892547Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.892563Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.892689Z node 17 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 2024-11-21T17:50:23.892697Z node 17 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 2024-11-21T17:50:23.892705Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:23.892713Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '18': node state: 'Locked') 2024-11-21T17:50:23.892730Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:23.892779Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 20, body# User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': 
node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2024-11-21T17:50:23.903480Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:23.903534Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } RequestId: "Wall-E-r-2" Deadline: 420233512 } 2024-11-21T17:50:23.903570Z node 17 :CMS DEBUG: TTxStoreWalleTask Execute 2024-11-21T17:50:23.903592Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store wall-e task: id# task-1, requestId# Wall-E-r-2 2024-11-21T17:50:23.914399Z node 17 :CMS DEBUG: TTxStoreWalleTask Complete 2024-11-21T17:50:23.914432Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvStoreWalleTask { Task: { TaskId: task-1 RequestId: Wall-E-r-2 Owner: Permissions: [] HasSingleCompositeActionGroup: 0 } }, response# NKikimr::NCms::TEvCms::TEvWalleTaskStored { TaskId: task-1 } 2024-11-21T17:50:23.914495Z node 17 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCreateTaskRequest { TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "18" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvWalleCreateTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } TaskId: "task-1" Hosts: "18" } 2024-11-21T17:50:23.926019Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.926054Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.926069Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.926206Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:23.926218Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2024-11-21T17:50:23.926226Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:23.926259Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:23.926278Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T17:50:23.926286Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:23.926297Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:23.926335Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.336536Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:23.926345Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:23.937197Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:23.937272Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180336536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T17:50:23.937401Z node 17 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:23.937412Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:23.937425Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:23.937443Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:23.948133Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:23.948184Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:23.948322Z node 17 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2024-11-21T17:50:23.959462Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.959491Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.959505Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.959597Z node 17 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2024-11-21T17:50:23.959608Z node 17 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } 2024-11-21T17:50:23.959618Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:23.959653Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:23.959670Z node 17 :CMS DEBUG: Accepting permission: id# Wall-E-p-3, requestId# Wall-E-r-2, owner# Wall-E 2024-11-21T17:50:23.959677Z node 
17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission Wall-E-p-3 until 586524-01-19T08:01:49Z) 2024-11-21T17:50:23.959686Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:23.959716Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-3, validity# 586524-01-19T08:01:49.551615Z, action# Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 2024-11-21T17:50:23.959724Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-2, owner# Wall-E 2024-11-21T17:50:23.970498Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:23.970566Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-3" Action { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T17:50:23.970602Z node 17 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-1" Hosts: "18" } } >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] Test command err: 2024-11-21T17:50:21.953116Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:21.953832Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:21.954568Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:21.955106Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:21.955134Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:21.955203Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:21.955488Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:21.955528Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:21.955750Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:21.955964Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:21.956762Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:21.956802Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:21.956826Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:21.956844Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:21.971993Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:21.993512Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:21.993648Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.994913Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:21.995114Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:21.995123Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:21.995135Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:21.995139Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:21.995206Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:21.995232Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:21.995299Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.996934Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:22.017981Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:22.018034Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:22.045698Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:22.045730Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:22.045811Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:22.046071Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... lotId { NodeId: 14 PDiskId: 14 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1000 } VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1000 } VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1000 } VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1000 } VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1000 } VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1000 } VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1001 } VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1001 } VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1001 } VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1001 } VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1001 } VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1001 } VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1002 } VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1002 } VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1002 } VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1002 } VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1002 } VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1002 } VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1003 } VSlotId { NodeId: 11 PDiskId: 11 VSlotId: 1003 } VSlotId { NodeId: 12 PDiskId: 12 VSlotId: 1003 } VSlotId { NodeId: 13 PDiskId: 13 VSlotId: 1003 } VSlotId { NodeId: 14 PDiskId: 14 VSlotId: 1003 } VSlotId { NodeId: 15 PDiskId: 15 VSlotId: 1003 } VSlotId { NodeId: 16 PDiskId: 16 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:23.870399Z node 9 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.870434Z node 9 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:23.893183Z node 9 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.893208Z node 9 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.893223Z node 9 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.893457Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 9 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 10 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 11 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 12 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 
120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 13 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 14 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 15 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 16 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120030000 } } 2024-11-21T17:50:23.893504Z node 9 :CMS INFO: Check request: User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "9" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" 2024-11-21T17:50:23.893511Z node 9 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "9" Duration: 600000000 2024-11-21T17:50:23.893517Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:23.893547Z node 9 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:23.893549Z node 9 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:23.893552Z node 9 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:23.893555Z node 9 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:23.893562Z node 9 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 2024-11-21T17:50:23.893565Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:23.893578Z node 9 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. 
Locked: Host ::1:12001 (9) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T17:50:23.893589Z node 9 :CMS DEBUG: Accepting permission: id# test-user-p-1, requestId# test-user-r-1, owner# test-user 2024-11-21T17:50:23.893594Z node 9 :CMS INFO: Adding lock for Host ::1:12001 (9) (permission test-user-p-1 until 1970-01-01T00:12:00Z) 2024-11-21T17:50:23.893602Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:23.893633Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-1, validity# 1970-01-01T00:12:00.030000Z, action# Type: SHUTDOWN_HOST Host: "9" Duration: 600000000 2024-11-21T17:50:23.893653Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# test-user-r-1, owner# test-user, order# 1, priority# 0, body# User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (9) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:23.934286Z node 9 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:23.986478Z node 9 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:23.986561Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "9" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "test-user-r-1" Permissions { Id: "test-user-p-1" Action { Type: SHUTDOWN_HOST Host: "9" Duration: 600000000 } Deadline: 720030000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2024-11-21T17:50:23.986570Z node 9 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:32:00.030000Z >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TCmsTest::StateRequestUnknownNode >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTest::TestForceRestartModeDisconnects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] Test command err: 2024-11-21T17:50:20.641529Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:20.642336Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:20.643265Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:20.643312Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:20.643342Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:20.643409Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:20.644319Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:20.644368Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:20.644767Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:20.644791Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:20.645990Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:20.646019Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:20.646042Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:20.646070Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:20.662739Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:20.694890Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:20.695006Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.696009Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.696133Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:20.696138Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:20.696144Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:20.696147Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:20.696160Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:20.696181Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:20.696274Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.697572Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:20.729599Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.729664Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:20.757208Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:20.757250Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:20.757326Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:20.757660Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... tamp: 120029512 } Timestamp: 120029512 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120029512 } } 2024-11-21T17:50:24.420174Z node 17 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } 
State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 17 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 18 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 19 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 
} Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120029512 } 2024-11-21T17:50:24.420202Z node 17 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.004512s 2024-11-21T17:50:24.420210Z node 17 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2024-11-21T17:50:24.420251Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:24.420303Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2024-11-21T17:50:24.420307Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2024-11-21T17:50:24.420309Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2024-11-21T17:50:24.420311Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2024-11-21T17:50:24.420313Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2024-11-21T17:50:24.420315Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2024-11-21T17:50:24.420317Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2024-11-21T17:50:24.420319Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2024-11-21T17:50:24.420343Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420397Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420406Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420416Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] 
Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420421Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420426Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420431Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420438Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2024-11-21T17:50:24.420442Z node 17 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2024-11-21T17:50:24.460935Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:24.512993Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:24.513074Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Cannot perform several actions and evict vdisks" } RequestId: "user-r-1" } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:08.875795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:08.875817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:08.875823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:08.875828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:08.875834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:08.875838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:08.875847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:08.875930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:08.886356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:08.886375Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:08.888604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:08.888695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:08.888720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:08.891036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:08.891108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:08.891201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.891359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:08.891940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.892195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:08.892206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.892219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:08.892242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:08.892250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:08.892289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:08.893510Z node 1 :HIVE INFO: [72057594037968897] 
started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:08.910450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:08.910532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.910597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:08.910639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:08.910647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.912541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.912591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:08.912657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.912670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:08.912676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:08.912682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:08.913169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.913181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:08.913186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:08.913518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.913528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.913534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.913539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.914165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:08.914820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:08.914886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:08.915090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:08.915117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:08.915125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.915177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:08.915184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:08.915216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:08.915227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:08.915700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:08.915711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:08.915742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:08.915748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:08.915810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:08.915818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:08.915830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:08.915835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.915841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:08.915847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:08.915852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:08.915856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:08.915867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:08.915873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:08.915878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... e: TxMoveTable, at tablet72057594046678944 2024-11-21T17:50:25.796786Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:50:25.796789Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T17:50:25.796795Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:50:25.796842Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:25.796861Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2024-11-21T17:50:25.796871Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:25.796876Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:25.797223Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.797243Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.797501Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:25.797517Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:25.797538Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:25.797541Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:25.797560Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:25.797577Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:25.797580Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:50:25.797582Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:50:25.797603Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:25.797608Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:25.797616Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 
72057594046678944 2024-11-21T17:50:25.797619Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:25.797622Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:50:25.797748Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.797773Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.797776Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:50:25.797779Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:25.797782Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:25.797968Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.797975Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.797978Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:50:25.797982Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:50:25.797985Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:25.797991Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:50:25.798211Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:25.798217Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:25.798220Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:25.798226Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:50:25.798229Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:50:25.798232Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:50:25.798235Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:50:25.798239Z node 62 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:50:25.798241Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:50:25.798254Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:25.798256Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:25.798299Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:25.798303Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:25.798309Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:25.798345Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.798696Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:25.798770Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:50:25.798803Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:50:25.798807Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:50:25.798845Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:50:25.798857Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:25.798860Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:463:2438] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:50:25.798907Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:25.798929Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 30us result status StatusSuccess 2024-11-21T17:50:25.799023Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: 
"TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig |81.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |81.3%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] Test command err: 2024-11-21T17:50:20.938481Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:20.941404Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:20.941506Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:20.941860Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:20.942061Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:20.942224Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:20.943796Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:20.943832Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:20.943886Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:20.943908Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:20.944364Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:20.944385Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:20.944402Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:20.944443Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:20.961666Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:20.972560Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:20.972641Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.973671Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.973746Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:20.973750Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:20.973756Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:20.973759Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:20.973784Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:20.973810Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:20.975077Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:20.985337Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.017045Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:21.017094Z node 1 :CMS DEBUG: Updated config: 
TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:21.042708Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:21.042740Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:21.042814Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:21.043122Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 300027000 } Timestamp: 300027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 300027000 } Timestamp: 300027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 300027000 } Timestamp: 300027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 300027000 } Timestamp: 300027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 300027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 300027000 } Timestamp: 300027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Devices { Name: 
"vdisk-0-1-0-5-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 300027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 3000 ... Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:24.753499Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:24.806150Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:24.806261Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } Deadline: 360028512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T17:50:24.806275Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:08:00.028512Z 2024-11-21T17:50:24.817616Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:24.817702Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:24.817723Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:24.817734Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:24.817829Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:24.817836Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2024-11-21T17:50:24.817843Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:24.817871Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:24.817879Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2024-11-21T17:50:24.817881Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:24.817890Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 
2024-11-21T17:50:24.817903Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T17:50:24.817906Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:24.817946Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:24.817979Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.130512Z, action# Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2024-11-21T17:50:24.817996Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:24.828797Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:24.828874Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Deadline: 360130512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T17:50:24.840070Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:24.840090Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:24.840151Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:24.840169Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:24.840181Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:24.840314Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:24.840324Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2024-11-21T17:50:24.840335Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:24.840367Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), Host ::1:12002 (18) has planned shutdown (permission user-p-2 owned by user). 
Down: ) 2024-11-21T17:50:24.840389Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:24.840440Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), Host ::1:12002 (18) has planned shutdown (permission user-p-2 owned by user). Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:24.851400Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:24.851472Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), Host ::1:12002 (18) has planned shutdown (permission user-p-2 owned by user). Down: " } RequestId: "user-r-1" Deadline: 600232024 } 2024-11-21T17:50:24.851578Z node 17 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:24.851586Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:24.851597Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:24.851626Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:24.862498Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:24.862569Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:24.874627Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:24.874749Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:24.874780Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:24.874800Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:24.874962Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), Host ::1:12002 (18) has planned shutdown (permission user-p-2 owned by user). Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:24.874977Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. 
Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), Host ::1:12002 (18) has planned shutdown (permission user-p-2 owned by user). Down: " } 2024-11-21T17:50:24.874989Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:24.875053Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:24.875074Z node 17 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2024-11-21T17:50:24.875081Z node 17 :CMS INFO: Adding lock for Host ::1:12003 (19) (permission user-p-3 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:24.875094Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:24.875145Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:06:00.335048Z, action# Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 2024-11-21T17:50:24.875155Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:24.885880Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:24.885953Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } Deadline: 360335048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } } >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TCmsTest::RequestReplaceBrokenDevices >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] Test command err: 2024-11-21T17:50:05.181917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:05.182429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:05.182452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00157b/r3tmp/tmpMNmKTb/pdisk_1.dat 2024-11-21T17:50:05.293225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:05.311848Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:05.359167Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:05.359509Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:05.359561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:05.359584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:05.372786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:05.480876Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:50:05.480901Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:50:05.480942Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:50:05.490198Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:50:05.490440Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:50:05.490455Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:50:05.490515Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:50:05.490553Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:50:05.490563Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:50:05.490637Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:50:05.491061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:05.491313Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:50:05.491325Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:50:05.504934Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:05.505164Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:05.505255Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:05.505318Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:05.512530Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:05.512728Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:05.512761Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:05.512930Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:05.512943Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:05.512949Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:05.513005Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:05.516739Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:05.516827Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:05.516855Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:50:05.516861Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:05.516865Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:05.516869Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:05.517024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:05.517031Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:05.517172Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:05.517193Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:50:05.517206Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:05.517210Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:05.517216Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:50:05.517223Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:05.517230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:05.517236Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:50:05.517241Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:05.517244Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:05.517248Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:05.517253Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:05.517274Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:50:05.517279Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:05.517299Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:05.517343Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:50:05.517351Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:05.517368Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:05.517375Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:05.517379Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:50:05.517384Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:50:05.517387Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:05.517432Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:50:05.517436Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:50:05.517442Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:05.517453Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:05.517466Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:50:05.517469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:05.517472Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:50:05.517475Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:50:05.517479Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:50:05.517642Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:05.517648Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:05.517653Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:05.517660Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:50:05.517670Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:50:05.518085Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:50:05.518096Z ... n unit StoreAndSendOutRS 2024-11-21T17:50:26.179115Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit StoreAndSendOutRS 2024-11-21T17:50:26.179121Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T17:50:26.179124Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit StoreAndSendOutRS 2024-11-21T17:50:26.179126Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit PrepareKqpDataTxInRS 2024-11-21T17:50:26.179129Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit PrepareKqpDataTxInRS 2024-11-21T17:50:26.179133Z node 7 :TX_DATASHARD TRACE: Prepare InReadsets from 72075186224037889 to 72075186224037888 2024-11-21T17:50:26.179136Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T17:50:26.179139Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit PrepareKqpDataTxInRS 2024-11-21T17:50:26.179141Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T17:50:26.179144Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T17:50:26.179148Z node 7 :TX_DATASHARD TRACE: Prepare for loading readset for [900:281474976715668] at 72075186224037888 source=72075186224037889 target=72075186224037888 2024-11-21T17:50:26.179151Z node 7 :TX_DATASHARD TRACE: Expected 1 readsets for [900:281474976715668] at 72075186224037888 2024-11-21T17:50:26.179160Z node 7 :TX_DATASHARD TRACE: Filled readset for [900:281474976715668] from=72075186224037889 to=72075186224037888origin=72075186224037889 2024-11-21T17:50:26.179163Z node 7 :TX_DATASHARD TRACE: Remain 0 read sets for [900:281474976715668] at 72075186224037888 2024-11-21T17:50:26.179167Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T17:50:26.179170Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit LoadAndWaitInRS 
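The trace above shows the datashard advancing an operation through an ordered chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan for the scheme transaction; StoreAndSendOutRS, PrepareKqpDataTxInRS, LoadAndWaitInRS and so on for the data transaction), logging a status such as Executed, DelayComplete or "not ready" after each attempt before moving to the next unit. The sketch below illustrates only that general pattern; the unit names are copied from the log, but the types, statuses and control flow are simplified assumptions for illustration, not the actual datashard implementation.

```cpp
// Minimal sketch of an execution-unit pipeline in the spirit of the TX_DATASHARD
// trace above. All names and statuses here are hypothetical simplifications.
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    EStatus (*Run)();               // "Trying to execute ... on unit <Name>"
};

int main() {
    // An operation walks the units in order; units returning DelayComplete are
    // finished later (the "Complete execution ... on unit" lines in the trace).
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},   // waits for a plan step
    };

    std::vector<std::string> delayedComplete;
    for (const auto& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        EStatus st = unit.Run();
        if (st == EStatus::NotReady) {
            std::cout << "Operation is not ready to execute on unit " << unit.Name << "\n";
            break;                  // stays queued until the coordinator plans it
        }
        if (st == EStatus::DelayComplete) {
            delayedComplete.push_back(unit.Name);
        }
        std::cout << "Advance execution plan, executing on unit " << unit.Name << "\n";
    }
    for (const auto& name : delayedComplete) {
        std::cout << "Complete execution on unit " << name << "\n";
    }
}
```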
2024-11-21T17:50:26.179173Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:50:26.179176Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:50:26.179182Z node 7 :TX_DATASHARD TRACE: Operation [900:281474976715668] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191852 2024-11-21T17:50:26.179208Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T17:50:26.179213Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:26.179216Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:50:26.179220Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompleteOperation 2024-11-21T17:50:26.179222Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:50:26.179246Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is DelayComplete 2024-11-21T17:50:26.179249Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompleteOperation 2024-11-21T17:50:26.179252Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:50:26.179255Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:50:26.179260Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T17:50:26.179265Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:50:26.179268Z node 7 :TX_DATASHARD TRACE: Execution plan for [900:281474976715668] at 72075186224037888 has finished 2024-11-21T17:50:26.179272Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:26.179275Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:50:26.179278Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:26.179281Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:26.179352Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:50:26.179358Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T17:50:26.179374Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [7:961:2752], exec latency: 652 ms, propose latency: 652 ms 2024-11-21T17:50:26.179384Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T17:50:26.179388Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 
72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T17:50:26.179394Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:50:26.179478Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:738:2604], Recipient [7:1075:2853]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T17:50:26.179483Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:50:26.179487Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2024-11-21T17:50:26.179516Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:26.179520Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:50:26.179527Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [7:961:2752], exec latency: 202 ms, propose latency: 202 ms 2024-11-21T17:50:26.179532Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T17:50:26.179536Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:26.179575Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1075:2853], Recipient [7:738:2604]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T17:50:26.179579Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:50:26.179582Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2024-11-21T17:50:26.179844Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:26.179852Z node 7 :TX_DATASHARD TRACE: Complete execution for [800:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:50:26.179860Z node 7 :TX_DATASHARD DEBUG: Complete [800 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [7:1000:2787], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:50:26.179867Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2024-11-21T17:50:26.179871Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:26.179946Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1075:2853], Recipient [7:738:2604]: {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2024-11-21T17:50:26.179951Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2024-11-21T17:50:26.179955Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715666 2024-11-21T17:50:26.179977Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 1 } items { int32_value: 10 } } { items { int32_value: 2 } items { int32_value: 20 } } 2024-11-21T17:50:26.180193Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:26.180200Z node 7 :TX_DATASHARD TRACE: Complete execution for [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:50:26.180207Z node 7 :TX_DATASHARD DEBUG: Complete [900 : 281474976715668] from 72075186224037888 at tablet 72075186224037888 send result to client [7:1034:2814], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:50:26.180214Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2024-11-21T17:50:26.180218Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:26.180889Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1075:2853], Recipient [7:738:2604]: {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2024-11-21T17:50:26.180899Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:50:26.180903Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715668 { items { int32_value: 2 } items { int32_value: 20 } } 2024-11-21T17:50:26.181017Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 7 Status: STATUS_NOT_FOUND 2024-11-21T17:50:26.181195Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715667 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 2 } items { int32_value: 20 } } >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:229:2060] recipient: [1:212:2140] 
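The TSchemeshardBackgroundCleaningTest::TempInTemp output that follows (txId 107 and 108 near the end of this test's log) exercises one precondition: creating a directory under a temporary path is rejected both without and with the AllowCreateInTempDir flag, only the error text differs. The function below is a hedged sketch of that observed behavior; the type and helper names are assumptions for illustration and are not schemeshard's real interfaces.

```cpp
// Hypothetical sketch of the MkDir precondition seen in the TempInTemp test output.
#include <iostream>
#include <string>

struct TResult {
    bool Ok;
    std::string Reason;
};

TResult ValidateMkDirInTempPath(bool parentIsTemporary, bool allowCreateInTempDir) {
    if (!parentIsTemporary) {
        return {true, ""};
    }
    if (allowCreateInTempDir) {
        // Matches the txId 108 rejection in the log below.
        return {false, "Temporary directory can't be created in another temporary directory."};
    }
    // Matches the txId 107 rejection ("path is temporary") in the log below.
    return {false, "path is temporary"};
}

int main() {
    std::cout << ValidateMkDirInTempPath(true, false).Reason << "\n";
    std::cout << ValidateMkDirInTempPath(true, true).Reason << "\n";
}
```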
2024-11-21T17:49:34.917392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:34.917420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:34.917423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:34.917427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:34.917431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:34.917433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:34.917440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:34.917536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:34.927344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:34.927368Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:34.930309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:34.930350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:34.930378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:34.931883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:34.931934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:34.932024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:34.932068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:34.932577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:34.932826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:34.932833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:34.932861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:34.932866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:34.932871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:34.932886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.933847Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:339:2060] recipient: [1:17:2064] 2024-11-21T17:49:34.951285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:34.951387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.951474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:34.951522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:34.951530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.952686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:34.952719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:34.952790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.952815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:34.952820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:34.952825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:34.953845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.953860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:34.953866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:34.954239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.954247Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.954253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:34.954260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:34.954947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:34.955398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:34.955457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:34.955665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:34.955691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:34.955700Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:34.955760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:34.955767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:34.955800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:34.955813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:34.956265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:34.956278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:34.956324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:34.956330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:306:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:34.956447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:34.956455Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:34.956468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:34.956474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:34.956480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:34.956486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:34.956490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:34.956495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:34.956508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:34.956514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:34.956518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T17:49:34.956840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:34.956853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:34.956858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:34.956863Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:34.956867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:34.956880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... chemeshard: 72057594046678944, txId: 106 2024-11-21T17:50:26.559156Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2024-11-21T17:50:26.559160Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:50:26.559165Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2024-11-21T17:50:26.559167Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:50:26.559540Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:50:26.559602Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:50:26.559606Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:50:26.559789Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:50:26.559794Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:50:26.559832Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:50:26.559837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:50:26.559872Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:666:2497], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:26.559876Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:26.559878Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:50:26.559891Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2024-11-21T17:50:26.559894Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:50:26.559899Z 
node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:50:26.559910Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:50:26.559913Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:664:2495] 2024-11-21T17:50:26.559925Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:666:2497], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:26.559927Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:26.559929Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2024-11-21T17:50:26.559972Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:542:2100], Recipient [7:229:2151] 2024-11-21T17:50:26.559975Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:50:26.560282Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 542 RawX2: 34359740468 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:26.560306Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:50:26.560320Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133, at schemeshard: 72057594046678944 2024-11-21T17:50:26.560346Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:50:26.560652Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:26.560672Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133, operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2024-11-21T17:50:26.560675Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T17:50:26.560725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send 
EvNotifyTxCompletion 2024-11-21T17:50:26.560728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T17:50:26.560760Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:672:2503], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:26.560763Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:26.560765Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:50:26.560776Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2024-11-21T17:50:26.560778Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:50:26.560783Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T17:50:26.560794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:50:26.560796Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:670:2501] 2024-11-21T17:50:26.560807Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:672:2503], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:26.560809Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:26.560811Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T17:50:26.560845Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:542:2100], Recipient [7:229:2151] 2024-11-21T17:50:26.560848Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:50:26.561108Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 542 RawX2: 34359740468 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:26.561125Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:50:26.561129Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2024-11-21T17:50:26.561147Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:50:26.561470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." 
TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:26.561487Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2024-11-21T17:50:26.561491Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T17:50:26.561530Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T17:50:26.561533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T17:50:26.561563Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:678:2509], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:26.561566Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:26.561569Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:50:26.561578Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2024-11-21T17:50:26.561580Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:50:26.561585Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T17:50:26.561595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T17:50:26.561597Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:676:2507] 2024-11-21T17:50:26.561608Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:678:2509], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:26.561610Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:26.561612Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 >> TCmsTest::StateStorageNodesFromOneRing >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex >> YdbIndexTable::MultiShardTableOneIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] Test command err: 2024-11-21T17:50:19.718254Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:19.721212Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:19.721311Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:19.721758Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:19.721858Z node 1 :CMS DEBUG: StateInit event 
type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:19.722019Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:19.722974Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:19.723057Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:19.723103Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:19.723121Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:19.724181Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:19.724242Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:19.724266Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:19.724316Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:19.741796Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:19.752630Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:19.752697Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:19.754061Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:19.754181Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:19.754186Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:19.754195Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:19.754198Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:19.754205Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:19.754228Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:19.755213Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:19.765492Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:19.797172Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:19.797223Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:19.797386Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2024-11-21T17:50:19.797460Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:19.821954Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:19.822029Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:19.822123Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { 
Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120027000 } } 2024-11-21T17:50:19.852612Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:19.913981Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:19.914049Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 2 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2024-11-21T17:50:19.914073Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:19.947414Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:19.947438Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:19.947451Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:19.947483Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:19.947489Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:19.947495Z node 1 :CMS DEBUG: [Nodes Counter] 
Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:19.947497Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:19.947503Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:19.947504Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:19.947506Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:19.947509Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:19.947518Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:19.947523Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:19.947528Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:19.947548Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.128000Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:19.958076Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:19.958135Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2024-11-21T17:50:19.958143Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.128000Z 2024-11-21T17:50:19.969133Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:19.969183Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:19.969200Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:19.969211Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:19.969252Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:19.969260Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:19.969269Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:19.969273Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:19.969278Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:19.969290Z node 1 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:19.969295Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:19.969302Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:19.969332Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.229512Z, action# Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:19.980006Z node 1 :CMS DEBUG: 
TTxStorePermissions complete 2024-11-21T17:50:19.980064Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } Deadline: 180229512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 2 InterconnectPort: 12002 } } } } 2024-11-21T17:50:19.990870Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:19.990886Z node 1 :CMS INFO: Adding lock fo ... 0000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:25.770627Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\' of tenant \'user0\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } 2024-11-21T17:50:25.770634Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:25.770637Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:25.770640Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:25.770647Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } 2024-11-21T17:50:25.770650Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:25.770652Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:25.770658Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27' of tenant 'user0': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2024-11-21T17:50:25.770669Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T17:50:25.770674Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.770680Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.770705Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.538560Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2024-11-21T17:50:25.770721Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. 
Locked: 1, down: 0, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:25.781376Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.781437Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180538560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T17:50:25.781532Z node 25 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:25.781538Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:25.781547Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:25.781562Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:25.792153Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:25.792194Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:25.803073Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.803128Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.803164Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:25.803173Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } 2024-11-21T17:50:25.803179Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.803196Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.803201Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27' of tenant 'user0': too many unavailable nodes. Locked: 0, down: 1, limit: 1) 2024-11-21T17:50:25.803213Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.803237Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:25.813893Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.813910Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.813946Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } RequestId: "user-r-1" Deadline: 420641584 } 2024-11-21T17:50:25.825053Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.825127Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.825175Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:25.825188Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } 2024-11-21T17:50:25.825197Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.825201Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.825209Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27' of tenant 'user0': too many unavailable nodes. Locked: 0, down: 1, limit: 1) 2024-11-21T17:50:25.825227Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.825257Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:25.836073Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.836092Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.836149Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1" } RequestId: "user-r-1" Deadline: 420743096 } 2024-11-21T17:50:25.847076Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.847134Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.847171Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:25.847179Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\' of tenant \'user0\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } 2024-11-21T17:50:25.847185Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.847188Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.847192Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:25.847204Z node 25 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2024-11-21T17:50:25.847209Z node 25 :CMS INFO: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.847215Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.847233Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.844608Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2024-11-21T17:50:25.847242Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:25.857989Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.858011Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.858075Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180844608 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] Test command err: 2024-11-21T17:50:19.500668Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:19.500953Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:19.502928Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:19.502995Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:19.503251Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:19.503292Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:19.503310Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: 
NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:19.503325Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:19.503349Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:19.503408Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:19.504637Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:19.504658Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:19.504682Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:19.504707Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:19.521329Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:19.553269Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:19.553363Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:19.554332Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:19.554405Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:19.554410Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:19.554418Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:19.554421Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:19.554466Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:19.554489Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:19.554519Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:19.555601Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:19.587297Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:19.587353Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:19.587500Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2024-11-21T17:50:19.587557Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:19.612696Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:19.612781Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:19.612875Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 1 InterconnectPort: 
12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029000 } Timestamp: 120029000 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120029000 } } 2024-11-21T17:50:19.653549Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:19.694656Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:19.694711Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2024-11-21T17:50:19.694730Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:19.716645Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:19.716674Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:19.716688Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:19.716726Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:19.716731Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:19.716738Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 0, down nodes: 0 2024-11-21T17:50:19.716744Z node 1 :CMS DEBUG: Ring: 0; State: 
Ok 2024-11-21T17:50:19.716746Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:19.716748Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:19.716750Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:19.716761Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:19.716765Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:19.716772Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:19.716798Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.130000Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:19.727427Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:19.727511Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2024-11-21T17:50:19.727522Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.130000Z 2024-11-21T17:50:19.738697Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:19.738758Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:19.738777Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:19.738790Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:19.738841Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:19.738849Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:19.738858Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 1, down nodes: 0 2024-11-21T17:50:19.738873Z node 1 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '2': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 10%) 2024-11-21T17:50:19.738884Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:19.749724Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:19.749815Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'2\': too many unavailable nodes. 
Locked: 1, down: 0, total: 8, limit: 10%" } RequestId: "user-r-2" Deadline: 420231512 } 2024-11-21T17:50:19.749949Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2024-11- ... mand: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:25.756781Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.756812Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.756825Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.756874Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:25.756882Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } 2024-11-21T17:50:25.756890Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 0 2024-11-21T17:50:25.756893Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:25.756897Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:25.756906Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } 2024-11-21T17:50:25.756909Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 1, down nodes: 0 2024-11-21T17:50:25.756917Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. 
Locked: 1, down: 0, total: 8, limit: 20%) 2024-11-21T17:50:25.756931Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T17:50:25.756936Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.756944Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.756975Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.535560Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2024-11-21T17:50:25.756992Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:25.767826Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.767919Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180535560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T17:50:25.768034Z node 25 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:25.768044Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:25.768055Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:25.768078Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:25.778882Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:25.778940Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:25.790279Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.790351Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.790397Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:25.790406Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. 
Locked: 1, down: 0, total: 8, limit: 20%" } 2024-11-21T17:50:25.790415Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.790423Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%) 2024-11-21T17:50:25.790438Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.790464Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:25.801227Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.801248Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.801297Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } RequestId: "user-r-1" Deadline: 420638584 } 2024-11-21T17:50:25.812282Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.812351Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.812393Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:25.812402Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } 2024-11-21T17:50:25.812410Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.812421Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%) 2024-11-21T17:50:25.812434Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.812458Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. 
Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:25.823188Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.823206Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.823247Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } RequestId: "user-r-1" Deadline: 420740096 } 2024-11-21T17:50:25.834458Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.834542Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.834597Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:25.834608Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } 2024-11-21T17:50:25.834619Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.834623Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:25.834630Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:25.834648Z node 25 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2024-11-21T17:50:25.834656Z node 25 :CMS INFO: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.834665Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.834690Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.841608Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2024-11-21T17:50:25.834700Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:25.845569Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.845596Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.845663Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180841608 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TCmsTest::TestSetResetMarkers [GOOD] >> 
TCmsTest::TestProcessingQueue >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> TCmsTest::SysTabletsNode [GOOD] >> YdbIndexTable::OnlineBuild >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] Test command err: 2024-11-21T17:50:23.711449Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:23.711731Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:23.713569Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:23.713614Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:23.714141Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:23.714170Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:23.714193Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:23.714214Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:23.714240Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:23.714299Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:23.715252Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:23.715280Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:23.715296Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:23.715316Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:23.730980Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:23.752108Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:23.752183Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:23.753053Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.753167Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:23.753171Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:23.753178Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:23.753180Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:23.753226Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:23.753243Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:23.753298Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:23.754450Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: 
"/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 
VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:23.775350Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.775400Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:23.800265Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.800292Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.800360Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:23.800542Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 
300029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 3000 ... 
2024-11-21T17:50:27.034876Z node 14 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.034886Z node 14 :CMS DEBUG: [Nodes Counter] Checking Node: 16, with state: Down, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034892Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034895Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_SCHEMESHARD, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034897Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_TX_COORDINATOR, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034899Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_MEDIATOR, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034901Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_ALLOCATOR, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034903Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CONSOLE, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034906Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CMS, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034908Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: NODE_BROKER, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034910Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TENANT_SLOT_BROKER, on node: 16, with state: Down, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.034914Z node 14 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:27.034949Z node 14 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } Deadline: 180241512 Extentions { Type: HostInfo Hosts { Name: "::1" State: DOWN NodeId: 16 InterconnectPort: 12003 } } } } 2024-11-21T17:50:27.045804Z node 14 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:27.056567Z node 14 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.056627Z node 14 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.056664Z node 14 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:27.056671Z node 14 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.056681Z node 14 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.056688Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 17, with state: Up, locked nodes: 
0, down nodes: 3 2024-11-21T17:50:27.056694Z node 14 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '17': tablet 'FLAT_BS_CONTROLLER' has too many unavailable nodes. Locked: 0, down: 3, limit: 3) 2024-11-21T17:50:27.056716Z node 14 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'17\': tablet \'FLAT_BS_CONTROLLER\' has too many unavailable nodes. Locked: 0, down: 3, limit: 3" } Deadline: 420341512 } 2024-11-21T17:50:27.056808Z node 14 :CMS INFO: OnTabletDead: 72057594037936128 2024-11-21T17:50:27.056813Z node 14 :CMS DEBUG: TCms::Cleanup 2024-11-21T17:50:27.057857Z node 14 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:27.058333Z node 14 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:27.058358Z node 14 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:27.058607Z node 14 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:27.058677Z node 14 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.058756Z node 14 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.058804Z node 14 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.058820Z node 14 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.058861Z node 14 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:27.058887Z node 14 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2024-11-21T17:50:27.080197Z node 14 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.101271Z node 14 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.101334Z node 14 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.101378Z node 14 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:27.101386Z node 14 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.101400Z node 14 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101409Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101416Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_SCHEMESHARD, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101420Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_TX_COORDINATOR, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 
2024-11-21T17:50:27.101424Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_MEDIATOR, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101427Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_ALLOCATOR, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101430Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CONSOLE, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101434Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CMS, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101437Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: NODE_BROKER, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101440Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TENANT_SLOT_BROKER, on node: 18, with state: Up, locked nodes: 0, down nodes: 4 2024-11-21T17:50:27.101445Z node 14 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:27.101490Z node 14 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180447512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12005 } } } } 2024-11-21T17:50:27.101622Z node 14 :CMS INFO: OnTabletDead: 72057594037936128 2024-11-21T17:50:27.101628Z node 14 :CMS DEBUG: TCms::Cleanup 2024-11-21T17:50:27.102915Z node 14 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:27.103657Z node 14 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:27.103750Z node 14 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:27.103832Z node 14 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.103917Z node 14 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:27.103938Z node 14 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.103990Z node 14 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.104011Z node 14 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.104050Z node 14 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:27.104077Z node 14 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2024-11-21T17:50:27.125414Z node 14 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.146352Z node 14 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.146410Z node 14 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.146450Z node 14 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false 
Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:27.146457Z node 14 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.146467Z node 14 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 5 2024-11-21T17:50:27.146473Z node 14 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 19, with state: Up, locked nodes: 0, down nodes: 5 2024-11-21T17:50:27.146479Z node 14 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '19': tablet 'FLAT_BS_CONTROLLER' has too many unavailable nodes. Locked: 0, down: 5, limit: 5) 2024-11-21T17:50:27.146499Z node 14 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'19\': tablet \'FLAT_BS_CONTROLLER\' has too many unavailable nodes. Locked: 0, down: 5, limit: 5" } Deadline: 420553512 } >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageTwoBrokenRings >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] Test command err: 2024-11-21T17:50:21.255160Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:21.256059Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:21.257004Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:21.257578Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:21.257612Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:21.257674Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:21.258057Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:21.258108Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:21.258327Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:21.258593Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:21.259381Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:21.259409Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:21.259435Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:21.259458Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:21.275722Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:21.297435Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:21.297555Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.298845Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:21.299045Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:21.299052Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:21.299061Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:21.299065Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:21.299142Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:21.299167Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:21.299246Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.300862Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:21.322080Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:21.322148Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:21.348801Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:21.348839Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:21.348906Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:21.349183Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... Z node 17 :CMS INFO: Check request: User: "user1" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user1-p-4 owned by user1), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:25.380980Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user1-p-4 owned by user1), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2024-11-21T17:50:25.380987Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:25.381042Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:25.381059Z node 17 :CMS DEBUG: Accepting permission: id# user1-p-5, requestId# user1-r-4, owner# user1 2024-11-21T17:50:25.381064Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user1-p-5 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.381070Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.381105Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user1-p-5, validity# 1970-01-01T00:03:00.854144Z, action# Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2024-11-21T17:50:25.381114Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user1-r-4, owner# user1 2024-11-21T17:50:25.391934Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.392020Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user1" RequestId: "user1-r-4" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user1-p-5" Action { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Deadline: 180854144 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T17:50:25.392136Z node 17 :CMS INFO: Get all requests for user1 2024-11-21T17:50:25.392142Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:25.392157Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user1" Command: LIST DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } 2024-11-21T17:50:26.976798Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:26.977415Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:26.978824Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:26.978863Z node 25 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:26.979074Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:26.979104Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:26.979166Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:26.979178Z node 25 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:26.979194Z node 25 :CMS DEBUG: Using default config. 
2024-11-21T17:50:26.979230Z node 25 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:26.980316Z node 25 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:26.980354Z node 25 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:26.980382Z node 25 :CMS DEBUG: Using default config 2024-11-21T17:50:26.980397Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:26.991444Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: false EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:27.012200Z node 25 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.012312Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.012327Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.012385Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:27.012388Z node 25 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:27.012393Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:27.012395Z node 25 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:27.012402Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.012419Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:27.012427Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:27.012585Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 28 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 29 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 30 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 31 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 32 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } VSlotId { 
NodeId: 27 PDiskId: 27 VSlotId: 1003 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:27.033262Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.033304Z node 25 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.033599Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Unsupported: feature flag EnableCMSRequestPriorities is off" } } >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] Test command err: 2024-11-21T17:50:23.851113Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:23.853609Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:23.853700Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:23.854020Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:23.854166Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:23.854314Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:23.855631Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:23.855658Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:23.855696Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:23.855720Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:23.856062Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:23.856081Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:23.856101Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:23.856158Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:23.875928Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:23.886860Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:23.886945Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:23.888201Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.888308Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:23.888315Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:23.888324Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:23.888328Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:23.888356Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:23.888384Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:23.889997Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:23.900306Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:23.931732Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.931775Z node 1 :CMS DEBUG: Updated config: 
TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:23.955519Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.955545Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.955596Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.955770Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: 
"vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... S DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:27.368018Z node 17 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:27.368030Z node 17 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:27.368038Z node 17 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:27.368047Z node 17 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.368267Z node 17 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 17 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 22 
PDiskId: 22 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1000 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1000 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1000 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1000 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1000 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1000 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1000 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1001 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1001 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1001 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1001 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1001 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1001 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1001 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1002 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1002 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1002 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1002 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1002 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1002 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1002 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1003 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1003 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1003 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1003 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1003 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1003 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1003 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:27.389244Z node 17 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.389307Z node 17 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.412759Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.412798Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:27.412820Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.413128Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# 
NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 17 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 18 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 19 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: 
"6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028512 } } 2024-11-21T17:50:27.413189Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { Hosts: "ghrun-2rqap2spfm.auto.internal.com" }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: NO_SUCH_HOST Reason: "Unknown host ghrun-2rqap2spfm.auto.internal.com" } } >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTest::RequestRestartServicesOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] Test command err: 2024-11-21T17:50:20.225432Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:20.228860Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:20.228983Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:20.229499Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:20.230087Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:20.230367Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:20.232858Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:20.232896Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:20.232983Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:20.233010Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:20.233649Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:20.233677Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:20.233705Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:20.233767Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:20.253665Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:20.264672Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:20.264749Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.266258Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.266360Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:20.266365Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:20.266373Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:20.266377Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:20.266404Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:20.266430Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:20.268292Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:20.278619Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:20.311050Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:20.311124Z node 1 :CMS DEBUG: Updated config: 
TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:20.336271Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:20.336311Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:20.336373Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:20.336612Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: 
"vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 121 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 122 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195692Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 93 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 94 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 95 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195709Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 96 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 97 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 98 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195726Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 99 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 100 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 101 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195742Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 34, response# PDiskStateInfo { PDiskId: 102 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 103 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 104 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195761Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 35, response# PDiskStateInfo { PDiskId: 105 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 106 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: 
Normal } PDiskStateInfo { PDiskId: 107 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195776Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 36, response# PDiskStateInfo { PDiskId: 108 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 109 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 110 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195790Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 37, response# PDiskStateInfo { PDiskId: 111 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 112 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195805Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 38, response# PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 116 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195819Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 78 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 79 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 80 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195835Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 81 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 82 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 83 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195852Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 84 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 
TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 85 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 86 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195868Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 87 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 88 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 89 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195884Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 90 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 91 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 92 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2024-11-21T17:50:26.195896Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2024-11-21T17:50:26.206897Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:26.206979Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: REPLACE_DEVICES Host: "25" Devices: "pdisk-25-75" Devices: "pdisk-25-76" Devices: "pdisk-25-77" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: REPLACE_DEVICES Host: "25" Devices: "pdisk-25-75" Devices: "pdisk-25-76" Devices: "pdisk-25-77" Duration: 60000000 } Deadline: 180128512 } } 2024-11-21T17:50:26.206990Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.128512Z 2024-11-21T17:50:26.219599Z node 25 :CMS INFO: Adding lock for PDisk 25:77 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:26.219618Z node 25 :CMS INFO: Adding lock for PDisk 25:75 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:26.219621Z node 25 :CMS INFO: Adding lock for PDisk 25:76 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:26.219721Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:26.219734Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:26.219743Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:26.220074Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:26.220080Z node 25 :CMS DEBUG: Checking action: 
Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2024-11-21T17:50:26.220086Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:26.220132Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:26.220148Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:26.220153Z node 25 :CMS INFO: Adding lock for Host ::1:12010 (34) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:26.220160Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:26.220185Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.230024Z, action# Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2024-11-21T17:50:26.231095Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:26.231175Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Deadline: 180230024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12010 } } } } >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2024-11-21T17:50:22.454777Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:22.455499Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:22.456204Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:22.456780Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:22.456805Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:22.456864Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:22.457177Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:22.457213Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:22.457401Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:22.457618Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:22.458382Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:22.458418Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:22.458444Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:22.458464Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:22.474146Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:22.495505Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:22.495614Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:22.496700Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:22.496826Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:22.496831Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:22.496836Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:22.496839Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:22.496885Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:22.496903Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:22.496971Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:22.498146Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:22.519150Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:22.519197Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:22.545287Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:22.545315Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:22.545382Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:22.545590Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120030512 } Timestamp: 120030512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... pdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2024-11-21T17:50:28.227172Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2024-11-21T17:50:28.227175Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2024-11-21T17:50:28.227178Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2024-11-21T17:50:28.227182Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2024-11-21T17:50:28.227244Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227381Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227397Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227410Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227417Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227427Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227433Z node 25 :CMS DEBUG: 
[Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227439Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2024-11-21T17:50:28.227446Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2024-11-21T17:50:28.227477Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2024-11-21T17:50:28.227485Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2024-11-21T17:50:28.227489Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2024-11-21T17:50:28.227520Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2024-11-21T17:50:28.227562Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2024-11-21T17:50:28.227580Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2024-11-21T17:50:28.227583Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2024-11-21T17:50:28.227585Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2024-11-21T17:50:28.238407Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2024-11-21T17:50:28.238430Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2024-11-21T17:50:28.249528Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:28.249554Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:28.249570Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2024-11-21T17:50:28.249689Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T17:50:28.249702Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2024-11-21T17:50:28.249712Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.249721Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:28.249724Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:28.249727Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:28.249731Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.249755Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:28.249764Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2024-11-21T17:50:28.249776Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:28.249812Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# 
user-p-1, validity# 1970-01-01T00:13:00.128512Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2024-11-21T17:50:28.249830Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T17:50:28.264668Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:28.264746Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780128512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2024-11-21T17:50:28.264754Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.128512Z 2024-11-21T17:50:28.276190Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2024-11-21T17:50:28.276299Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:28.276323Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:28.276339Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2024-11-21T17:50:28.276471Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T17:50:28.276483Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2024-11-21T17:50:28.276494Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:28.276500Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.276522Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:28.276530Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2024-11-21T17:50:28.276541Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:28.276575Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.230024Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2024-11-21T17:50:28.276594Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T17:50:28.287429Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:28.287517Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# 
NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780230024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T17:50:28.287642Z node 25 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:28.287652Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:28.287665Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:28.287687Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2024-11-21T17:50:28.287704Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2024-11-21T17:50:28.287709Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:28.298465Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:28.298524Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:28.298631Z node 25 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:28.298640Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:28.298652Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:28.298690Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2024-11-21T17:50:28.298707Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2024-11-21T17:50:28.298719Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:28.309386Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:28.309444Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2024-11-21T17:50:24.454483Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:24.454757Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:24.455863Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:24.455899Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:24.456064Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:24.456150Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:24.456766Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:24.456796Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:24.456820Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:24.456875Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:24.457829Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:24.457850Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:24.457867Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:24.457890Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:24.473012Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:24.494031Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:24.494141Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:24.494986Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:24.495089Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:24.495151Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:24.495154Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:24.495161Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:24.495164Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:24.495191Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:24.495211Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:24.496266Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 
4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:24.516957Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:24.516992Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:24.543263Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2024-11-21T17:50:24.543404Z node 1 :CMS NOTICE: Couldn't collect cluster state. 2024-11-21T17:50:24.543456Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2024-11-21T17:50:24.543485Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2024-11-21T17:50:24.594404Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:24.646559Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2024-11-21T17:50:24.646625Z node 1 :CMS NOTICE: Couldn't collect cluster state. 2024-11-21T17:50:24.646678Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2024-11-21T17:50:24.657726Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2024-11-21T17:50:24.657809Z node 1 :CMS NOTICE: Couldn't collect cluster state. 2024-11-21T17:50:24.657880Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvNotification { User: "user" Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 } Time: 720129512 }, response# NKikimr::NCms::TEvCms::TEvNotificationResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2024-11-21T17:50:24.669109Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:24.669142Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:24.669213Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:24.669354Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-1-1" Duration: 60000000 } Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:24.669362Z node 1 :CMS DEBUG: Checking action: Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-1-1" Duration: 60000000 2024-11-21T17:50:24.669409Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:24.669421Z node 1 :CMS DEBUG: Checking action: Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 2024-11-21T17:50:24.669439Z node 1 :CMS DEBUG: Result: DISALLOW (reason: The request is incorrect: too many disks from the one group. 
Fix the request or set PartialPermissionAllowed to true) 2024-11-21T17:50:24.669462Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:24.669470Z node 1 :CMS INFO: Adding lock for PDisk 1:1 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:24.669481Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:24.669516Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.329512Z, action# Type: REPLACE_DEVICES Host: "1" Devices: "pdis ... t: "17" Services: "storage" Duration: 60000000 } Deadline: 180027512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T17:50:28.078167Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:28.130362Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2024-11-21T17:50:28.130377Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2024-11-21T17:50:28.130379Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2024-11-21T17:50:28.130381Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2024-11-21T17:50:28.130383Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2024-11-21T17:50:28.130385Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2024-11-21T17:50:28.130387Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2024-11-21T17:50:28.130388Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2024-11-21T17:50:28.130479Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:28.130516Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:28.130558Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:28.130564Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130573Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130583Z node 17 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:28.130585Z node 17 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:28.130587Z node 17 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:28.130590Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130623Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T17:50:28.130638Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 
2024-11-21T17:50:28.130641Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130644Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130646Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130658Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T17:50:28.130668Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:28.130671Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130676Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130678Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130689Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } } 2024-11-21T17:50:28.130697Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:28.130700Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130703Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 20, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130705Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130715Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "20" Services: 
"storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 20 InterconnectPort: 12004 } } } } 2024-11-21T17:50:28.130723Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:28.130725Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130728Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 21, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130730Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130740Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12005 } } } } 2024-11-21T17:50:28.130749Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:28.130752Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130755Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130757Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130767Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12006 } } } } 2024-11-21T17:50:28.130775Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:28.130778Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130780Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130782Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130792Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] 
Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12007 } } } } 2024-11-21T17:50:28.130801Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:28.130804Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.130806Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.130808Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.130818Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Deadline: 180127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12008 } } } } >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::Mirror3dcPermissions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] Test command err: 2024-11-21T17:50:23.068364Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:23.071097Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:23.071185Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:23.071632Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:23.071821Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:23.071982Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:23.073663Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:23.073695Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:23.073747Z node 1 :CMS DEBUG: 
Using default config 2024-11-21T17:50:23.073777Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:23.074183Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:23.074205Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:23.074225Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:23.074270Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:23.091034Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:23.102109Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:23.102207Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:23.103480Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.103585Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:23.103593Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:23.103602Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:23.103606Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:23.103638Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:23.103701Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:23.105056Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:23.115282Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:23.147186Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.147252Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:23.147460Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2024-11-21T17:50:23.147526Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:23.171681Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.171768Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.171871Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 
120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120027000 } } 2024-11-21T17:50:23.202377Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:23.263798Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:23.263857Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2024-11-21T17:50:23.263874Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.297521Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.297554Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.297566Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.297609Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:23.297615Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:23.297623Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:23.297628Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:23.297630Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:23.297632Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:23.297635Z node 1 :CMS DEBUG: 
Result: ALLOW 2024-11-21T17:50:23.297649Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:23.297654Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:23.297662Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:23.297693Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.128000Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:23.308645Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:23.308730Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2024-11-21T17:50:23.308739Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.128000Z 2024-11-21T17:50:23.319593Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:23.319633Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.319647Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.319655Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:23.319689Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:23.319694Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:23.319700Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:23.319704Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:23.319713Z node 1 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:23.319717Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:23.319722Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:23.319750Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.229512Z, action# Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:23.330427Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:23.330498Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } Deadline: 180229512 Extentions { Type: HostInfo Hosts { Name: "::1" State: 
UP NodeId: 2 InterconnectPort: 12002 } } } } 2024-11-21T17:50:23.341352Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:23.341370Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:23.341396Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:23.341410Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:23.341418Z node ... ock for Host ::1:12004 (28) (permission user-p-4 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.081193Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:29.081213Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.337048Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2024-11-21T17:50:29.081218Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-4, validity# 1970-01-01T00:03:00.337048Z, action# Type: SHUTDOWN_HOST Host: "28" Duration: 60000000 2024-11-21T17:50:29.081238Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'29\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'30\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'31\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'32\' of tenant \'user0\': too many unavailable nodes. 
Locked: 2, down: 0, total: 8, limit: 30%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:29.091793Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:29.091855Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180337048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } Permissions { Id: "user-p-4" Action { Type: SHUTDOWN_HOST Host: "28" Duration: 60000000 } Deadline: 180337048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 28 InterconnectPort: 12004 } } } } 2024-11-21T17:50:29.091961Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 30 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2024-11-21T17:50:29.091997Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:29.102676Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:29.102754Z node 25 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2024-11-21T17:50:29.113733Z node 25 :CMS INFO: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.113754Z node 25 :CMS INFO: Adding lock for Host ::1:12004 (28) (permission user-p-4 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.113795Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:29.113811Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:29.113820Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:29.113866Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'29\' of tenant \'user0\': too many unavailable nodes. 
Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'30\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'31\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'32\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:29.113874Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'29\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } 2024-11-21T17:50:29.113881Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 29, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:29.113883Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 29, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:29.113887Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:29.113897Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'30\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } 2024-11-21T17:50:29.113901Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 3, down nodes: 0 2024-11-21T17:50:29.113904Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 3, down nodes: 0 2024-11-21T17:50:29.113908Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:29.113916Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'31\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } 2024-11-21T17:50:29.113920Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 4, down nodes: 0 2024-11-21T17:50:29.113923Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 4, down nodes: 0 2024-11-21T17:50:29.113927Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:29.113934Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'32\' of tenant \'user0\': too many unavailable nodes. 
Locked: 2, down: 0, total: 8, limit: 30%" } 2024-11-21T17:50:29.113938Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2024-11-21T17:50:29.113941Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2024-11-21T17:50:29.113944Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:29.113961Z node 25 :CMS DEBUG: Accepting permission: id# user-p-5, requestId# user-r-1, owner# user 2024-11-21T17:50:29.113966Z node 25 :CMS INFO: Adding lock for Host ::1:12005 (29) (permission user-p-5 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.113971Z node 25 :CMS DEBUG: Accepting permission: id# user-p-6, requestId# user-r-1, owner# user 2024-11-21T17:50:29.113976Z node 25 :CMS INFO: Adding lock for Host ::1:12006 (30) (permission user-p-6 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.113980Z node 25 :CMS DEBUG: Accepting permission: id# user-p-7, requestId# user-r-1, owner# user 2024-11-21T17:50:29.113984Z node 25 :CMS INFO: Adding lock for Host ::1:12007 (31) (permission user-p-7 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.113988Z node 25 :CMS DEBUG: Accepting permission: id# user-p-8, requestId# user-r-1, owner# user 2024-11-21T17:50:29.114006Z node 25 :CMS INFO: Adding lock for Host ::1:12008 (32) (permission user-p-8 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.114014Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:29.114074Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-5, validity# 1970-01-01T00:03:00.440072Z, action# Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 2024-11-21T17:50:29.114085Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-6, validity# 1970-01-01T00:03:00.440072Z, action# Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 2024-11-21T17:50:29.114110Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-7, validity# 1970-01-01T00:03:00.440072Z, action# Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 2024-11-21T17:50:29.114120Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-8, validity# 1970-01-01T00:03:00.440072Z, action# Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 2024-11-21T17:50:29.114128Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:29.124813Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:29.124889Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-5" Action { Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 } Deadline: 180440072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 29 InterconnectPort: 12005 } } } Permissions { Id: "user-p-6" Action { Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 } Deadline: 180440072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12006 } } } Permissions { Id: "user-p-7" Action { Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 } Deadline: 180440072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 31 InterconnectPort: 12007 } } } Permissions { Id: "user-p-8" Action { Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 } Deadline: 180440072 
Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 32 InterconnectPort: 12008 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2024-11-21T17:50:24.917470Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:24.918097Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:24.919347Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:24.919387Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:24.919596Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:24.919617Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:24.919660Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:24.919685Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:24.919738Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:24.919747Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:24.920631Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:24.920644Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:24.920661Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:24.920683Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:24.935431Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:24.967394Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:24.967493Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:24.968914Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:24.969004Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:24.969009Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:24.969016Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:24.969019Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:24.969061Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:24.969089Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:24.969124Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:24.970980Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 
1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId 
{ NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:25.002925Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:25.002980Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:26.524850Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:26.525464Z node 9 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:26.527506Z node 9 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:26.527564Z node 9 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:26.527838Z node 9 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:26.527996Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:26.528010Z node 9 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:26.528027Z node 9 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:26.528050Z node 9 :CMS DEBUG: Using default config. 
2024-11-21T17:50:26.528095Z node 9 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:26.529854Z node 9 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:26.529880Z node 9 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:26.529908Z node 9 :CMS DEBUG: Using default config 2024-11-21T17:50:26.529924Z node 9 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:26.551275Z node 9 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:26.572369Z node 9 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:26.572482Z node 9 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.572504Z node 9 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.572541Z node 9 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.572594Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:26.572598Z node 9 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:26.572603Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:26.572606Z node 9 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:26.572611Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:26.572620Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:26.572712Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 9 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } Group { GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:26.593591Z node 9 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.593639Z node 9 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { ... 
on: 60000000 2024-11-21T17:50:28.930224Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:28.930248Z node 18 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:28.930250Z node 18 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:28.930252Z node 18 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:28.930254Z node 18 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.930260Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.930263Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:28.930289Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T17:50:28.930300Z node 18 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:28.930305Z node 18 :CMS INFO: Adding lock for Host ::1:12001 (18) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:28.930313Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:28.930340Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.027512Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.930358Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:28.981272Z node 18 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:29.033814Z node 18 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:29.033919Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180027512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12001 } } } } 2024-11-21T17:50:29.033932Z node 18 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.027512Z 2024-11-21T17:50:29.034060Z node 18 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:29.034070Z node 18 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:29.034085Z node 18 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:29.034107Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:29.045010Z node 18 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:29.045076Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:29.056301Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:29.056339Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:29.056356Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:29.056509Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -100 2024-11-21T17:50:29.056518Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2024-11-21T17:50:29.056528Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:29.056563Z node 18 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:29.056578Z node 18 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:29.056585Z node 18 :CMS INFO: Adding lock for Host ::1:12002 (19) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.056595Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:29.056632Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.131024Z, action# Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2024-11-21T17:50:29.067558Z node 18 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:29.067652Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# 
NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -100 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180131024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12002 } } } } 2024-11-21T17:50:29.079134Z node 18 :CMS INFO: Adding lock for Host ::1:12002 (19) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.079239Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:29.079258Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:29.079271Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:29.079432Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:29.079444Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2024-11-21T17:50:29.079453Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:29.079461Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '19': node state: 'Locked') 2024-11-21T17:50:29.079480Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:29.079526Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:29.090406Z node 18 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:29.090475Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'19\': node state: \'Locked\'" } RequestId: "user-r-1" Deadline: 420232536 } 2024-11-21T17:50:29.090593Z node 18 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:29.090603Z node 18 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:29.090615Z node 18 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:29.090638Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:29.101494Z node 18 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:29.101557Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:29.113161Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:29.113201Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:29.113218Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:29.113373Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:29.113384Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } 2024-11-21T17:50:29.113394Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:29.113429Z node 18 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:29.113450Z node 18 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2024-11-21T17:50:29.113457Z node 18 :CMS INFO: Adding lock for Host ::1:12002 (19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:29.113468Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:29.113507Z node 18 :CMS NOTICE: [AuditLog] [CMS 
tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.335560Z, action# Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2024-11-21T17:50:29.113519Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:29.124399Z node 18 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:29.124498Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180335560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12002 } } } } >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TCmsTest::RequestRestartServicesRejectSecond ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] Test command err: 2024-11-21T17:50:21.199991Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:21.202426Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:21.202512Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:21.202845Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:21.203029Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:21.203205Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:21.204829Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:21.204858Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:21.204906Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:21.204935Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:21.205325Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:21.205346Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:21.205367Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:21.205414Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:21.222113Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:21.232918Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:21.233007Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.233883Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:21.233957Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:21.233961Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:21.233967Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:21.233970Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:21.233992Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:21.234015Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:21.235209Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:21.245566Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:21.277151Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:21.277193Z node 1 :CMS DEBUG: Updated config: 
TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:21.303804Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:21.303837Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:21.303908Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:21.304241Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: 
"vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... :27.003067Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "27" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request. Down: Host ::1:12002 (26) is down" } Deadline: 420135512 } 2024-11-21T17:50:27.003185Z node 25 :CMS INFO: OnTabletDead: 72057594037936128 2024-11-21T17:50:27.003189Z node 25 :CMS DEBUG: TCms::Cleanup 2024-11-21T17:50:27.004278Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:27.004710Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:27.004742Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:27.004926Z node 25 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:27.004986Z node 25 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.005052Z node 25 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.005086Z node 25 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.005096Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.005128Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:27.005153Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2024-11-21T17:50:27.026278Z node 25 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.049623Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.049687Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.050647Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:27.050656Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.050664Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.050699Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:1:5:0] (::1:/pdisk.data) is locked by this request. 
Down: Host ::1:12008 (32) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down) 2024-11-21T17:50:27.050750Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:1:5:0] (::1:/pdisk.data) is locked by this request. Down: Host ::1:12008 (32) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down" } Deadline: 420241512 } 2024-11-21T17:50:27.050848Z node 25 :CMS INFO: OnTabletDead: 72057594037936128 2024-11-21T17:50:27.050852Z node 25 :CMS DEBUG: TCms::Cleanup 2024-11-21T17:50:27.052031Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:27.052649Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:27.052702Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:27.052905Z node 25 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:27.052969Z node 25 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.053048Z node 25 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.053087Z node 25 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.053098Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.053128Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:27.053157Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2024-11-21T17:50:27.074464Z node 25 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.100064Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.100152Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.102076Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:27.102098Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.102111Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2024-11-21T17:50:27.102169Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:2:7:0] (::1:/pdisk.data) is locked by this request. 
Down: Host ::1:12005 (29) is down, Host ::1:12002 (26) is down) 2024-11-21T17:50:27.102238Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Number of data centers with unavailable vdisks: 3. Locked: VDisk [0:1:2:7:0] (::1:/pdisk.data) is locked by this request. Down: Host ::1:12005 (29) is down, Host ::1:12002 (26) is down" } Deadline: 420347512 } 2024-11-21T17:50:27.112917Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:27.126896Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.126973Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.128248Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:27.128268Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.128280Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2024-11-21T17:50:27.128610Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:27.128698Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } Deadline: 180447512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12006 } } } } 2024-11-21T17:50:27.128860Z node 25 :CMS INFO: OnTabletDead: 72057594037936128 2024-11-21T17:50:27.128867Z node 25 :CMS DEBUG: TCms::Cleanup 2024-11-21T17:50:27.130442Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:27.131111Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:27.131152Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:27.131441Z node 25 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:27.131534Z node 25 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.131638Z node 25 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.131693Z node 25 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.131707Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.131745Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:27.131781Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: 
NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2024-11-21T17:50:27.153027Z node 25 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.176454Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.176519Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.177419Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "27" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:27.177426Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "27" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.177435Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2024-11-21T17:50:27.177470Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request. Down: Host ::1:12006 (30) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down) 2024-11-21T17:50:27.177520Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "27" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request. Down: Host ::1:12006 (30) is down, Host ::1:12005 (29) is down, Host ::1:12002 (26) is down" } Deadline: 420553512 } >> BsControllerConfig::MoveGroups >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TUserAccountServiceTest::Get ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] Test command err: 2024-11-21T17:50:26.448780Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:26.449052Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:26.451082Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:26.451126Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:26.451547Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:26.451565Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:26.451582Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:26.451598Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:26.451622Z node 1 :CMS DEBUG: Using default config. 
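The DISALLOW_TEMP and ALLOW decisions recorded above reduce to a per-group availability check: CMS counts how many vdisks of the affected storage group are already down or locked and refuses a new lock once the group could not tolerate another one for the requested availability mode. The sketch below only models that shape against the numbers visible in these records; the thresholds (one unavailable vdisk in total under MODE_MAX_AVAILABILITY, the erasure tolerance under MODE_KEEP_AVAILABLE, no group check under MODE_FORCE_RESTART) are assumptions read off this log, not the actual NKikimr::NCms implementation.

def vdisk_lock_allowed(down, locked, mode, erasure_tolerance=2):
    # down/locked: vdisks of the affected group that are already unavailable;
    # erasure_tolerance: e.g. 2 for a block-4-2 group (8 vdisks, any 2 may fail).
    if mode == "MODE_FORCE_RESTART":
        return True                      # group availability is not checked
    unavailable = down + locked + 1      # +1 for the vdisk this request would lock
    if mode == "MODE_MAX_AVAILABILITY":
        return unavailable <= 1          # nothing else in the group may be unavailable
    if mode == "MODE_KEEP_AVAILABLE":
        return unavailable <= erasure_tolerance
    raise ValueError(f"unknown availability mode: {mode}")

For example, the host "27" request above, issued while host ::1:12002 of the same group is down, maps to vdisk_lock_allowed(down=1, locked=0, mode="MODE_MAX_AVAILABILITY") and is refused, which is the DISALLOW_TEMP with "too many unavailable vdisks" in the reply.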
2024-11-21T17:50:26.451678Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:26.452597Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:26.452624Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:26.452642Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:26.452663Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:26.468478Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:26.490122Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:26.490234Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.491601Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.491781Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:26.491792Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:26.491804Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:26.491809Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:26.491888Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:26.491922Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:26.492017Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.493924Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 
4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:26.514766Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.514812Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:26.542408Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:26.542448Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:26.542528Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:26.542845Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-5-5" State: UP 
Timestamp: 300029512 } Timestamp: 300029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 3000 ... 00 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 300028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 300028000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 300028000 } Timestamp: 300028000 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 300028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 300028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 300028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 300028000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 300028000 } Timestamp: 300028000 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 300028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 300028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 300028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 300028000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 300028000 } Timestamp: 300028000 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 300028000 } } 2024-11-21T17:50:30.238213Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:30.238223Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 2024-11-21T17:50:30.238234Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:30.238268Z node 17 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:30.238271Z node 17 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:30.238273Z node 17 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:30.238275Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.238284Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2024-11-21T17:50:30.238286Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:30.238298Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 2024-11-21T17:50:30.238303Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 2024-11-21T17:50:30.238305Z node 17 :CMS DEBUG: [Nodes Counter] Checking 
Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:30.238311Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 2024-11-21T17:50:30.238323Z node 17 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:30.238328Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:30.238340Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:30.238380Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:06:00.028000Z, action# Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 2024-11-21T17:50:30.238406Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:30.268968Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:30.310422Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:30.310530Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } Deadline: 360028000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T17:50:30.310542Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:08:00.028000Z 2024-11-21T17:50:30.334377Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:30.334481Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:30.334558Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:30.334724Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:30.334735Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 Issue { 
Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2024-11-21T17:50:30.334746Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:30.334792Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.334805Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2024-11-21T17:50:30.334809Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:30.334824Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 2024-11-21T17:50:30.334843Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T17:50:30.334849Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:30.334859Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:30.334895Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.130512Z, action# Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2024-11-21T17:50:30.334918Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:30.345751Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:30.345776Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:30.345858Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Deadline: 360130512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T17:50:30.357211Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:30.357241Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:30.357320Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:30.357405Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:30.357541Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:30.357550Z node 17 
:CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2024-11-21T17:50:30.357558Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:30.357591Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.357608Z node 17 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2024-11-21T17:50:30.357612Z node 17 :CMS INFO: Adding lock for Host ::1:12003 (19) (permission user-p-3 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:30.357620Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:30.357639Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:06:00.232024Z, action# Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 2024-11-21T17:50:30.357646Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:30.368575Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:30.368600Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:30.368675Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } Deadline: 360232024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } } >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TUserAccountServiceTest::Get [GOOD] >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TCmsTest::StateStorageLockedNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2024-11-21T17:50:31.900011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791449919916488:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:31.900195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001402/r3tmp/tmp50KQeb/pdisk_1.dat 2024-11-21T17:50:31.947437Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9293 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:32.001394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:32.001436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:32.002447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:32.033080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... |81.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |81.4%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> FolderServiceTest::TFolderServiceTransitional ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] Test command err: 2024-11-21T17:50:26.406985Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:26.407448Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:26.409180Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:26.409239Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:26.409565Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:26.409708Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:26.410903Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:26.410954Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:26.410991Z node 1 :CMS DEBUG: Using default config. 
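Further down in this dump the same request/response pattern is applied to state storage instead of a storage group: each ring is classified (Ok, Locked, Restart, Disabled) and the request is rejected with "Too many unavailable state storage rings" once the number of rings that would become unavailable exceeds the limit reported for the availability mode (1 in the MODE_MAX_AVAILABILITY record, 4 in the nine-ring MODE_KEEP_AVAILABLE records). A minimal sketch of that counting rule, with the limits taken from these log lines rather than from the CMS sources:

def ring_lock_allowed(ring_states, requested_ring, max_unavailable):
    # ring_states: per-ring states as printed in the log ("Ok", "Locked",
    # "Restart", "Disabled"); requested_ring: index of the ring this request locks.
    unavailable = {i for i, s in enumerate(ring_states) if s != "Ok"}
    unavailable.add(requested_ring)          # the ring taken down by this request
    return len(unavailable) <= max_unavailable

With rings 1 and 2 restarting and rings 3 and 4 temporarily locked, locking a ring that is already counted keeps the total at 4 and is allowed (the host "23" ALLOW below), while locking a fifth, healthy ring would exceed the limit of 4 and is refused (the host "24" DISALLOW_TEMP below).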
2024-11-21T17:50:26.411094Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:26.412709Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:26.412750Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:26.412782Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:26.412815Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:26.432318Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:26.453614Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:26.453771Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.455195Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.455304Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.455368Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:26.455373Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:26.455381Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:26.455385Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:26.455417Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:26.455436Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:26.457379Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 
4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:26.478205Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.478244Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:26.504748Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:26.504781Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:26.504864Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:26.505178Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-5-5" State: UP 
Timestamp: 120029512 } Timestamp: 120029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... 8.239064Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 12, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:28.239076Z node 9 :CMS DEBUG: Ring: 0; State: Restart 2024-11-21T17:50:28.239079Z node 9 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:28.239082Z node 9 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:28.239085Z node 9 :CMS DEBUG: Ring: 3; State: Ok 2024-11-21T17:50:28.239088Z node 9 :CMS DEBUG: Ring: 4; State: Ok 2024-11-21T17:50:28.239093Z node 9 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:28.239142Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "12" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "12" Services: "storage" Duration: 60000000 } Deadline: 180134512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 12 InterconnectPort: 12004 } } } } 2024-11-21T17:50:28.251813Z node 9 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:28.264799Z node 9 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:28.264896Z node 9 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:28.264951Z node 9 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "12" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:28.264962Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "12" Services: "storage" Duration: 60000000 2024-11-21T17:50:28.264978Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 12, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:28.264990Z node 9 :CMS DEBUG: Ring: 0; State: Restart 2024-11-21T17:50:28.264993Z node 9 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:28.264996Z node 9 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:28.264999Z node 9 :CMS DEBUG: Ring: 3; State: Ok 2024-11-21T17:50:28.265002Z node 9 :CMS DEBUG: Ring: 4; State: Ok 2024-11-21T17:50:28.265011Z node 9 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Too many unavailable state storage rings. Restarting rings: 1. Temporary (for a 2 minutes) locked rings: 0. Maximum allowed number of unavailable rings for this mode: 1) 2024-11-21T17:50:28.265044Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "12" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Too many unavailable state storage rings. 
Restarting rings: 1. Temporary (for a 2 minutes) locked rings: 0. Maximum allowed number of unavailable rings for this mode: 1" } Deadline: 420234512 } 2024-11-21T17:50:30.338123Z node 19 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:30.340351Z node 19 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:30.340486Z node 19 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:30.340522Z node 19 :CMS DEBUG: Using default config. 2024-11-21T17:50:30.340587Z node 19 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:30.340624Z node 19 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:30.341839Z node 19 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:30.341908Z node 19 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:30.342249Z node 19 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:30.342262Z node 19 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:30.343815Z node 19 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:30.344079Z node 19 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:30.344141Z node 19 :CMS DEBUG: Using default config 2024-11-21T17:50:30.344165Z node 19 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:30.357948Z node 19 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:30.379740Z node 19 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:30.379871Z node 19 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:30.379894Z node 19 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:30.380008Z node 19 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:30.380014Z node 19 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:30.380024Z node 19 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:30.380028Z node 19 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:30.380043Z node 19 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:30.380055Z node 19 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:30.380078Z node 19 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:30.380163Z node 19 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:30.401282Z node 19 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:30.401339Z node 19 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:30.401829Z node 19 :CMS INFO: OnTabletDead: 72057594037936128 2024-11-21T17:50:30.401842Z node 19 :CMS DEBUG: TCms::Cleanup 2024-11-21T17:50:30.403941Z node 19 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:30.404587Z node 19 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:30.404625Z node 19 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:30.404903Z node 19 :CMS DEBUG: 
TTxInitScheme Execute 2024-11-21T17:50:30.404970Z node 19 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:30.405086Z node 19 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:30.405142Z node 19 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:30.405157Z node 19 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:30.405216Z node 19 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:30.405275Z node 19 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2024-11-21T17:50:30.520561Z node 19 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:30.553152Z node 19 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:30.553216Z node 19 :CMS DEBUG: Timestamp: 1970-01-01T00:03:30Z 2024-11-21T17:50:30.553262Z node 19 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:30.553270Z node 19 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.553282Z node 19 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2024-11-21T17:50:30.553290Z node 19 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:30.553295Z node 19 :CMS DEBUG: Ring: 1; State: Restart 2024-11-21T17:50:30.553299Z node 19 :CMS DEBUG: Ring: 2; State: Restart 2024-11-21T17:50:30.553302Z node 19 :CMS DEBUG: Ring: 3; State: Locked 2024-11-21T17:50:30.553305Z node 19 :CMS DEBUG: Ring: 4; State: Locked 2024-11-21T17:50:30.553308Z node 19 :CMS DEBUG: Ring: 5; State: Ok 2024-11-21T17:50:30.553311Z node 19 :CMS DEBUG: Ring: 6; State: Ok 2024-11-21T17:50:30.553314Z node 19 :CMS DEBUG: Ring: 7; State: Ok 2024-11-21T17:50:30.553317Z node 19 :CMS DEBUG: Ring: 8; State: Ok 2024-11-21T17:50:30.553324Z node 19 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Too many unavailable state storage rings. Restarting rings: 2. Temporary (for a 2 minutes) locked rings: 2. Disabled rings: 0. Maximum allowed number of unavailable rings for this mode: 4) 2024-11-21T17:50:30.553351Z node 19 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Too many unavailable state storage rings. Restarting rings: 2. Temporary (for a 2 minutes) locked rings: 2. Disabled rings: 0. 
Maximum allowed number of unavailable rings for this mode: 4" } Deadline: 510133512 } 2024-11-21T17:50:30.564398Z node 19 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:30.575529Z node 19 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:30.575601Z node 19 :CMS DEBUG: Timestamp: 1970-01-01T00:03:30Z 2024-11-21T17:50:30.575650Z node 19 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:30.575660Z node 19 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.575673Z node 19 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2024-11-21T17:50:30.575681Z node 19 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:30.575686Z node 19 :CMS DEBUG: Ring: 1; State: Restart 2024-11-21T17:50:30.575690Z node 19 :CMS DEBUG: Ring: 2; State: Restart 2024-11-21T17:50:30.575693Z node 19 :CMS DEBUG: Ring: 3; State: Locked 2024-11-21T17:50:30.575696Z node 19 :CMS DEBUG: Ring: 4; State: Locked 2024-11-21T17:50:30.575698Z node 19 :CMS DEBUG: Ring: 5; State: Ok 2024-11-21T17:50:30.575701Z node 19 :CMS DEBUG: Ring: 6; State: Ok 2024-11-21T17:50:30.575704Z node 19 :CMS DEBUG: Ring: 7; State: Ok 2024-11-21T17:50:30.575706Z node 19 :CMS DEBUG: Ring: 8; State: Ok 2024-11-21T17:50:30.575710Z node 19 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.575756Z node 19 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Deadline: 270233512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12005 } } } } >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD] >> FolderServiceTest::TFolderService >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:12.655289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.655319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.655325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 
100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.655330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.655336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.655340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.655349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.655436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.665703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.665722Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.668755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.669381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.669411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.670280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.670405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:12.670474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.670524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.671125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.671324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.671331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.671360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.671365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.671369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.671379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.672324Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.686528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.686590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.686642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T17:50:12.686672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.686678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.687341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.687367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.687408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.687418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.687422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.687427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.687917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.687930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.687935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.688505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.688519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.688524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.688530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.689160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.689541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.689591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.689749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.689776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.689786Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.689839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.689845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.689869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.689880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.690280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.690288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.690327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.690332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.690404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.690410Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.690418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.690422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.690426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.690430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.690434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.690438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.690448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.690454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.690458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.690743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.690758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.690762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2024-11-21T17:50:12.690767Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.690771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.690785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 74976720762 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976720762 2024-11-21T17:50:32.374101Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T17:50:32.374107Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T17:50:32.374113Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2024-11-21T17:50:32.374134Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:32.374192Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.374199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.374201Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T17:50:32.374204Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:50:32.374207Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:32.374297Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.374303Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.374305Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T17:50:32.374307Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2024-11-21T17:50:32.374309Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:50:32.374314Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 
2024-11-21T17:50:32.374672Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T17:50:32.374718Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2024-11-21T17:50:32.374722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2024-11-21T17:50:32.374727Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2024-11-21T17:50:32.374918Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2024-11-21T17:50:32.374941Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2024-11-21T17:50:32.375017Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.375044Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:32.375072Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 17179871338 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:32.375078Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2024-11-21T17:50:32.375097Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T17:50:32.375105Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2024-11-21T17:50:32.375109Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2024-11-21T17:50:32.375117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:32.375125Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:32.375130Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2024-11-21T17:50:32.375136Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2024-11-21T17:50:32.375141Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2024-11-21T17:50:32.375144Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2024-11-21T17:50:32.375151Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:50:32.375156Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, 
publications: 2, subscribers: 1 2024-11-21T17:50:32.375159Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:50:32.375163Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:50:32.375441Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.375674Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:32.375679Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:32.375699Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:32.375712Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:32.375718Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:332:2310], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2024-11-21T17:50:32.375722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:332:2310], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2024-11-21T17:50:32.375826Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.375832Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.375836Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T17:50:32.375839Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:50:32.375843Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:32.375911Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.375918Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.375921Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T17:50:32.375925Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 
18446744073709551615 2024-11-21T17:50:32.375928Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:32.375935Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2024-11-21T17:50:32.375940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:285:2273] 2024-11-21T17:50:32.376460Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.376781Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T17:50:32.376813Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2024-11-21T17:50:32.376826Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2024-11-21T17:50:32.376834Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T17:50:32.376838Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2024-11-21T17:50:32.376842Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2024-11-21T17:50:32.377304Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T17:50:32.377327Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:32.377337Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:697:2641] TestWaitNotification: OK eventTxId 102 >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] Test command err: 2024-11-21T17:50:26.997751Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:26.998008Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:26.999025Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:26.999061Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:26.999242Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:26.999325Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:26.999962Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:26.999989Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:27.000008Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:27.000065Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:27.001043Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.001064Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.001081Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:27.001101Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.016410Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:27.037639Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.037780Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.039271Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.039415Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.039494Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:27.039499Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:27.039510Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:27.039515Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:27.039555Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:27.039580Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:27.041515Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 
4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:27.062379Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.062432Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.089680Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.089716Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:27.089796Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:27.090104Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-5-5" State: UP 
Timestamp: 120029512 } Timestamp: 120029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... 76405Z node 9 :CMS INFO: Adding lock for Host ::1:12001 (9) (permission test-user-p-1 until 1970-01-01T00:12:00Z) 2024-11-21T17:50:29.076471Z node 9 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:29.076487Z node 9 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:29.076496Z node 9 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:29.076632Z node 9 :CMS INFO: Check request: User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } PartialPermissionAllowed: false Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-2" 2024-11-21T17:50:29.076637Z node 9 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 2024-11-21T17:50:29.076645Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:29.076665Z node 9 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T17:50:29.076682Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:29.076732Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# test-user-r-2, owner# test-user, order# 2, priority# 0, body# User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:29.087449Z node 9 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:29.087507Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "10" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } PartialPermissionAllowed: false Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY MaintenanceTaskId: "task-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission test-user-p-1 owned by test-user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } RequestId: "test-user-r-2" Deadline: 420130512 } 2024-11-21T17:50:32.275785Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:32.276412Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:32.278594Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:32.278645Z node 25 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:32.278987Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:32.279049Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:32.279101Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:32.279119Z node 25 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:32.279141Z node 25 :CMS DEBUG: Using default config. 2024-11-21T17:50:32.279182Z node 25 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:32.280092Z node 25 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:32.280112Z node 25 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:32.280134Z node 25 :CMS DEBUG: Using default config 2024-11-21T17:50:32.280149Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:32.291187Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:32.322396Z node 25 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:32.322705Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:32.322737Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:32.322871Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:32.322875Z node 25 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:32.322881Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:32.322884Z node 25 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:32.322892Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:32.322913Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:32.322922Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:32.323027Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupGeneration: 1 VDiskIdx: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupGeneration: 1 VDiskIdx: 2 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailRealmIdx: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupGeneration: 1 FailRealmIdx: 1 FailDomainIdx: 1 VDiskIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupGeneration: 1 FailRealmIdx: 1 FailDomainIdx: 1 VDiskIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } 
GroupGeneration: 1 FailRealmIdx: 2 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupGeneration: 1 FailRealmIdx: 2 FailDomainIdx: 2 VDiskIdx: 1 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } GroupGeneration: 1 FailRealmIdx: 2 FailDomainIdx: 2 VDiskIdx: 2 } Group { GroupGeneration: 1 ErasureSpecies: "mirror-3-dc" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } } } } Success: true 2024-11-21T17:50:32.343974Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:32.344028Z node 25 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:32.366472Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:32.366497Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:32.366514Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:32.366655Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-1" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-2" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 25 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-1-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-0-1-1-1-1" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-0-1-1-1-2" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 26 InterconnectPort: 12002 Location { DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-2-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-0-1-2-2-1" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-0-1-2-2-2" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 27 InterconnectPort: 12003 Location { DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Timestamp: 120028512 } } 2024-11-21T17:50:32.366699Z node 25 :CMS INFO: Check request: User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "25" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_KEEP_AVAILABLE MaintenanceTaskId: "task-1" 2024-11-21T17:50:32.366705Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "25" Duration: 600000000 2024-11-21T17:50:32.366714Z node 25 :CMS DEBUG: [Nodes Counter] Checking 
Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:32.366751Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:32.366754Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:32.366756Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:32.366759Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:32.366772Z node 25 :CMS DEBUG: Accepting permission: id# test-user-p-1, requestId# test-user-r-1, owner# test-user 2024-11-21T17:50:32.366778Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission test-user-p-1 until 1970-01-01T00:12:00Z) 2024-11-21T17:50:32.366788Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:32.366831Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-1, validity# 1970-01-01T00:12:00.028512Z, action# Type: SHUTDOWN_HOST Host: "25" Duration: 600000000 2024-11-21T17:50:32.366838Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# test-user-r-1, owner# test-user 2024-11-21T17:50:32.417620Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:32.469729Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:32.469831Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "25" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_KEEP_AVAILABLE MaintenanceTaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "test-user-r-1" Permissions { Id: "test-user-p-1" Action { Type: SHUTDOWN_HOST Host: "25" Duration: 600000000 } Deadline: 720028512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2024-11-21T17:50:32.469842Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:32:00.028512Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2024-11-21T17:49:09.382898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791093829244653:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:09.383059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0006d2/r3tmp/tmpoDAJHE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21112, node 1 2024-11-21T17:49:09.440612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: FeatureFlags { EnableResourcePools: true } 2024-11-21T17:49:09.440636Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:09.441974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:09.441984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:09.441985Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:09.442027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6794 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:49:09.471403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.472383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.472399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.473018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:49:09.473097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:49:09.473108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:49:09.473541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:49:09.473598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:49:09.473605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:49:09.474053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.474915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349519, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.474929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:49:09.474983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:49:09.475355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:49:09.475396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:49:09.475406Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:49:09.475415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:49:09.475424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:49:09.475434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:49:09.475810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:49:09.475835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:49:09.475839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:49:09.475853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:49:09.483063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:09.483093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:09.484590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:09.659748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791093829245567:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.659778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.688833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.689146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.689154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.691988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2024-11-21T17:49:09.748756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349792, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.755003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:49:09.757023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791093829245782:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.757047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:09.759899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.760059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:09.760070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:49:09.762827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2024-11-21T17:49:09.766339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211349813, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:09.767847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m partitions 2 Fast forward 1m 2024-11-21T17:49:14.383338Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791093829244653:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:49:14.383372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2024-11-21T17:49:19.805922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:19.806018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:19.916584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:49:19.938151Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2024-11-21T17:49:19.938164Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found partitions 1 2024-11-21T17:49:21.803946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:49:21.804010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:49:21.808491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2024-11-21T17:49:21.817503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211841864, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:49:21.819573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710660, done: 0, blocked: 1 2024-11-21T17:49:21.822767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: ... 
hard 72057594046644480 2024-11-21T17:50:32.495366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2024-11-21T17:50:32.495370Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:50:32.495376Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037890:1:118:1:12288:11328:0] ] return ack processed 2024-11-21T17:50:32.495378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2024-11-21T17:50:32.495381Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:50:32.495393Z node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2024-11-21T17:50:32.495402Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037890:1:118:1:12288:11328:0] ] return ack processed 2024-11-21T17:50:32.495409Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:50:32.495420Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:50:32.495434Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2024-11-21T17:50:32.495665Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7439791450311535244:2692], serverId# [1:7439791450311535250:4653], sessionId# [0:0:0] 2024-11-21T17:50:32.495694Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7439791450311535243:2691], serverId# [1:7439791450311535249:4652], sessionId# [0:0:0] 2024-11-21T17:50:32.495740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439791136778919178 RawX2: 4503603922340144 } TabletId: 72075186224037890 State: 4 2024-11-21T17:50:32.495757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:32.495810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439791136778919178 RawX2: 4503603922340144 } TabletId: 72075186224037890 State: 4 2024-11-21T17:50:32.495817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:32.495918Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:50:32.495929Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:50:32.495929Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T17:50:32.495933Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state 
Offline to schemeshard 72057594046644480 2024-11-21T17:50:32.495962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439791437426632910 RawX2: 4503603922340449 } TabletId: 72075186224037892 State: 4 2024-11-21T17:50:32.495972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:32.495993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439791437426632910 RawX2: 4503603922340449 } TabletId: 72075186224037892 State: 4 2024-11-21T17:50:32.496000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:32.496010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439791437426632911 RawX2: 4503603922340450 } TabletId: 72075186224037891 State: 4 2024-11-21T17:50:32.496017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:32.496028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7439791437426632911 RawX2: 4503603922340450 } TabletId: 72075186224037891 State: 4 2024-11-21T17:50:32.496035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:50:32.496176Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2024-11-21T17:50:32.496178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:50:32.496189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:50:32.496190Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2024-11-21T17:50:32.496391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:50:32.496401Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2024-11-21T17:50:32.496408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:50:32.496412Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2024-11-21T17:50:32.496415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:50:32.496418Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2024-11-21T17:50:32.496420Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2024-11-21T17:50:32.496420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:50:32.497438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 
72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T17:50:32.497510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T17:50:32.497533Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2024-11-21T17:50:32.497543Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2024-11-21T17:50:32.497556Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2024-11-21T17:50:32.497571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T17:50:32.497587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2024-11-21T17:50:32.497608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2024-11-21T17:50:32.497632Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2024-11-21T17:50:32.497635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2024-11-21T17:50:32.497646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T17:50:32.497650Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2024-11-21T17:50:32.497664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2024-11-21T17:50:32.497688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T17:50:32.497698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:50:32.497705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2024-11-21T17:50:32.497716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:50:32.497720Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2024-11-21T17:50:32.497726Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2024-11-21T17:50:32.497922Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2024-11-21T17:50:32.497934Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2024-11-21T17:50:32.497939Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2024-11-21T17:50:32.498050Z node 1 :TX_DATASHARD INFO: OnTabletDead: 
72075186224037891 2024-11-21T17:50:32.498064Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2024-11-21T17:50:32.498347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T17:50:32.498360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2024-11-21T17:50:32.498495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T17:50:32.498515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2024-11-21T17:50:32.498524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2024-11-21T17:50:32.498528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2024-11-21T17:50:32.498531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T17:50:32.498533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2024-11-21T17:50:32.498537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T17:50:32.498543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2024-11-21T17:50:32.711130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791452533045355:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:32.711150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013c0/r3tmp/tmpCn5PSz/pdisk_1.dat 2024-11-21T17:50:32.759977Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:50:32.812444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:32.812472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:32.813522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:32.836452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:32.838295Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:33.010706Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791457743144431:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:33.010735Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013c0/r3tmp/tmpu4XS80/pdisk_1.dat 2024-11-21T17:50:33.021002Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:33.110904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:33.110939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:33.112039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:33.113230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> TAccessServiceTest::Authenticate >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> TCmsTest::PriorityRange [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TAccessServiceTest::Authenticate [GOOD] >> FolderServiceTest::TFolderService [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:03.119668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.119685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.119688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.119691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.119694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.119697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.119709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.119787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.128398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.128414Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.130487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.130576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.130600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.133062Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.133120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.133204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.133335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.133869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.134091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.134099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.134109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.134115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.134119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.134150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:03.135275Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.146348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.146412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.146455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.146483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.146488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.147086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.147116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.147149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:03.147157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.147159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.147162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.147534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.147543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.147546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.147842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.147848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.147853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.147858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.148384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.148746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.148786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.148927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.148949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.148953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.148990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.148998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.149016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.149024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.149493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.149508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.149550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.149555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.149630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.149638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.149648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.149652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.149658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.149662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.149666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.149669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.149681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.149686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.149689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
PathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:50:32.800332Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:32.800344Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:50:32.800356Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:32.800472Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.800485Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.800490Z node 106 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:32.800494Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:50:32.800498Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:32.800511Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:50:32.801031Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:50:32.801041Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:50:32.801046Z node 106 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:50:32.801109Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T17:50:32.801131Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T17:50:32.801462Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:32.801480Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 455266535530 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:32.801488Z node 106 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 
281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T17:50:32.801505Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T17:50:32.801512Z node 106 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T17:50:32.801516Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:50:32.801524Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:32.801532Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:32.801537Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T17:50:32.801543Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:50:32.801547Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T17:50:32.801550Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T17:50:32.801558Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:32.801565Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T17:50:32.801569Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:50:32.801572Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:50:32.801778Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.801827Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:50:32.801837Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:50:32.802071Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:32.802204Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.802591Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:32.802603Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:32.802632Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:32.802652Z node 106 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:32.802657Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [106:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T17:50:32.802661Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [106:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T17:50:32.802809Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.802821Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.802826Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:32.802830Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:32.802834Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:32.802949Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.802959Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.802966Z node 106 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:32.802970Z node 106 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:50:32.802974Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:32.802985Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T17:50:32.802989Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [106:120:2146] 2024-11-21T17:50:32.803015Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:32.803019Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:32.803028Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:32.803611Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2024-11-21T17:50:32.803719Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:32.803733Z node 106 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T17:50:32.803742Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T17:50:32.803800Z node 106 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T17:50:32.804113Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:50:32.804120Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:50:32.804168Z node 106 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:50:32.804181Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:32.804185Z node 106 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [106:715:2675] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2024-11-21T17:50:26.821657Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:26.824256Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:26.824350Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:26.824700Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:26.824889Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:26.825055Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:26.826875Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:26.826904Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:26.826945Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:26.826969Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:26.827402Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:26.827426Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:26.827449Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:26.827500Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:26.846563Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:26.857462Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:26.857532Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.858356Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.858413Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:26.858417Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:26.858421Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:26.858425Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:26.858443Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:26.858459Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:26.859490Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:26.869721Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.901412Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.901460Z node 1 :CMS DEBUG: Updated config: 
TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:26.925618Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:26.925716Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:26.925920Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: DOWN Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: 
"vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027000 ... 024-11-21T17:50:30.728805Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:30.728839Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:30.750190Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2024-11-21T17:50:30.750210Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2024-11-21T17:50:30.750214Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2024-11-21T17:50:30.750217Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2024-11-21T17:50:30.750220Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2024-11-21T17:50:30.750223Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2024-11-21T17:50:30.750226Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2024-11-21T17:50:30.750229Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2024-11-21T17:50:30.750293Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:30.750331Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:15Z 2024-11-21T17:50:30.750380Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:30.750388Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.750399Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:30.750408Z node 17 :CMS DEBUG: Result: DISALLOW (reason: Affected group 0 has no parity parts) 2024-11-21T17:50:30.750435Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW Reason: "Affected group 0 has no parity parts" } } 2024-11-21T17:50:32.365600Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:32.366704Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:32.368727Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:32.368798Z node 25 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:32.369115Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:32.369136Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:32.369207Z node 25 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:32.369241Z node 25 :CMS DEBUG: Using default config. 
2024-11-21T17:50:32.369301Z node 25 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:32.369313Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:32.370736Z node 25 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:32.370756Z node 25 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:32.370793Z node 25 :CMS DEBUG: Using default config 2024-11-21T17:50:32.370814Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:32.382235Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:32.414111Z node 25 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:32.414190Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:32.414215Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:32.414280Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:32.414286Z node 25 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:32.414295Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:32.414299Z node 25 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:32.414344Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:32.414357Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:32.414380Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:32.414646Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 28 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 29 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 30 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 31 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 32 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { 
VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:32.446476Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:32.446525Z node 25 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:32.446676Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -101 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Priority value is out of range" } } 2024-11-21T17:50:32.446728Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 101 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Priority value is out of range" } } >> TAccessServiceTest::PassRequestId >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2024-11-21T17:50:32.913142Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791451869829430:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:32.913192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013b1/r3tmp/tmpJF4PHV/pdisk_1.dat 2024-11-21T17:50:32.963284Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3885 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:32.980194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:32.982070Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Connect to grpc://localhost:65528 2024-11-21T17:50:32.983439Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T17:50:32.985027Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:65528: Failed to connect to remote host: Connection refused 2024-11-21T17:50:32.985386Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T17:50:32.985578Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:65528: Failed to connect to remote host: Connection refused 2024-11-21T17:50:33.015004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:33.015037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:33.016118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:33.985733Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T17:50:33.986377Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Status 5 Not Found 2024-11-21T17:50:33.986437Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2024-11-21T17:50:33.986921Z node 1 :GRPC_CLIENT DEBUG: [563c3f2816d0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2024-11-21T17:50:33.754358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791456714506111:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:33.754558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00139d/r3tmp/tmpC1cQxf/pdisk_1.dat 2024-11-21T17:50:33.801263Z 
node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:33.855446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:33.855477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:33.856551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:33.882548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:33.886566Z node 1 :GRPC_CLIENT DEBUG: [78eff0816d0] Connect to grpc://localhost:4068 2024-11-21T17:50:33.886948Z node 1 :GRPC_CLIENT DEBUG: [78eff0816d0] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2024-11-21T17:50:33.888695Z node 1 :GRPC_CLIENT DEBUG: [78eff0816d0] Status 7 Permission Denied 2024-11-21T17:50:33.888840Z node 1 :GRPC_CLIENT DEBUG: [78eff0816d0] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2024-11-21T17:50:33.889229Z node 1 :GRPC_CLIENT DEBUG: [78eff0816d0] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> FolderServiceTest::TFolderServiceAdapter >> BsControllerConfig::ReassignGroupDisk >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState >> TAccessServiceTest::PassRequestId [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TCmsTest::TestLogOperationsRollback [GOOD] |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> FolderServiceTest::TFolderServiceTransitional [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] Test command err: 2024-11-21T17:50:22.090932Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:22.091965Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:22.092011Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:22.092342Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:22.092372Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:22.093229Z node 1 :CMS DEBUG: 
TConsole::TTxInitScheme Execute 2024-11-21T17:50:22.094065Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:22.094104Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:22.094146Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:22.094168Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:22.094444Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:22.094455Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:22.094474Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:22.094515Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:22.113430Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:22.124310Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:22.124436Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:22.125659Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:22.125767Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:22.125773Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:22.125782Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:22.125786Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:22.125794Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:22.125821Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:22.129135Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: 
"/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 65 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 66 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 67 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 68 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 69 Path: 
"/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 70 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 71 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 72 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 73 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 74 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 75 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 76 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 77 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 78 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 79 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 80 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 81 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 82 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 83 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 84 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 85 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 86 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 87 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 88 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 89 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 90 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 91 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 92 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 93 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 94 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 95 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 96 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 97 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 98 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 99 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 
VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: ... Id: Wall-E-r-3 Owner: Permissions: [] HasSingleCompositeActionGroup: 0 } }, response# NKikimr::NCms::TEvCms::TEvWalleTaskStored { TaskId: task-2 } 2024-11-21T17:50:33.074176Z node 49 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCreateTaskRequest { TaskId: "task-2" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "50" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvWalleCreateTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'50\': node state: \'Locked\'" } TaskId: "task-2" Hosts: "50" } 2024-11-21T17:50:33.074254Z node 49 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:33.074262Z node 49 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:33.074271Z node 49 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:33.074289Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:33.085041Z node 49 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:33.085110Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:33.085230Z node 49 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2024-11-21T17:50:33.096528Z node 49 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:33.096560Z node 49 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:33.096573Z node 49 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:33.096673Z node 49 :CMS INFO: Check request: User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "51" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has planned shutdown (permission user-p-2 owned by user). Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2024-11-21T17:50:33.096681Z node 49 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "51" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has planned shutdown (permission user-p-2 owned by user). 
Down: " } 2024-11-21T17:50:33.096688Z node 49 :CMS DEBUG: [Nodes Counter] Checking Node: 51, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:33.096708Z node 49 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: ) 2024-11-21T17:50:33.096723Z node 49 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.096757Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 50, body# User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "51" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2024-11-21T17:50:33.107697Z node 49 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.107773Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } RequestId: "Wall-E-r-2" Deadline: 420741608 } 2024-11-21T17:50:33.107827Z node 49 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). 
Down: " } Task { TaskId: "task-1" Hosts: "51" } } 2024-11-21T17:50:33.107942Z node 49 :CMS INFO: Processing Wall-E request: TaskId: "task-2" 2024-11-21T17:50:33.119407Z node 49 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:33.119438Z node 49 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:33.119452Z node 49 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:33.119539Z node 49 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "50" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'50\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2024-11-21T17:50:33.119545Z node 49 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "50" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'50\': node state: \'Locked\'" } 2024-11-21T17:50:33.119552Z node 49 :CMS DEBUG: [Nodes Counter] Checking Node: 50, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:33.119574Z node 49 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:33.119587Z node 49 :CMS DEBUG: Accepting permission: id# Wall-E-p-3, requestId# Wall-E-r-3, owner# Wall-E 2024-11-21T17:50:33.119593Z node 49 :CMS INFO: Adding lock for Host ::1:12002 (50) (permission Wall-E-p-3 until 586524-01-19T08:01:49Z) 2024-11-21T17:50:33.119604Z node 49 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.119641Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-3, validity# 586524-01-19T08:01:49.551615Z, action# Type: REBOOT_HOST Host: "50" Duration: 18446744073709551615 2024-11-21T17:50:33.119651Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-3, owner# Wall-E 2024-11-21T17:50:33.130533Z node 49 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.130616Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-3" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-3" Action { Type: REBOOT_HOST Host: "50" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 50 InterconnectPort: 12002 } } } } 2024-11-21T17:50:33.130661Z node 49 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-2" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-2" Hosts: "50" } } 2024-11-21T17:50:33.130792Z node 49 :CMS INFO: Processing Wall-E request: TaskId: "task-2" 2024-11-21T17:50:33.130815Z node 49 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:33.130847Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# Wall-E-p-3, reason# explicit remove 2024-11-21T17:50:33.141746Z node 49 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:33.141781Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvRemoveWalleTask { TaskId: task-2 }, response# NKikimr::NCms::TEvCms::TEvWalleTaskRemoved { TaskId: task-2 } 2024-11-21T17:50:33.141791Z node 49 :CMS DEBUG: Found empty task task-2 2024-11-21T17:50:33.141852Z node 49 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleRemoveTaskRequest { TaskId: "task-2" }, 
response# NKikimr::NCms::TEvCms::TEvWalleRemoveTaskResponse { Status { Code: OK } } 2024-11-21T17:50:33.141872Z node 49 :CMS DEBUG: TTxRemoveTask Execute 2024-11-21T17:50:33.141894Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Remove task: id# task-2 2024-11-21T17:50:33.142040Z node 49 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2024-11-21T17:50:33.152899Z node 49 :CMS DEBUG: TTxRemoveTask Complete 2024-11-21T17:50:33.164631Z node 49 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:33.164675Z node 49 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:33.164694Z node 49 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:33.164861Z node 49 :CMS INFO: Check request: User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "51" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2024-11-21T17:50:33.164876Z node 49 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "51" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (50) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } 2024-11-21T17:50:33.164885Z node 49 :CMS DEBUG: [Nodes Counter] Checking Node: 51, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:33.164922Z node 49 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:33.164941Z node 49 :CMS DEBUG: Accepting permission: id# Wall-E-p-4, requestId# Wall-E-r-2, owner# Wall-E 2024-11-21T17:50:33.164949Z node 49 :CMS INFO: Adding lock for Host ::1:12003 (51) (permission Wall-E-p-4 until 586524-01-19T08:01:49Z) 2024-11-21T17:50:33.164960Z node 49 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.165001Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-4, validity# 586524-01-19T08:01:49.551615Z, action# Type: SHUTDOWN_HOST Host: "51" Duration: 18446744073709551615 2024-11-21T17:50:33.165011Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-2, owner# Wall-E 2024-11-21T17:50:33.176002Z node 49 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.176094Z node 49 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-4" Action { Type: SHUTDOWN_HOST Host: "51" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 51 InterconnectPort: 12003 } } } } 2024-11-21T17:50:33.176141Z node 49 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-1" Hosts: "51" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command 
err: 2024-11-21T17:50:34.353526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791462650485451:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:34.353686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001344/r3tmp/tmpeSN2MA/pdisk_1.dat 2024-11-21T17:50:34.395734Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:34.421059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:34.424970Z node 1 :GRPC_CLIENT DEBUG: [16507f0816d0]{trololo} Connect to grpc://localhost:16962 2024-11-21T17:50:34.425322Z node 1 :GRPC_CLIENT DEBUG: [16507f0816d0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2024-11-21T17:50:34.427234Z node 1 :GRPC_CLIENT DEBUG: [16507f0816d0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } 2024-11-21T17:50:34.454390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:34.454423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:34.455455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] Test command err: 2024-11-21T17:50:27.493868Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:27.494128Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:27.495319Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:27.495359Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:27.495571Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:27.495666Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:27.496373Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:27.496406Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:27.496430Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:27.496488Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:27.497624Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.497647Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.497665Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:27.497689Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.513825Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:27.534933Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.535050Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.535861Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.535954Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.536008Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:27.536011Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:27.536017Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:27.536020Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:27.536047Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:27.536067Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:27.537135Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 
4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:27.558091Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.558142Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.583044Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.583073Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:27.583124Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:27.583329Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 300029512 } Timestamp: 300029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 300029512 } Devices { Name: "pdisk-5-5" State: UP 
Timestamp: 300029512 } Timestamp: 300029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 300029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 3000 ... ion { Type: RESTART_SERVICES Host: "13" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 13 InterconnectPort: 12004 } } } Permissions { Action { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12012 } } } Permissions { Action { Type: RESTART_SERVICES Host: "29" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 29 InterconnectPort: 12020 } } } } 2024-11-21T17:50:30.288200Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:30.288205Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288209Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 14, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:30.288250Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288259Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288262Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:30.288285Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288292Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288297Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:30.288321Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288358Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 14 InterconnectPort: 12005 } } } Permissions { Action { Type: RESTART_SERVICES 
Host: "22" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12013 } } } Permissions { Action { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12021 } } } } 2024-11-21T17:50:30.288382Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:30.288387Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288390Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 15, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:30.288414Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288421Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288425Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:30.288449Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288456Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288460Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:30.288484Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288520Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 15 InterconnectPort: 12006 } } } Permissions { Action { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12014 } } } Permissions { Action { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 31 InterconnectPort: 12022 } } } } 2024-11-21T17:50:30.288544Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 
60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:30.288550Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288554Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 16, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:30.288580Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288587Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288591Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:30.288614Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288620Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288624Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:30.288646Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288682Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 16 InterconnectPort: 12007 } } } Permissions { Action { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12015 } } } Permissions { Action { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 32 InterconnectPort: 12023 } } } } 2024-11-21T17:50:30.288706Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:30.288712Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288715Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:30.288740Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288748Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288752Z node 10 :CMS DEBUG: [Nodes 
Counter] Checking Node: 25, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:30.288776Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288783Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 2024-11-21T17:50:30.288786Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 33, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:30.288807Z node 10 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:30.288843Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12008 } } } Permissions { Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12016 } } } Permissions { Action { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 33 InterconnectPort: 12024 } } } } >> TCmsTest::SamePriorityRequest2 [GOOD] >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> BsControllerConfig::Basic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2024-11-21T17:50:34.591427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791459462429882:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:34.591822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00130e/r3tmp/tmpA0yU8z/pdisk_1.dat 2024-11-21T17:50:34.649094Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13410 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:34.673413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:34.677596Z node 1 :GRPC_CLIENT DEBUG: [4917f083650] Connect to grpc://localhost:29557 2024-11-21T17:50:34.677704Z node 1 :GRPC_CLIENT DEBUG: [4917f083650] Request ListFoldersRequest { id: "i_am_exists" } 2024-11-21T17:50:34.679449Z node 1 :GRPC_CLIENT DEBUG: [4917f083650] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2024-11-21T17:50:34.679650Z node 1 :GRPC_CLIENT DEBUG: [4917f083f50] Connect to grpc://localhost:4722 2024-11-21T17:50:34.679715Z node 1 :GRPC_CLIENT DEBUG: [4917f083f50] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2024-11-21T17:50:34.680916Z node 1 :GRPC_CLIENT DEBUG: [4917f083f50] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2024-11-21T17:50:34.681022Z node 1 :GRPC_CLIENT DEBUG: [4917f083f50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T17:50:34.681475Z node 1 :GRPC_CLIENT DEBUG: [4917f083f50] Status 5 Not Found 2024-11-21T17:50:34.681561Z node 1 :GRPC_CLIENT DEBUG: [4917f083650] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T17:50:34.681963Z node 1 :GRPC_CLIENT DEBUG: [4917f083650] Status 5 Not Found 2024-11-21T17:50:34.692784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:34.692806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:34.693867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected |81.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] Test command err: 2024-11-21T17:50:29.576474Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:29.576807Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:29.578330Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:29.578395Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:29.578680Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:29.578825Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: 
NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:29.579890Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:29.579931Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:29.579959Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:29.580027Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:29.581582Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:29.581614Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:29.581641Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:29.581667Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:29.600134Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:29.621374Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:29.621507Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:29.622717Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:29.622827Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:29.622886Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:29.622889Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:29.622897Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:29.622900Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:29.622935Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:29.622955Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:29.624353Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 
3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 
GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:29.645459Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:29.645526Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:29.672819Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:29.672853Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:29.672932Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:29.673167Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { 
Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:33.346839Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:33.346846Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.346875Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.028512Z, action# Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:33.346892Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:33.387543Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:33.439729Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.439813Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180028512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T17:50:33.439822Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.028512Z 2024-11-21T17:50:33.451009Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:33.451084Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:33.451100Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:33.451110Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:33.451199Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: 
MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:33.451204Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:33.451211Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:33.451230Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T17:50:33.451244Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.451298Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:33.462113Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.462186Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420130512 } 2024-11-21T17:50:33.462292Z node 17 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:33.462300Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:33.462316Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:33.462347Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:33.473240Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:33.473304Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:33.484524Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:33.484563Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:33.484580Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:33.484690Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. 
Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:33.484699Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2024-11-21T17:50:33.484707Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:33.484731Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:33.484747Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:33.484752Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:33.484759Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.484793Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.233536Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:33.484802Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, owner# user 2024-11-21T17:50:33.495641Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.495723Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180233536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2024-11-21T17:50:33.495836Z node 17 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:33.495844Z node 17 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:33.495856Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:33.495878Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:33.506747Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:33.506802Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:33.517996Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:33.518030Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:33.518043Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:33.518157Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. 
Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:33.518168Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2024-11-21T17:50:33.518177Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:33.518211Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:33.518232Z node 17 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2024-11-21T17:50:33.518238Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:33.518248Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.518288Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.336560Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:33.518298Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:33.529242Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.529341Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180336560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } >> TCmsTest::StateStorageRollingRestart [GOOD] >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2024-11-21T17:50:32.774801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791454487702663:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:32.774855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0013e2/r3tmp/tmp65Wbyc/pdisk_1.dat 2024-11-21T17:50:32.822358Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21884 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:32.875829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:32.875860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:32.876966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:32.902378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:32.904914Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Connect to grpc://localhost:17915 2024-11-21T17:50:32.906874Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T17:50:32.908454Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17915: Failed to connect to remote host: Connection refused 2024-11-21T17:50:32.908949Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T17:50:32.909147Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17915: Failed to connect to remote host: Connection refused 2024-11-21T17:50:33.909366Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T17:50:33.909599Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17915: Failed to connect to remote host: Connection refused 2024-11-21T17:50:34.909860Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T17:50:34.910899Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Status 5 Not Found 2024-11-21T17:50:34.911043Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Request ListFoldersRequest { id: "i_am_exists" } 2024-11-21T17:50:34.911808Z node 1 :GRPC_CLIENT DEBUG: [4751ff0816d0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } |81.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> BsControllerConfig::PDiskCreate |81.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TServiceAccountServiceTest::Get 
[GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageRollingRestart [GOOD] Test command err: 2024-11-21T17:50:27.445182Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:27.445941Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:27.447160Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:27.447198Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:27.447427Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:27.447492Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:27.447799Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:27.447930Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:27.447955Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:27.448011Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:27.448732Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:27.448751Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:27.448766Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:27.448788Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.464027Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:27.485012Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:27.485174Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.486084Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.486186Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:27.486191Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:27.486197Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:27.486200Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:27.486220Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:27.486248Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:27.486269Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:27.487310Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:27.508414Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:27.508478Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:27.533727Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:27.533760Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:27.533820Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:27.533915Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 
NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Timestamp: 300027512 } } 2024-11-21T17:50:27.533949Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:27.533956Z node 1 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.533967Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:27.533975Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:27.533978Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:27.533982Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:27.533997Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:27.534003Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:27.534011Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:27.534044Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:06:00.027512Z, action# Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.584928Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:27.637015Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:27.637097Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } Deadline: 360027512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2024-11-21T17:50:27.637107Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:08:00.027512Z 2024-11-21T17:50:27.648056Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:27.648108Z node 1 :CMS DEBUG: TTxUpdateDowntimes 
Execute 2024-11-21T17:50:27.648128Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:27.648141Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2024-11-21T17:50:27.648193Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:27.648200Z node 1 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.648210Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:27.648220Z node 1 :CMS DEBUG: Ring: 0; State: Restart 2024-11-21T17:50:27.648223Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:27.648243Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:27.648259Z node 1 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:27.648264Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:06:00Z) 2024-11-21T17:50:27.648272Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:27.648314Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.129512Z, action# Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 2024-11-21T17:50:27.659131Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:27.659226Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } Deadline: 360129512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 2 InterconnectPort: 12002 } } } } 2024-11-21T17:50:28.701195Z node 6 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:28.702630Z node 6 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:28.703011Z node 6 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:28.703059Z node 6 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:28.703390Z node 6 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:28.703402Z node 6 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:28.704658Z node 6 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:28.704713Z node 6 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:28.704731Z node 6 :CMS DEBUG: Using default config. 
2024-11-21T17:50:28.704799Z node 6 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:28.704847Z node 6 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:28.704870Z node 6 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:28.704894Z node 6 :CMS DEBUG: Using default config 2024-11-21T17:50:28.704910Z node 6 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:28.717716Z node 6 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:28.738837Z node 6 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:28.738959Z node 6 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:28.738975Z node 6 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:28.739061Z node 6 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:28.739066Z node 6 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:28.739073Z node 6 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:28.739076Z node 6 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:28.739087Z node 6 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:28.739110Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:28.739120Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:28.739179Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:28.760203Z node 6 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:28.760275Z node 6 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } Se ... 
09Z node 16 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:31.462842Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-15, validity# 1970-01-01T00:02:00.457704Z, action# Type: RESTART_SERVICES Host: "28" Services: "storage" Duration: 0 2024-11-21T17:50:31.462852Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-16, validity# 1970-01-01T00:02:00.457704Z, action# Type: RESTART_SERVICES Host: "29" Services: "storage" Duration: 0 2024-11-21T17:50:31.462860Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-17, validity# 1970-01-01T00:02:00.457704Z, action# Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 0 2024-11-21T17:50:31.473773Z node 16 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:31.473905Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "28" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "29" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 0 } PartialPermissionAllowed: true Schedule: false DryRun: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-5" Permissions { Id: "user-p-15" Action { Type: RESTART_SERVICES Host: "28" Services: "storage" Duration: 0 } Deadline: 120457704 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 28 InterconnectPort: 12013 } } } Permissions { Id: "user-p-16" Action { Type: RESTART_SERVICES Host: "29" Services: "storage" Duration: 0 } Deadline: 120457704 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 29 InterconnectPort: 12014 } } } Permissions { Id: "user-p-17" Action { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 0 } Deadline: 120457704 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12015 } } } } 2024-11-21T17:50:31.474098Z node 16 :CMS INFO: User user is done with permissions user-p-15 2024-11-21T17:50:31.474108Z node 16 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:31.474122Z node 16 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:31.474149Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-15, reason# explicit remove 2024-11-21T17:50:31.484976Z node 16 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:31.485052Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-15" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:31.485212Z node 16 :CMS INFO: User user is done with permissions user-p-16 2024-11-21T17:50:31.485221Z node 16 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:31.485235Z node 16 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:31.485271Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-16, reason# explicit remove 2024-11-21T17:50:31.496074Z node 16 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:31.496159Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# 
NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-16" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:31.496329Z node 16 :CMS INFO: User user is done with permissions user-p-17 2024-11-21T17:50:31.496340Z node 16 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:31.496350Z node 16 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:31.496377Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-17, reason# explicit remove 2024-11-21T17:50:31.507187Z node 16 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:31.507235Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-17" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:31.520018Z node 16 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:31.520061Z node 16 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:31.520079Z node 16 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:31.520147Z node 16 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 0 } PartialPermissionAllowed: true Schedule: false DryRun: false 2024-11-21T17:50:31.520156Z node 16 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 0 2024-11-21T17:50:31.520172Z node 16 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:31.520182Z node 16 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:31.520186Z node 16 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:31.520189Z node 16 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:31.520192Z node 16 :CMS DEBUG: Ring: 3; State: Ok 2024-11-21T17:50:31.520194Z node 16 :CMS DEBUG: Ring: 4; State: Ok 2024-11-21T17:50:31.520197Z node 16 :CMS DEBUG: Ring: 5; State: Ok 2024-11-21T17:50:31.520201Z node 16 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:31.520216Z node 16 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 0 2024-11-21T17:50:31.520220Z node 16 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:31.520224Z node 16 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:31.520251Z node 16 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:31.520254Z node 16 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:31.520257Z node 16 :CMS DEBUG: Ring: 3; State: Ok 2024-11-21T17:50:31.520260Z node 16 :CMS DEBUG: Ring: 4; State: Ok 2024-11-21T17:50:31.520265Z node 16 :CMS DEBUG: Ring: 5; State: Restart 2024-11-21T17:50:31.520269Z node 16 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:31.520277Z node 16 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 0 2024-11-21T17:50:31.520281Z node 16 :CMS DEBUG: [Nodes Counter] Checking Node: 33, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:31.520285Z node 16 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:31.520288Z node 16 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:31.520291Z node 16 :CMS DEBUG: 
Ring: 2; State: Ok 2024-11-21T17:50:31.520294Z node 16 :CMS DEBUG: Ring: 3; State: Ok 2024-11-21T17:50:31.520297Z node 16 :CMS DEBUG: Ring: 4; State: Ok 2024-11-21T17:50:31.520302Z node 16 :CMS DEBUG: Ring: 5; State: Restart 2024-11-21T17:50:31.520305Z node 16 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:31.520318Z node 16 :CMS DEBUG: Accepting permission: id# user-p-18, requestId# user-r-6, owner# user 2024-11-21T17:50:31.520328Z node 16 :CMS DEBUG: Accepting permission: id# user-p-19, requestId# user-r-6, owner# user 2024-11-21T17:50:31.520332Z node 16 :CMS DEBUG: Accepting permission: id# user-p-20, requestId# user-r-6, owner# user 2024-11-21T17:50:31.520340Z node 16 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:31.520384Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-18, validity# 1970-01-01T00:02:00.563752Z, action# Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 0 2024-11-21T17:50:31.520395Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-19, validity# 1970-01-01T00:02:00.563752Z, action# Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 0 2024-11-21T17:50:31.520403Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-20, validity# 1970-01-01T00:02:00.563752Z, action# Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 0 2024-11-21T17:50:31.531379Z node 16 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:31.531503Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 0 } PartialPermissionAllowed: true Schedule: false DryRun: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-6" Permissions { Id: "user-p-18" Action { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 0 } Deadline: 120563752 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 31 InterconnectPort: 12016 } } } Permissions { Id: "user-p-19" Action { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 0 } Deadline: 120563752 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 32 InterconnectPort: 12017 } } } Permissions { Id: "user-p-20" Action { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 0 } Deadline: 120563752 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 33 InterconnectPort: 12018 } } } } 2024-11-21T17:50:31.531679Z node 16 :CMS INFO: User user is done with permissions user-p-18 2024-11-21T17:50:31.531692Z node 16 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:31.531706Z node 16 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:31.531733Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-18, reason# explicit remove 2024-11-21T17:50:31.542736Z node 16 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:31.542813Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-18" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:31.542972Z node 16 :CMS INFO: User user is done with permissions user-p-19 2024-11-21T17:50:31.542985Z node 16 :CMS 
DEBUG: Resulting status: OK 2024-11-21T17:50:31.543000Z node 16 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:31.543028Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-19, reason# explicit remove 2024-11-21T17:50:31.553930Z node 16 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:31.553991Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-19" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:31.554171Z node 16 :CMS INFO: User user is done with permissions user-p-20 2024-11-21T17:50:31.554180Z node 16 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:31.554190Z node 16 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:31.554214Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-20, reason# explicit remove 2024-11-21T17:50:31.564992Z node 16 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:31.565050Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-20" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> BsControllerConfig::SelectAllGroups >> TCmsTest::ActionIssue [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TCmsTest::VDisksEviction [GOOD] >> TPQCachingProxyTest::TestPublishAndForget >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] Test command err: 2024-11-21T17:50:26.411503Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:26.411782Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:26.413022Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:26.413068Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:26.413349Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:26.413472Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:26.414537Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:26.414584Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:26.414614Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:26.414694Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:26.416278Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:26.416324Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:26.416354Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:26.416386Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:26.433035Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:26.454070Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:26.454150Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.455034Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.455133Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.455193Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:26.455196Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:26.455203Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:26.455206Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:26.455234Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:26.455255Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:26.456128Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:26.477064Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.477113Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:26.477352Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2024-11-21T17:50:26.477390Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:26.502085Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:26.502161Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:26.502258Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 
2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120029512 } Timestamp: 120029512 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120029512 } } 2024-11-21T17:50:26.552992Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:26.594039Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:26.594098Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2024-11-21T17:50:26.594115Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:26.615848Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:26.615880Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:26.615892Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:26.615930Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:26.615935Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:26.615944Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:26.615948Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 0, down nodes: 0 2024-11-21T17:50:26.615955Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:26.615958Z node 1 :CMS DEBUG: Ring: 1; State: Ok 
2024-11-21T17:50:26.615960Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:26.615964Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:26.615981Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:26.615988Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:26.615997Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:26.616023Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.130512Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:26.626950Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:26.627063Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180130512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2024-11-21T17:50:26.627075Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.130512Z 2024-11-21T17:50:26.637957Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:26.637997Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:26.638013Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:26.638025Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:26.638063Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:26.638068Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:26.638075Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:26.638078Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 1, down nodes: 0 2024-11-21T17:50:26.638085Z node 1 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '2' of tenant 'user0': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 10%) 2024-11-21T17:50:26.638093Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:26.648927Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:26.649000Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'2\' of tenant \'user0\': too many unavailable nodes. 
Locked: 1, down: 0, total: 8, limit: 10%" } RequestId: "user-r-2" Deadline: 420232024 } 2024-11-21T17:50:26.649139Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3 ... ration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:32.628372Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:32.628464Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } PartialPermissionAllowed: false Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420129512 } 2024-11-21T17:50:32.628612Z node 25 :CMS INFO: Get selected requests for user 2024-11-21T17:50:32.628626Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:32.628661Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "user-r-2" Owner: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2024-11-21T17:50:32.640909Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:32.640992Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:32.641010Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:32.641022Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:32.641242Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:32.641251Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2024-11-21T17:50:32.641260Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:32.641293Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:32.641307Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2024-11-21T17:50:32.641311Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2024-11-21T17:50:32.641328Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T17:50:32.641352Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:32.641394Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:32.652206Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:32.652297Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. 
Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420231024 } 2024-11-21T17:50:32.652411Z node 25 :CMS INFO: Get selected requests for user 2024-11-21T17:50:32.652424Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:32.652461Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "user-r-2" Owner: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2024-11-21T17:50:32.652530Z node 25 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:32.652536Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:32.652550Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:32.652576Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:32.663326Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:32.663379Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:32.675275Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:32.675310Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:32.675324Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:32.675492Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:32.675498Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2024-11-21T17:50:32.675505Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:32.675533Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:32.675547Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2024-11-21T17:50:32.675551Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:32.675573Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:32.675589Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:32.675596Z node 25 :CMS INFO: Adding lock for Host ::1:12010 (34) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:32.675601Z node 25 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-2, owner# user 2024-11-21T17:50:32.675605Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:32.675613Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:32.675644Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.334048Z, action# Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2024-11-21T17:50:32.675655Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.334048Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2024-11-21T17:50:32.675663Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, owner# user 2024-11-21T17:50:32.686598Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:32.686697Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Deadline: 180334048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12010 } } } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180334048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T17:50:32.686839Z node 25 :CMS INFO: Get selected requests for user 2024-11-21T17:50:32.686848Z node 25 :CMS DEBUG: Resulting status: WRONG_REQUEST Unknown request user-r-2 2024-11-21T17:50:32.686864Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: WRONG_REQUEST Reason: "Unknown request user-r-2" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for 
TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:11.695686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:11.695707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:11.695712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:11.695716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:11.695730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:11.695733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:11.695741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:11.695823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:11.706671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:11.706692Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:11.708898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:11.709008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:11.709035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:11.712042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:11.712136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:11.712255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.712463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:11.713160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:11.713466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:11.713481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:11.713496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:11.713504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:11.713510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:11.713556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for 
TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:11.714942Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:11.730285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:11.730374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.730453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:11.730500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:11.730508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.731493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.731522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:11.731581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.731591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:11.731595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:11.731599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:11.731988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.731999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:11.732004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:11.732314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.732324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.732329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:11.732335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:11.732830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose 
to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:11.733185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:11.733232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:11.733408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:11.733434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:11.733441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:11.733496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:11.733501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:11.733534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:11.733544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:11.733889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:11.733898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:11.733936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:11.733941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:11.734015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:11.734021Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:11.734031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:11.734035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:11.734041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:11.734045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T17:49:11.734049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:11.734053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:11.734064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:11.734070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:11.734073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... tToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } 
BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:34.023394Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:50:34.023428Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 40us result status StatusSuccess 2024-11-21T17:50:34.023503Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:34.033770Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:778:2600] 
Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:50:34.033801Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:700:2600] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T17:50:34.033826Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:778:2600] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732211434021426 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732211434021426 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732211434021426 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:50:34.034453Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:778:2600] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T17:50:34.034469Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:700:2600] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] Test command err: 2024-11-21T17:50:25.107709Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:25.110396Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:25.110485Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:25.110848Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:25.111066Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:25.111237Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:25.112757Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:25.112788Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:25.112844Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:25.112869Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:25.113416Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:25.113444Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 
2024-11-21T17:50:25.113469Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:25.113513Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:25.132979Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:25.143771Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:25.143833Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:25.144723Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:25.144787Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:25.144790Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:25.144796Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:25.144798Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:25.144820Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:25.144840Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:25.145676Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2024-11-21T17:50:25.155905Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:25.187403Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:25.187448Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:25.187579Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2024-11-21T17:50:25.187628Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:25.211456Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.211525Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.211622Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 
} Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027000 } Timestamp: 120027000 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120027000 } } 2024-11-21T17:50:25.242055Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:25.303438Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:25.303498Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 2 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2024-11-21T17:50:25.303515Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.337652Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.337684Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.337698Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.337753Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:25.337760Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:25.337772Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:25.337782Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:25.337785Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:25.337787Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:25.337792Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:25.337810Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:25.337817Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 
1970-01-01T00:03:00Z) 2024-11-21T17:50:25.337826Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.337859Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.128000Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2024-11-21T17:50:25.348603Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.348672Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2024-11-21T17:50:25.348680Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.128000Z 2024-11-21T17:50:25.359545Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.359589Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.359606Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.359616Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:25.359656Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:25.359660Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:25.359668Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:25.359672Z node 1 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:25.359685Z node 1 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T17:50:25.359688Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.359694Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:25.359723Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.229512Z, action# Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2024-11-21T17:50:25.370363Z node 1 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:25.370430Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } Deadline: 180229512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 2 InterconnectPort: 12002 } } } } 2024-11-21T17:50:25.381399Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.381418Z node 1 :CMS INFO: Adding lock for 
Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:25.381449Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:25.381465Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:25.381474Z node ... victVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "41" Duration: 60000000 } Deadline: 180131000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 41 InterconnectPort: 12001 } } } } 2024-11-21T17:50:34.303556Z node 41 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.131000Z 2024-11-21T17:50:34.314435Z node 41 :CMS INFO: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:34.314497Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:34.314513Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:34.314524Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:34.314558Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:34.314563Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2024-11-21T17:50:34.314573Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:34.314579Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2024-11-21T17:50:34.314587Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:34.325331Z node 41 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:34.325390Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } RequestId: "user-r-2" Deadline: 420232512 } 2024-11-21T17:50:34.336375Z node 41 :CMS INFO: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:34.336419Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:34.336437Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:34.336447Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:34.336485Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:34.336490Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2024-11-21T17:50:34.336497Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:34.336504Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. 
Locked: 1, down: 0, limit: 1) 2024-11-21T17:50:34.336512Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:34.347497Z node 41 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:34.347571Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } RequestId: "user-r-3" Deadline: 420334024 } 2024-11-21T17:50:34.358886Z node 41 :CMS INFO: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:34.358944Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:34.358965Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:34.358979Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:34.359031Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:34.359038Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2024-11-21T17:50:34.359050Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:34.359059Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2024-11-21T17:50:34.359084Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:34.369996Z node 41 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:34.370087Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. 
Locked: 1, down: 0, limit: 1" } RequestId: "user-r-4" Deadline: 420435536 } 2024-11-21T17:50:34.370245Z node 41 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:34.370257Z node 41 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:34.370269Z node 41 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:34.370295Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:34.381003Z node 41 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:34.381063Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:34.392076Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:34.392149Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:34.392191Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:34.392197Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2024-11-21T17:50:34.392207Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:34.392214Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 0, down: 1, limit: 1) 2024-11-21T17:50:34.392224Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:34.403125Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:34.403147Z node 41 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:34.403211Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } RequestId: "user-r-5" Deadline: 420538560 } 2024-11-21T17:50:34.414300Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:34.414372Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:34.414414Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2024-11-21T17:50:34.414421Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2024-11-21T17:50:34.414430Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:34.414437Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1) 2024-11-21T17:50:34.414446Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:34.425293Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:34.425314Z node 41 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:34.425381Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } RequestId: "user-r-6" Deadline: 420640072 } 2024-11-21T17:50:34.436404Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:34.436497Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:34.436550Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2024-11-21T17:50:34.436559Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2024-11-21T17:50:34.436573Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:34.436578Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2024-11-21T17:50:34.436585Z node 41 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:34.436602Z node 41 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-7, owner# user 2024-11-21T17:50:34.436609Z node 41 :CMS INFO: Adding lock for Host ::1:12002 (42) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:34.436619Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:34.436643Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.741584Z, action# Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2024-11-21T17:50:34.447576Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:34.447599Z node 41 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:34.447676Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-7" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } Deadline: 180741584 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 42 InterconnectPort: 12002 } } } } |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command 
err: 2024-11-21T17:50:30.404871Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:30.408181Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:30.408366Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:30.409103Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:30.409216Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:30.409399Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:30.410711Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:30.410855Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:30.410921Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:30.410957Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:30.412081Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:30.412139Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:30.412169Z node 1 :CMS DEBUG: Using default config. 2024-11-21T17:50:30.412252Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:30.430560Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:30.441722Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:30.441811Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:30.442886Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:30.443009Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:30.443013Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:30.443020Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:30.443023Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:30.443029Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:30.443066Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:30.444290Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { 
VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 
1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:30.454678Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:30.486875Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:30.486933Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:30.513410Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:30.513449Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:30.513530Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:30.513844Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 
120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... ices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120542048 } } 2024-11-21T17:50:34.598627Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-20-20" State: 
UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120542048 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120542048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120542048 } Timestamp: 120542048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120542048 } 2024-11-21T17:50:34.598672Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2024-11-21T17:50:34.598678Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2024-11-21T17:50:34.598683Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not 
yet been completed) 2024-11-21T17:50:34.598703Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:34.598762Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T17:50:34.598768Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2024-11-21T17:50:34.598816Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2024-11-21T17:50:34.598824Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2024-11-21T17:50:34.598836Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2024-11-21T17:50:34.598841Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2024-11-21T17:50:34.598843Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2024-11-21T17:50:34.598846Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2024-11-21T17:50:34.598849Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2024-11-21T17:50:34.598852Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2024-11-21T17:50:34.598855Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2024-11-21T17:50:34.598858Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2024-11-21T17:50:34.598886Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.598973Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.598983Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.598996Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.599003Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# 
PDiskStateInfo { PDiskId: 22 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.599009Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.599015Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.599021Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120443560 ChangeTime: 120443560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120542 2024-11-21T17:50:34.599028Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2024-11-21T17:50:34.609922Z node 18 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:34.609992Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2024-11-21T17:50:34.610108Z node 18 :CMS INFO: User user removes request user-r-3 2024-11-21T17:50:34.610115Z node 18 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:34.610124Z node 18 :CMS DEBUG: TTxRemoveRequest Execute 2024-11-21T17:50:34.610128Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18 2024-11-21T17:50:34.610157Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2024-11-21T17:50:34.621054Z node 18 :CMS DEBUG: TTxRemoveRequest Complete 2024-11-21T17:50:34.621126Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2024-11-21T17:50:35.426096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791467270726528:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:35.426360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012f3/r3tmp/tmpYmPrlF/pdisk_1.dat 2024-11-21T17:50:35.477230Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13276 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:35.527603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:35.527634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:35.528524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:35.557138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:35.741184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791464802683463:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:35.741201Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012f3/r3tmp/tmpEqx5eL/pdisk_1.dat 2024-11-21T17:50:35.755127Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:50:35.841306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:35.841341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:35.842436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:35.844193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:35.845354Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2024-11-21T17:50:36.151746Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.151932Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T17:50:36.152003Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.152056Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [1:35:2058], tablet id = 2, status = OK 2024-11-21T17:50:36.152063Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:35:2058], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.152086Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T17:50:36.152133Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [1:36:2059], tablet id = 3, status = OK 2024-11-21T17:50:36.152140Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:36:2059], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.152194Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.152211Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2024-11-21T17:50:36.152248Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.152262Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T17:50:36.152265Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.152269Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T17:50:36.152281Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:40:2056], server id = [2:40:2056], tablet id = 4, status = OK 2024-11-21T17:50:36.152288Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:40:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.152302Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T17:50:36.152309Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:43:2056], server id = [3:43:2056], tablet id = 5, status = OK 2024-11-21T17:50:36.152312Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:43:2056], path = { OwnerId: 3 LocalId: 3 } 
2024-11-21T17:50:36.152316Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T17:50:36.152321Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T17:50:36.152323Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.152326Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2024-11-21T17:50:36.152329Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:36.152336Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T17:50:36.152337Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.152343Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:40:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T17:50:36.152344Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.152347Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [4:45:2056], tablet id = 6, status = OK 2024-11-21T17:50:36.152350Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:45:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.152352Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:43:2056], server id = [0:0:0], tablet id = 5, status = ERROR 2024-11-21T17:50:36.152355Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.152361Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2024-11-21T17:50:36.152363Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T17:50:36.152370Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T17:50:36.152372Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.152391Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T17:50:36.152403Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T17:50:36.152406Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:36.152415Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2024-11-21T17:50:36.152418Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2024-11-21T17:50:36.103694Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:36.103719Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:36.107775Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:36.107811Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T17:50:36.107831Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T17:50:36.107835Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T17:50:36.107844Z node 1 :PQ_READ_PROXY 
DEBUG: Direct read cache: forget read: 1 for session session1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2024-11-21T17:50:36.340751Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.340910Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T17:50:36.340977Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.340995Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T17:50:36.341010Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T17:50:36.341013Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.341069Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.341097Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.341119Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T17:50:36.341127Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [3:41:2056], tablet id = 3, status = OK 2024-11-21T17:50:36.341135Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:41:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.341143Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T17:50:36.341147Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:36.341155Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2024-11-21T17:50:36.341161Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.341170Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T17:50:36.341174Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T17:50:36.341178Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T17:50:36.341181Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.341189Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T17:50:36.341192Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.341202Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T17:50:36.341215Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T17:50:36.351375Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.351412Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:50:36.351447Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.351453Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:50:36.361687Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 
2024-11-21T17:50:36.361726Z node 1 :STATISTICS INFO: Node 2 is unavailable 2024-11-21T17:50:36.361733Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:36.361759Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.361763Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T17:50:36.361766Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.361769Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:50:36.361818Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.361822Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2024-11-21T17:50:36.317803Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.317959Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T17:50:36.318019Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.318035Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T17:50:36.318049Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T17:50:36.318052Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.318098Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.318128Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.318151Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T17:50:36.318161Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [3:41:2056], tablet id = 3, status = OK 2024-11-21T17:50:36.318168Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:41:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.318176Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T17:50:36.318180Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:36.318189Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2024-11-21T17:50:36.318195Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.318206Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T17:50:36.318210Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T17:50:36.318215Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T17:50:36.318218Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.318227Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T17:50:36.318230Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 
2024-11-21T17:50:36.318242Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T17:50:36.318255Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T17:50:36.328382Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.328402Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:50:36.328423Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.328427Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:50:36.338608Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T17:50:36.338641Z node 1 :STATISTICS INFO: Node 2 is unavailable 2024-11-21T17:50:36.338647Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:36.338667Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.338669Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T17:50:36.338671Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.338674Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:50:36.338700Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.338703Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2024-11-21T17:50:36.483044Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.483084Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T17:50:36.483166Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T17:50:36.483170Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.483174Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = ERROR 2024-11-21T17:50:36.483176Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2024-11-21T17:50:36.483182Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2 2024-11-21T17:50:36.483184Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2024-11-21T17:50:36.483189Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = ERROR 2024-11-21T17:50:36.483190Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2024-11-21T17:50:36.483195Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5 2024-11-21T17:50:36.483196Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2024-11-21T17:50:36.483200Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2024-11-21T17:50:36.483204Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = ERROR 2024-11-21T17:50:36.483206Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 
2024-11-21T17:50:36.483209Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T17:50:36.483210Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.483212Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 8 2024-11-21T17:50:36.483214Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 2024-11-21T17:50:36.483216Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TSchemeShardTTLTests::CheckCounters [GOOD] |81.5%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest |81.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |81.5%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] Test command err: 2024-11-21T17:50:29.896035Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:29.896665Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:29.898936Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:29.898993Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:29.899262Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:29.899317Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:29.899360Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:29.899397Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:29.899422Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:29.899491Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:29.901076Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:29.901102Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:29.901133Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:29.901161Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:29.919170Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:29.951282Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:29.951392Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:29.952567Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:29.952705Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:29.952712Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:29.952721Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:29.952725Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:29.952741Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:29.952764Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:29.952804Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:29.954345Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 
4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:29.986140Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:29.986195Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:30.013012Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:30.013047Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:30.013120Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:30.013406Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-5-5" State: UP 
Timestamp: 120030000 } Timestamp: 120030000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... orage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2024-11-21T17:50:33.670980Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:33.670996Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:33.671055Z node 17 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T17:50:33.671060Z node 17 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T17:50:33.671083Z node 17 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T17:50:33.671090Z node 17 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:33.671107Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2024-11-21T17:50:33.671114Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:33.671140Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: ) 2024-11-21T17:50:33.671157Z node 17 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:33.671169Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:33.671192Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:33.671240Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.028512Z, action# Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2024-11-21T17:50:33.711922Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:33.764205Z node 17 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:33.764314Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180028512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2024-11-21T17:50:33.764337Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.028512Z 2024-11-21T17:50:35.234619Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:35.235158Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:35.236801Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:35.236853Z node 25 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:35.237075Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:35.237118Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:35.237202Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:35.237222Z node 25 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:35.237255Z node 25 :CMS DEBUG: Using default config. 
2024-11-21T17:50:35.237316Z node 25 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:35.238477Z node 25 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:35.238513Z node 25 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:35.238538Z node 25 :CMS DEBUG: Using default config 2024-11-21T17:50:35.238554Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:35.249982Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:35.271343Z node 25 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:35.271485Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:35.271508Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:35.271597Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:35.271601Z node 25 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:35.271606Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:35.271609Z node 25 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:35.271617Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:35.271642Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:35.271652Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:35.271828Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 28 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 29 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 30 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 31 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 32 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } VSlotId { 
NodeId: 27 PDiskId: 27 VSlotId: 1003 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:35.292955Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:35.293011Z node 25 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:35.293481Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "" Actions { Type: RESTART_SERVICES Host: "::1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Missing user in request" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:13.549082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:13.549099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:13.549104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:13.549108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:13.549113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:13.549117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:13.549125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:13.549184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:13.556403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:13.556418Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:13.558551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:13.558570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:13.558598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:13.559882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:13.559929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:13.559999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.560036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:13.560487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.560734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:13.560741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.560750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:13.560756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:13.560761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:13.560792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.561985Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:13.575034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:13.575098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.575148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:13.575186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:13.575192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.575800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.575829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:13.575866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.575874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:13.575877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:13.575882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:13.576204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:13.576211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:13.576216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:13.576532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.576540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.576546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:13.576552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:13.577120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:13.577639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:13.577698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:13.577870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:13.577897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:13.577907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:13.577968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:13.577975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:13.578003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:13.578028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:13.578450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:13.578457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:13.578487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:13.578490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:13.578548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:13.578562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:13.578571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:13.578574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:13.578578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:13.578582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:13.578584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:13.578587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:13.578594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:13.578598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:13.578600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:13.578801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:13.578809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:13.578813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:13.578816Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:13.578819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:13.578831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
at schemeshard: 72057594046678944 2024-11-21T17:50:36.512371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:50:36.512397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:50:36.512471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:50:36.512477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:36.512483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 107:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:36.512494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T17:50:36.512499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:50:36.512506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T17:50:36.512519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:352:2327] message: TxId: 107 2024-11-21T17:50:36.512524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:50:36.512530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T17:50:36.512535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T17:50:36.512564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:50:36.512569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:36.512623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:36.512628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:36.512639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:36.513360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:50:36.513373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1396:3302] 2024-11-21T17:50:36.513460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 2024-11-21T17:50:36.577887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:50:36.577968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 
2024-11-21T17:50:36.577988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T17:50:36.577997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:36.578086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:50:36.578143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T17:50:36.578160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T17:50:36.578165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:36.644259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T17:50:36.644291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:36.644324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:36.644360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1732224703747774 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:36.644394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1732224703747774 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:36.644544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:50:36.644596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:50:36.644724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:36.644734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:50:36.644860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:36.644864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:50:36.645953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:36.645992Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:36.646007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:36.646017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2024-11-21T22:31:43.747774Z, at schemeshard: 72057594046678944 2024-11-21T17:50:36.646026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:36.646034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:36.646038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2024-11-21T22:31:43.747774Z, at schemeshard: 72057594046678944 2024-11-21T17:50:36.646042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:36.666876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:36.708374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:50:36.708411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:50:36.708449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T17:50:36.708467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T17:50:36.708473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:36.708540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T17:50:36.708546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T17:50:36.708550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:36.731419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:36.782980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T17:50:36.783013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 
2024-11-21T17:50:36.783031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T17:50:36.783046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T17:50:36.783050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T17:50:36.783123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T17:50:36.783128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T17:50:36.783131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2024-11-21T17:50:36.930348Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:36.930480Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2, status = OK 2024-11-21T17:50:36.930536Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.930546Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 3, status = OK 2024-11-21T17:50:36.930550Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.930553Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = OK 2024-11-21T17:50:36.930559Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.930565Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5, status = OK 2024-11-21T17:50:36.930568Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.930571Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T17:50:36.930587Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = OK 2024-11-21T17:50:36.930592Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:7:2054], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.930598Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T17:50:36.930600Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 
2024-11-21T17:50:36.930603Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2024-11-21T17:50:36.930611Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 6, status = OK 2024-11-21T17:50:36.930615Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.930619Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T17:50:36.930624Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 5, status = ERROR 2024-11-21T17:50:36.930627Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.930630Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = OK 2024-11-21T17:50:36.930634Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T17:50:36.930639Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T17:50:36.930641Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.930644Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2024-11-21T17:50:36.930649Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 7, status = ERROR 2024-11-21T17:50:36.930651Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.940786Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2024-11-21T17:50:36.940810Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2024-11-21T17:50:36.940835Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2024-11-21T17:50:36.940839Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2024-11-21T17:50:36.940842Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2024-11-21T17:50:36.940850Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2024-11-21T17:50:36.940856Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2024-11-21T17:50:36.940860Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2024-11-21T17:50:36.940863Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 
2024-11-21T17:50:36.940866Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:36.940877Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T17:50:36.940880Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2024-11-21T17:50:36.940895Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T17:50:36.940898Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.940903Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T17:50:36.940905Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:36.940908Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T17:50:36.940910Z node 1 :STATISTICS DEBUG: Skip EvClientConnected >> TPQCachingProxyTest::OutdatedSession >> AggregateStatistics::ShouldBePings >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TPQCachingProxyTest::TestDeregister >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> BsControllerConfig::SelectAllGroups [GOOD] >> AggregateStatistics::ShouldBePings [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> TPQCachingProxyTest::TestDeregister [GOOD] |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |81.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2024-11-21T17:50:37.464013Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:37.464039Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:37.467253Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:37.467276Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T17:50:37.467289Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T17:50:37.467294Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T17:50:37.467306Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2024-11-21T17:50:37.348871Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:37.348983Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T17:50:37.450942Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2024-11-21T17:50:37.450971Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T17:50:37.450978Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 
2024-11-21T17:50:37.451168Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T17:50:37.451174Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:37.451181Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17:2054], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T17:50:37.451184Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T17:50:37.451194Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2024-11-21T17:50:37.451200Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPopulatorTest::Boot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2024-11-21T17:50:35.682462Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:35.683076Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:35.683420Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:35.683517Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:35.683639Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:35.683644Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:35.683661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:35.684393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:35.684414Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:35.684438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:35.684451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:35.684461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:35.684470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TCmsTest::SamePriorityRequest [GOOD] >> TPopulatorTest::Boot [GOOD] |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2024-11-21T17:50:37.664157Z node 1 :PERSQUEUE NOTICE: 
[PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:37.664178Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:50:37.667360Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:37.667383Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T17:50:37.667388Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2024-11-21T17:50:37.667401Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 >> TPopulatorTestWithResets::UpdateAck >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] |81.6%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TPopulatorTest::RemoveDir ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] Test command err: 2024-11-21T17:50:30.960839Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:30.961222Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2024-11-21T17:50:30.963584Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:30.963634Z node 1 :CMS DEBUG: TTxInitScheme Execute 2024-11-21T17:50:30.964070Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:30.964090Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2024-11-21T17:50:30.964112Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2024-11-21T17:50:30.964135Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2024-11-21T17:50:30.964168Z node 1 :CMS DEBUG: Using default config. 
2024-11-21T17:50:30.964252Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2024-11-21T17:50:30.965615Z node 1 :CMS DEBUG: TTxInitScheme Complete 2024-11-21T17:50:30.965648Z node 1 :CMS DEBUG: TTxLoadState Execute 2024-11-21T17:50:30.965666Z node 1 :CMS DEBUG: Using default config 2024-11-21T17:50:30.965693Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:30.983373Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2024-11-21T17:50:31.005037Z node 1 :CMS DEBUG: TTxLoadState Complete 2024-11-21T17:50:31.005185Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:31.006461Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:31.006648Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2024-11-21T17:50:31.006659Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2024-11-21T17:50:31.006671Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2024-11-21T17:50:31.006675Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2024-11-21T17:50:31.006743Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2024-11-21T17:50:31.006770Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2024-11-21T17:50:31.006844Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2024-11-21T17:50:31.008646Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 
4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 
PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2024-11-21T17:50:31.029731Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2024-11-21T17:50:31.029778Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2024-11-21T17:50:31.057418Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:31.057456Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:31.057533Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:31.057832Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-5-5" State: UP 
Timestamp: 120029512 } Timestamp: 120029512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T17:50:36.591142Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:36.591154Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:36.591188Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.029512Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 2024-11-21T17:50:36.591215Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:36.642209Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2024-11-21T17:50:36.698937Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:36.699053Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Deadline: 180029512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2024-11-21T17:50:36.699065Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.029512Z 2024-11-21T17:50:36.710508Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:36.710611Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:36.710634Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:36.710647Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:36.710794Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:36.710803Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 2024-11-21T17:50:36.710814Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, 
with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T17:50:36.710841Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2024-11-21T17:50:36.710860Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:36.710919Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:36.722040Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:36.722133Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420131512 } 2024-11-21T17:50:36.722268Z node 25 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T17:50:36.722279Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:36.722293Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:36.722316Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T17:50:36.733140Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:36.733197Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:36.744729Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:36.744764Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:36.744781Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:36.744931Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:36.744944Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2024-11-21T17:50:36.744953Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:36.744989Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:36.745007Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2024-11-21T17:50:36.745015Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:36.745025Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:36.745058Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.234536Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 2024-11-21T17:50:36.745067Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2024-11-21T17:50:36.755932Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:36.756025Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } Deadline: 180234536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T17:50:36.756146Z node 25 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T17:50:36.756154Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T17:50:36.756166Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T17:50:36.756189Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T17:50:36.766964Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T17:50:36.767022Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T17:50:36.778359Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T17:50:36.778390Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T17:50:36.778406Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T17:50:36.778535Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2024-11-21T17:50:36.778546Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2024-11-21T17:50:36.778554Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T17:50:36.778584Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T17:50:36.778600Z node 25 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-2, owner# user 2024-11-21T17:50:36.778606Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-3 until 1970-01-01T00:03:00Z) 2024-11-21T17:50:36.778614Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T17:50:36.778645Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.337560Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 2024-11-21T17:50:36.778652Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, owner# user 2024-11-21T17:50:36.789473Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T17:50:36.789567Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } Deadline: 180337560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2024-11-21T17:50:37.973362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:37.973387Z node 1 :IMPORT WARN: Table profiles were not loaded |81.6%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> TPopulatorTest::RemoveDir [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:37.795324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:37.795344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:37.795347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:37.795351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:37.795362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:37.795364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:37.795370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:37.795428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:37.804859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:37.804882Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:37.807420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:37.807977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:37.808002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:37.809641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:37.809843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:37.809908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:37.809966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:37.810756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:37.810962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:37.810971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:37.811000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:37.811006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:37.811010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:37.811020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.811903Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:37.823451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:37.823513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.823568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:37.823621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:37.823627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.824298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:37.824316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:37.824350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.824357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:37.824360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:37.824363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:37.824659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.824669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:37.824673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:37.825003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.825010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.825015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:37.825019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T17:50:37.825441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:37.825782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:37.825829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:37.825950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:37.825966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:37.825971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:37.826005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:37.826009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:37.826032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:37.826041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:37.826354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:37.826359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:37.826388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:37.826391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:37.826449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:37.826454Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:37.826463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:37.826466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:37.826471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:37.826476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T17:50:37.826480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:37.826484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:37.826493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:37.826499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:37.826503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:37.826740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:37.826750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:37.826754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:37.826757Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:37.826759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:37.826767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.248729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.248733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:38.248738Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2024-11-21T17:50:38.248742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:38.248866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.248876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.248879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:38.248883Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2024-11-21T17:50:38.248887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T17:50:38.248896Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T17:50:38.249538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2024-11-21T17:50:38.249569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2024-11-21T17:50:38.249678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:38.249700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:38.249707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2024-11-21T17:50:38.249724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:38.249728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T17:50:38.249734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2024-11-21T17:50:38.249893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:38.250168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:38.250488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.250507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:38.250528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2024-11-21T17:50:38.250545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:38.250551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2024-11-21T17:50:38.250916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:38.250923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:38.250942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T17:50:38.250958Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:38.250961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2024-11-21T17:50:38.250964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2024-11-21T17:50:38.250996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.251001Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2024-11-21T17:50:38.251004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2024-11-21T17:50:38.251114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.251125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.251128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:38.251131Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2024-11-21T17:50:38.251134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:38.251235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.251241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:38.251244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:38.251246Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2024-11-21T17:50:38.251248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T17:50:38.251255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T17:50:38.251755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.251764Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2024-11-21T17:50:38.251771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2024-11-21T17:50:38.251774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T17:50:38.251777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is 
published: true 2024-11-21T17:50:38.251780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T17:50:38.251783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2024-11-21T17:50:38.251785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2024-11-21T17:50:38.251794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T17:50:38.251850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:38.251853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T17:50:38.251860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2024-11-21T17:50:38.252070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:38.252075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T17:50:38.252086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:38.252116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:38.252168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:38.252632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:38.252666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2024-11-21T17:50:38.252846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2024-11-21T17:50:38.252850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2024-11-21T17:50:38.252968Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2024-11-21T17:50:38.252984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2024-11-21T17:50:38.252987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2617:4609] TestWaitNotification: OK eventTxId 175 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2024-11-21T17:50:38.284812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:38.284840Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T17:50:38.304069Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle 
TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T17:50:38.304107Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2024-11-21T17:50:38.304347Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.304361Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.304367Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.304569Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T17:50:38.304576Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:50:38.305172Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2024-11-21T17:50:38.305181Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T17:50:38.305220Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T17:50:38.305223Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T17:50:38.325833Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2024-11-21T17:50:38.325858Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: 
[1:94:2121] Successful handshake: replica# [1:15:2062] 2024-11-21T17:50:38.325866Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2121] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:38.325879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2024-11-21T17:50:38.325882Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Successful handshake: replica# [1:18:2065] 2024-11-21T17:50:38.325887Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:38.325898Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2024-11-21T17:50:38.325901Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Successful handshake: replica# [1:12:2059] 2024-11-21T17:50:38.325905Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:38.325925Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2024-11-21T17:50:38.325940Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:94:2121] 2024-11-21T17:50:38.325957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T17:50:38.325969Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T17:50:38.325990Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:94:2121] 2024-11-21T17:50:38.326002Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2024-11-21T17:50:38.326008Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T17:50:38.326014Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T17:50:38.326023Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:95:2122] 2024-11-21T17:50:38.326032Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 
}: sender# [1:15:2062], cookie# 0 2024-11-21T17:50:38.326040Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T17:50:38.326061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:95:2122] 2024-11-21T17:50:38.326067Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2024-11-21T17:50:38.326073Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T17:50:38.326083Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:93:2120] 2024-11-21T17:50:38.326090Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T17:50:38.326097Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2024-11-21T17:50:38.326105Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:93:2120] 2024-11-21T17:50:38.326110Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T17:50:38.326115Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2024-11-21T17:50:38.326124Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:93:2120] 2024-11-21T17:50:38.326129Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2024-11-21T17:50:38.326139Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2024-11-21T17:50:38.326144Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T17:50:38.326150Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T17:50:38.326162Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2024-11-21T17:50:38.326167Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2024-11-21T17:50:38.326174Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2024-11-21T17:50:38.326180Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T17:50:38.326188Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 0 2024-11-21T17:50:38.326192Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2024-11-21T17:50:38.326197Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2024-11-21T17:50:38.326203Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:38.326209Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 0 2024-11-21T17:50:38.326212Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2024-11-21T17:50:38.326217Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:38.326223Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:93:2120] 2024-11-21T17:50:38.326229Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T17:50:38.326237Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 0 2024-11-21T17:50:38.326240Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2024-11-21T17:50:38.326244Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2024-11-21T17:50:38.326250Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:38.326255Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T17:50:38.326260Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2024-11-21T17:50:38.326396Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 0 2024-11-21T17:50:38.326402Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2024-11-21T17:50:38.326406Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:38.326410Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T17:50:38.326414Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2024-11-21T17:50:38.326517Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 0 2024-11-21T17:50:38.326524Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2024-11-21T17:50:38.326529Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:38.326533Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 100 2024-11-21T17:50:38.326577Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 0 2024-11-21T17:50:38.326582Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2024-11-21T17:50:38.326627Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:38.326631Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 100 TestWaitNotification: OK eventTxId 100 >> BsControllerConfig::PDiskCreate [GOOD] >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> TPopulatorTest::MakeDir >> BsControllerConfig::ReassignGroupDisk [GOOD] >> TSchemeShardAuditSettings::AlterSubdomain ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2024-11-21T17:50:38.557827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:38.557853Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T17:50:38.577452Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T17:50:38.577485Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2024-11-21T17:50:38.577696Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.577709Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.577715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.577888Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T17:50:38.577895Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2024-11-21T17:50:38.577910Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2024-11-21T17:50:38.577918Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2024-11-21T17:50:38.577925Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2024-11-21T17:50:38.577955Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:38.577960Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.577965Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.577970Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.577989Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:38.577994Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T17:50:38.577999Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2024-11-21T17:50:38.578004Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2024-11-21T17:50:38.578009Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2024-11-21T17:50:38.578020Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:38.578058Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:38.578137Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:38.578141Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T17:50:38.578164Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:38.578167Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:50:38.578543Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2024-11-21T17:50:38.578551Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T17:50:38.578564Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.578568Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.578572Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:38.578645Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" 
PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T17:50:38.578649Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2024-11-21T17:50:38.578661Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [Owner ... oard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2024-11-21T17:50:38.579436Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2024-11-21T17:50:38.579443Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2024-11-21T17:50:38.579450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:93:2120], cookie# 101 2024-11-21T17:50:38.579454Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:94:2121], cookie# 101 2024-11-21T17:50:38.579484Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2024-11-21T17:50:38.579492Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2024-11-21T17:50:38.579497Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2024-11-21T17:50:38.579502Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2024-11-21T17:50:38.579526Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:95:2122], cookie# 101 2024-11-21T17:50:38.579534Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 101 2024-11-21T17:50:38.579571Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 101 2024-11-21T17:50:38.579575Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2024-11-21T17:50:38.579599Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 101 2024-11-21T17:50:38.579603Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T17:50:38.579832Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 
219, preserialized size# 2 2024-11-21T17:50:38.579840Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2024-11-21T17:50:38.579852Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579857Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579862Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579893Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 381, preserialized size# 0 2024-11-21T17:50:38.579897Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2024-11-21T17:50:38.579904Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2024-11-21T17:50:38.579907Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2024-11-21T17:50:38.579910Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:50:38.579921Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:93:2120], cookie# 101 2024-11-21T17:50:38.579924Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579927Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579930Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 
72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T17:50:38.579950Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:94:2121], cookie# 101 2024-11-21T17:50:38.579953Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2024-11-21T17:50:38.579957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2024-11-21T17:50:38.579960Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2024-11-21T17:50:38.579963Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2024-11-21T17:50:38.579980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:95:2122], cookie# 101 2024-11-21T17:50:38.579994Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:93:2120], cookie# 101 2024-11-21T17:50:38.580000Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:94:2121], cookie# 101 2024-11-21T17:50:38.580002Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2024-11-21T17:50:38.580030Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:95:2122], cookie# 101 2024-11-21T17:50:38.580033Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T17:50:35.388025Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:35.388587Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event 
Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:35.388829Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:35.388884Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:35.388965Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:35.388968Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:35.388985Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:35.389532Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:35.389550Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:35.389574Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:35.389584Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:35.389592Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:35.389597Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:232:2066] recipient: [1:20:2067] 2024-11-21T17:50:35.399975Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:35.400017Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:35.410329Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:35.410378Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:35.410393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:35.410404Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:35.410440Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:35.410447Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:35.410454Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:35.410461Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:35.420746Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:35.420810Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:35.421014Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:35.421024Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:35.421047Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:35.423289Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2024-11-21T17:50:35.423428Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 2024-11-21T17:50:35.423433Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2024-11-21T17:50:35.423436Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2024-11-21T17:50:35.423439Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2024-11-21T17:50:35.423442Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2024-11-21T17:50:35.423445Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2024-11-21T17:50:35.423447Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2024-11-21T17:50:35.423450Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2024-11-21T17:50:35.423452Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2024-11-21T17:50:35.423455Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2024-11-21T17:50:35.423457Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2024-11-21T17:50:35.423460Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2024-11-21T17:50:35.423463Z node 1 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2024-11-21T17:50:35.423466Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1001 Path# /dev/disk3 2024-11-21T17:50:35.423471Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1002 Path# /dev/disk1 2024-11-21T17:50:35.423474Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2024-11-21T17:50:35.423478Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2024-11-21T17:50:35.423481Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1001 Path# /dev/disk1 2024-11-21T17:50:35.423483Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2024-11-21T17:50:35.423486Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1002 Path# /dev/disk1 2024-11-21T17:50:35.423489Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1002 Path# /dev/disk2 2024-11-21T17:50:35.423492Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk3 2024-11-21T17:50:35.423494Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1002 Path# /dev/disk1 2024-11-21T17:50:35.423497Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2024-11-21T17:50:35.423499Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk2 2024-11-21T17:50:35.423502Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1001 Path# /dev/disk1 2024-11-21T17:50:35.423505Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1002 Path# /dev/disk1 2024-11-21T17:50:35.423507Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1002 Path# /dev/disk2 2024-11-21T17:50:35.423510Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1001 Path# /dev/disk3 2024-11-21T17:50:35.423512Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1002 Path# /dev/disk1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T17:50:37.196800Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:37.196959Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:37.197170Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:37.197261Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:37.197367Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit 
event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:37.197372Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:37.197402Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:37.198201Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:37.198222Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:37.198252Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:37.198265Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:37.198276Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:37.198284Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:232:2066] recipient: [11:20:2067] 2024-11-21T17:50:37.208774Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:37.208832Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:37.219193Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:37.219253Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:37.219268Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:37.219279Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:37.219313Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:37.219322Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:37.219328Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:37.219336Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:37.229721Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:37.229779Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:37.229958Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:37.229964Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:37.229990Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:37.230151Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2024-11-21T17:50:37.230244Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2024-11-21T17:50:37.230250Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1001 Path# /dev/disk1 2024-11-21T17:50:37.230253Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk2 2024-11-21T17:50:37.230257Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk3 2024-11-21T17:50:37.230261Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2024-11-21T17:50:37.230265Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk1 2024-11-21T17:50:37.230270Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2024-11-21T17:50:37.230275Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2024-11-21T17:50:37.230280Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk1 2024-11-21T17:50:37.230284Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2024-11-21T17:50:37.230288Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2024-11-21T17:50:37.230292Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk3 2024-11-21T17:50:37.230296Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1002 Path# /dev/disk2 2024-11-21T17:50:37.230300Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk3 2024-11-21T17:50:37.230304Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 
Path# /dev/disk3 2024-11-21T17:50:37.230308Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2024-11-21T17:50:37.230315Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk2 2024-11-21T17:50:37.230320Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2024-11-21T17:50:37.230324Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1001 Path# /dev/disk3 2024-11-21T17:50:37.230328Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk1 2024-11-21T17:50:37.230333Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1002 Path# /dev/disk1 2024-11-21T17:50:37.230337Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1002 Path# /dev/disk2 2024-11-21T17:50:37.230342Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2024-11-21T17:50:37.230347Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1002 Path# /dev/disk2 2024-11-21T17:50:37.230351Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk3 2024-11-21T17:50:37.230355Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk2 2024-11-21T17:50:37.230359Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1001 Path# /dev/disk3 2024-11-21T17:50:37.230363Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1002 Path# /dev/disk1 2024-11-21T17:50:37.230367Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1002 Path# /dev/disk2 2024-11-21T17:50:37.230372Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1002 Path# /dev/disk1 |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> TPopulatorTest::MakeDir [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> TWebLoginService::AuditLogLogout |81.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TS3WrapperTests::HeadObject >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TS3WrapperTests::HeadUnknownObject >> TS3WrapperTests::AbortMultipartUpload |81.6%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |81.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest |81.6%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:243:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:243:2077] Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:269:2068] recipient: [1:243:2077] 2024-11-21T17:50:34.638526Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:34.639010Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:34.639304Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:34.639379Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:34.639495Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:34.639502Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:34.639532Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:34.640357Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:34.640383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:34.640409Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:34.640420Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:34.640430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:34.640437Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:294:2068] recipient: [1:22:2069] 2024-11-21T17:50:34.650889Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:34.650934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:34.661242Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:34.661288Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:34.661309Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:34.661318Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:34.661341Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:34.661348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:34.661352Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:34.661358Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:34.671562Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:34.671611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:34.671770Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:34.671775Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:34.671791Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:34.673199Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:50:34.673309Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk 2024-11-21T17:50:34.673313Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2024-11-21T17:50:34.673316Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2024-11-21T17:50:34.673319Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2024-11-21T17:50:34.673322Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2024-11-21T17:50:34.673324Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 
2024-11-21T17:50:34.673326Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2024-11-21T17:50:34.673328Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2024-11-21T17:50:34.673334Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2024-11-21T17:50:34.673337Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2024-11-21T17:50:34.673339Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2024-11-21T17:50:34.673341Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2024-11-21T17:50:34.684932Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:243:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:243:2077] Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:269:2068] recipient: [13:243:2077] 2024-11-21T17:50:37.045645Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:37.045793Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:37.045993Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:37.046066Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:37.046167Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:37.046174Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:37.046207Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:37.047080Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:37.047127Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:37.047156Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:37.047171Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:37.047185Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:37.047193Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:294:2068] recipient: 
[13:22:2069] 2024-11-21T17:50:37.057627Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:37.057675Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:37.067967Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:37.068009Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:37.068023Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:37.068032Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:37.068052Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:37.068057Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:37.068061Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:37.068087Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:37.079049Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:37.079090Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:37.079234Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:37.079239Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:37.079256Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:37.079365Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { 
Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:50:37.079435Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2024-11-21T17:50:37.079439Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2024-11-21T17:50:37.079442Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2024-11-21T17:50:37.079444Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2024-11-21T17:50:37.079447Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2024-11-21T17:50:37.079449Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2024-11-21T17:50:37.079452Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2024-11-21T17:50:37.079455Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2024-11-21T17:50:37.079458Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2024-11-21T17:50:37.079460Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2024-11-21T17:50:37.079463Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2024-11-21T17:50:37.079466Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2024-11-21T17:50:37.090919Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2024-11-21T17:50:38.990948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:38.990975Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T17:50:39.011191Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T17:50:39.011228Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2024-11-21T17:50:39.011445Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.011459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.011466Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.011646Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T17:50:39.011652Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2024-11-21T17:50:39.011668Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2024-11-21T17:50:39.011676Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2024-11-21T17:50:39.011684Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2024-11-21T17:50:39.011719Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:39.011725Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.011730Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.011735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.011756Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:39.011761Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T17:50:39.011766Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2024-11-21T17:50:39.011772Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2024-11-21T17:50:39.011777Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2024-11-21T17:50:39.011795Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:39.011841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:39.011940Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:39.011945Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T17:50:39.011976Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:39.011980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update 
(already acked?): sender# [1:95:2122], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:50:39.012405Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2024-11-21T17:50:39.012413Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T17:50:39.012429Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.012435Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.012440Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.012522Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 
PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T17:50:39.012527Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2024-11-21T17:50:39.012538Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2024-11-21T17:50:39.012543Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2024-11-21T17:50:39.012551Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2024-11-21T17:50:39.012557Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.012562Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.012579Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:39.012584Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T17:50:39.012592Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:39.012596Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2024-11-21T17:50:39.012601Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2024-11-21T17:50:39.012606Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2024-11-21T17:50:39.012611Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2024-11-21T17:50:39.012620Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:39.012652Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T17:50:39.012677Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T17:50:39.012681Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2024-11-21T17:50:39.012700Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T17:50:39.012704Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 >> TS3WrapperTests::HeadObject [GOOD] >> TWebLoginService::AuditLogLogout [GOOD] |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TS3WrapperTests::AbortMultipartUpload [GOOD] |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:39.222816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:39.222840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.222844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:39.222849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:39.222862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:39.222865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:39.222874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.222942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:39.233061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:39.233081Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:39.235557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T17:50:39.235585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:39.235612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:39.238079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:39.238123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:39.238217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.238267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:39.238895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.239148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.239158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.239192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:39.239198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.239204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:39.239215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.240280Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:39.256267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:39.256358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.256423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:39.256478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:39.256485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.262168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.262215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:39.262278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.262288Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:39.262293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:39.262298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:39.262861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.262873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:39.262878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:39.263268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.263280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.263287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.263294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.264003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:39.264457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:39.264513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:39.264704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.264730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:39.264740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.264794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:39.264801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.264840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.264853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:39.265309Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.265319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.265366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.265372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:39.265445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.265452Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:39.265464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:39.265468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.265474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:39.265479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.265484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:39.265488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:39.265499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:39.265505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:39.265509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:39.265828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.265847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.265852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:39.265857Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:39.265862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.265878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:39.266516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:39.266608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.267160Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:39.268456Z node 1 :TX_PROXY DEBUG: 
actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T17:50:39.268513Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T17:50:39.268578Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:63982, port: 63982 2024-11-21T17:50:39.268962Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:50:39.284901Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63982. Invalid credentials 2024-11-21T17:50:39.285271Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2024-11-21T17:50:39.285398Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:39.286181Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T17:50:39.262208Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:39.285223Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63982. Invalid credentials, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T17:50:39.285223Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63982. 
Invalid credentials, login_user=user1@ldap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:14.110068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:14.110083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:14.110086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:14.110089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:14.110092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:14.110095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:14.110100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:14.110181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:14.118173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:14.118188Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:14.119819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:14.119925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:14.119954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:14.122500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:14.122603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:14.122703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.122907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T17:50:14.123764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.124031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.124041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:14.124067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:14.124073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.124080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:14.124122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:14.125351Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:14.140445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:14.140504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.140550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:14.140579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:14.140584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.141049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.141073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:14.141104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.141113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:14.141115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:14.141119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:14.141409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.141415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:14.141418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:14.141622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.141627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.141630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.141634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.142002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:14.142277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:14.142314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:14.142453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:14.142469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:14.142473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.142511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:14.142515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:14.142534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:14.142541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:14.142848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:14.142854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:14.142884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T17:50:14.142889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:14.142957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:14.142962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:14.142969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:14.142972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.142975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:14.142978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:14.142982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:14.142985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:14.142992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:14.142996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:14.142998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:50:38.960820Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:38.960832Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:38.960845Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:38.960851Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:38.960857Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:38.960867Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:38.960870Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T17:50:38.961305Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:38.961332Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:50:38.961721Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.961810Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 316 } } 2024-11-21T17:50:38.961816Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:38.961830Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 316 } } 2024-11-21T17:50:38.961841Z node 97 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 316 } } 2024-11-21T17:50:38.961955Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:38.961959Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:38.961966Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:38.961970Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:38.961973Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:50:38.961978Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:38.961981Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.961983Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:38.961987Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 
2024-11-21T17:50:38.961990Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:50:38.962334Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.962521Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.962566Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.962571Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2024-11-21T17:50:38.962578Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:50:38.962582Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T17:50:38.962591Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T17:50:38.962594Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T17:50:38.963221Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:50:38.963230Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:50:38.963241Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:50:38.963244Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:50:38.963248Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:50:38.963252Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:50:38.963256Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:50:38.963259Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:50:38.963280Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:38.963283Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:50:38.963962Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:50:38.963972Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:50:38.964032Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:50:38.964052Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:38.964056Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:522:2486] TestWaitNotification: OK eventTxId 1003 
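
The audit records scattered through these traces (the "AUDIT LOG buffer" entries from the LDAP login test above and the "AUDIT: txId: ..." notices inside the schemeshard traces) are flat comma-separated key=value lists. Below is a minimal log-mining sketch in Python for turning such a record into a dictionary for test assertions; the field names are taken from the sample line in this output, and the splitting rule is an assumption about the observed format, not a documented YDB contract.

import re

# Example audit record copied (shortened) from the LDAP login test output above.
SAMPLE = ("component=grpc-login, remote_address=localhost, database=/MyRoot, "
          "operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, "
          "login_user=user1@ldap")

def parse_audit_line(line: str) -> dict:
    # Split only on ", " boundaries that start a new "key=" token, because
    # values (for example the LDAP error reason) can contain '=' and ','.
    fields = {}
    for part in re.split(r", (?=\w+=)", line.strip()):
        key, _, value = part.partition("=")
        fields[key] = value
    return fields

record = parse_audit_line(SAMPLE)
assert record["component"] == "grpc-login"
assert record["detailed_status"] == "UNAUTHORIZED"
print(record)
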
2024-11-21T17:50:38.964118Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:38.964162Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 57us result status StatusSuccess 2024-11-21T17:50:38.964283Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> TS3WrapperTests::CopyPartUpload ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 3095, MsgBus: 2647 2024-11-21T17:50:28.643648Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791434030940229:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:28.643745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b1f/r3tmp/tmpMHk0db/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3095, node 1 2024-11-21T17:50:28.698146Z node 1 
:IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:28.698259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:28.698268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:28.698270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:28.698303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2647 TClient is connected to server localhost:2647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:28.744894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:28.744923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:28.745995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:28.774972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.787827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.847456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.859844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.872933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.890659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791434030941777:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.890680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.921814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.975903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.983903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.990995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.997376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.004840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.013107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791438325909568:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.013125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791438325909573:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.013133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.013694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:29.017881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791438325909575:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:29.162534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.208218Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd7xhktp3zv24svm9y90stta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NDUwMWItNmFmYjE1YmEtZGFjOWFiYTItNmQwN2MxM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.208721Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd7xhktpfepfqgqfb9nx0qkf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgzNDNmYWQtYjdlZjg4OTYtNzE5ODU3MjctNTIyZTkyOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.208740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd7xhktp0qfhn1pgptysrnng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBmNjI4LWQ1MzI0MTMyLTg3NjM4NzhjLTk5ODgwNGMw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.209015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7xhktp8ftw6eb4s16tjmn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQwZmZlOTQtYzVhN2U0MWYtMjVjOWY1OTctYTVkYjMyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.209113Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd7xhktp8er8tssg6h5tydjg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzEzMmJhNWYtZjNlMjc0LWE4MDdmZDNhLWU0NTU1N2Jj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.210443Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd7xhktqe0sp3cv2jy7ns0ac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM3MzljODUtZDEyMTUyOS05NjdhZGExNC02MmNjZmZkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.210554Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd7xhktq8dsqnz0hdeyjm2n1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAxYjc0NDEtMjcwOWZhNGQtYjQ4YWFkYjAtNTY2ODIxOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.210625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd7xhktq2h4v6fpj9abdywkr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWJhZTY2MzQtNWY2OWMwOTEtYmZlYzUyYTUtMjg1ZDcyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.210699Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd7xhktpb4gbgrfgkpzy9k30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk3YjYzMjItODU0MWM1LWIwM2JiMjQyLTFhZDc4MTEw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.211644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. 
Ctx: { TraceId: 01jd7xhktp3zv24svm9y90stta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NDUwMWItNmFmYjE1YmEtZGFjOWFiYTItNmQwN2MxM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.212104Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd7xhktp8er8tssg6h5tydjg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzEzMmJhNWYtZjNlMjc0LWE4MDdmZDNhLWU0NTU1N2Jj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.212193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd7xhktp8ftw6eb4s16tjmn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQwZmZlOTQtYzVhN2U0MWYtMjVjOWY1OTctYTVkYjMyZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.212310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd7xhktp0qfhn1pgptysrnng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBmNjI4LWQ1MzI0MTMyLTg3NjM4NzhjLTk5ODgwNGMw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.212381Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd7xhktpanyzcs60vcar39rk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUxZTM5ZTYtNjc0ZGNjMi02NGE3NDg1 ... WE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.739166Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721617. Ctx: { TraceId: 01jd7xhx4h8y9xt0wtevdgyrhc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDI1OTZmY2ItZjljZGFhZDItMzRkZjgzYjMtOGJhYTAwYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.740877Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721621. Ctx: { TraceId: 01jd7xhx4m0f8fmg2r1df7qhmd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njc5OTE2MGEtY2Y4NzVjNGItN2UyZDAwY2QtOTU3NzVhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.740883Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721620. Ctx: { TraceId: 01jd7xhx4mbrey5wjzf9emyhrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMWVmZjYtNjgwMzVlYmQtNGM0NWY0MzMtNjExOGIzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.741210Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721618. Ctx: { TraceId: 01jd7xhx4m8wtjnmwdb18szcx4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UyZTdhZjEtNWM4YzA3ZTQtODNjM2FhMWQtZjk5ZGEwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.741253Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721619. Ctx: { TraceId: 01jd7xhx4m9t5at4vvpqgx40xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTlmMDljOTgtY2Y1NWI3Y2ItODE4YmE2YjUtNDg4MWY4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.741661Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721622. Ctx: { TraceId: 01jd7xhx4m0f8fmg2r1df7qhmd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njc5OTE2MGEtY2Y4NzVjNGItN2UyZDAwY2QtOTU3NzVhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:50:38.742007Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721623. Ctx: { TraceId: 01jd7xhx4mbrey5wjzf9emyhrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMWVmZjYtNjgwMzVlYmQtNGM0NWY0MzMtNjExOGIzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.742339Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721624. Ctx: { TraceId: 01jd7xhx4m9t5at4vvpqgx40xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTlmMDljOTgtY2Y1NWI3Y2ItODE4YmE2YjUtNDg4MWY4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.742366Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721625. Ctx: { TraceId: 01jd7xhx4m8wtjnmwdb18szcx4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UyZTdhZjEtNWM4YzA3ZTQtODNjM2FhMWQtZjk5ZGEwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.742862Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721626. Ctx: { TraceId: 01jd7xhx4m9t5at4vvpqgx40xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTlmMDljOTgtY2Y1NWI3Y2ItODE4YmE2YjUtNDg4MWY4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.743045Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721627. Ctx: { TraceId: 01jd7xhx4m8wtjnmwdb18szcx4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UyZTdhZjEtNWM4YzA3ZTQtODNjM2FhMWQtZjk5ZGEwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.743849Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721628. Ctx: { TraceId: 01jd7xhx4pb4320txwnykfdmdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDJiN2JhOTktY2FmNGQ0ZTItZjczNDZlZjQtNjFkOTFlNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.744670Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721631. Ctx: { TraceId: 01jd7xhx4pb4320txwnykfdmdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDJiN2JhOTktY2FmNGQ0ZTItZjczNDZlZjQtNjFkOTFlNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.744878Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721629. Ctx: { TraceId: 01jd7xhx4qesf5r7xs60yymbyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTAxZmYwOGQtZmIxMjFkMzYtMTIwMzEyNDItOGM0Yjk2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.744969Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721630. Ctx: { TraceId: 01jd7xhx4qf9r24z3xxwzphjvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njc5OTE2MGEtY2Y4NzVjNGItN2UyZDAwY2QtOTU3NzVhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.745204Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721632. Ctx: { TraceId: 01jd7xhx4pb4320txwnykfdmdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDJiN2JhOTktY2FmNGQ0ZTItZjczNDZlZjQtNjFkOTFlNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.745752Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721633. 
Ctx: { TraceId: 01jd7xhx4qesf5r7xs60yymbyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTAxZmYwOGQtZmIxMjFkMzYtMTIwMzEyNDItOGM0Yjk2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.745876Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721634. Ctx: { TraceId: 01jd7xhx4qf9r24z3xxwzphjvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njc5OTE2MGEtY2Y4NzVjNGItN2UyZDAwY2QtOTU3NzVhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.746005Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jd7xhx4qesf5r7xs60yymbyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTAxZmYwOGQtZmIxMjFkMzYtMTIwMzEyNDItOGM0Yjk2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.746210Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. Ctx: { TraceId: 01jd7xhx4qf9r24z3xxwzphjvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njc5OTE2MGEtY2Y4NzVjNGItN2UyZDAwY2QtOTU3NzVhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.746435Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jd7xhx4qesf5r7xs60yymbyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTAxZmYwOGQtZmIxMjFkMzYtMTIwMzEyNDItOGM0Yjk2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T17:50:38.748323Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. Ctx: { TraceId: 01jd7xhx4v9fkstsk9v0vbgre4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMWVmZjYtNjgwMzVlYmQtNGM0NWY0MzMtNjExOGIzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.748514Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. Ctx: { TraceId: 01jd7xhx4vfjw54gn9rzxcf7sh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTlmMDljOTgtY2Y1NWI3Y2ItODE4YmE2YjUtNDg4MWY4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.749322Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jd7xhx4vb762kg689zzafgc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UyZTdhZjEtNWM4YzA3ZTQtODNjM2FhMWQtZjk5ZGEwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.749386Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jd7xhx4vfjw54gn9rzxcf7sh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTlmMDljOTgtY2Y1NWI3Y2ItODE4YmE2YjUtNDg4MWY4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.749419Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jd7xhx4v9fkstsk9v0vbgre4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMWVmZjYtNjgwMzVlYmQtNGM0NWY0MzMtNjExOGIzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.749740Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. 
Ctx: { TraceId: 01jd7xhx4vfjw54gn9rzxcf7sh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTlmMDljOTgtY2Y1NWI3Y2ItODE4YmE2YjUtNDg4MWY4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.749941Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jd7xhx4v9fkstsk9v0vbgre4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmMWVmZjYtNjgwMzVlYmQtNGM0NWY0MzMtNjExOGIzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.750013Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jd7xhx4w539ytg0vqq0wgwzh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDI1OTZmY2ItZjljZGFhZDItMzRkZjgzYjMtOGJhYTAwYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.750142Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jd7xhx4vb762kg689zzafgc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UyZTdhZjEtNWM4YzA3ZTQtODNjM2FhMWQtZjk5ZGEwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.750748Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jd7xhx4w539ytg0vqq0wgwzh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDI1OTZmY2ItZjljZGFhZDItMzRkZjgzYjMtOGJhYTAwYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.750774Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jd7xhx4vb762kg689zzafgc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UyZTdhZjEtNWM4YzA3ZTQtODNjM2FhMWQtZjk5ZGEwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.751197Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. Ctx: { TraceId: 01jd7xhx4w539ytg0vqq0wgwzh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDI1OTZmY2ItZjljZGFhZDItMzRkZjgzYjMtOGJhYTAwYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:38.751252Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jd7xhx4vb762kg689zzafgc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2UyZTdhZjEtNWM4YzA3ZTQtODNjM2FhMWQtZjk5ZGEwNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS 2024-11-21T17:50:38.775870Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791455127139456:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:38.775963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2024-11-21T17:50:39.471862Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B0AA34D9-605E-4487-A8EF-248EA17C4ACD, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:8065 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DBB827C9-054A-478D-BD9A-C39CEE5908EB amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T17:50:39.473303Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B0AA34D9-605E-4487-A8EF-248EA17C4ACD, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T17:50:39.473446Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B7FB27EE-F324-407C-9768-A6AC8FD1C508, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:8065 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 191EE96E-F769-4D44-A99E-3DD0F0C56D47 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T17:50:39.474051Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B7FB27EE-F324-407C-9768-A6AC8FD1C508, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } |81.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:39.328681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:39.328706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.328711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:39.328716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:39.328729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:39.328733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:39.328741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.328814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:39.339041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:39.339063Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:39.341593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:39.341622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:39.341651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:39.344203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:39.344280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:39.344374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.344427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:39.345149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.345389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.345400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.345435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:39.345441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.345446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:39.345457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.346799Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:39.363812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:39.363897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.363962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:39.364014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:39.364021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.364798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.364835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:39.364886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.364895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:39.364900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:39.364905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:39.365338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.365351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:39.365356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:39.365696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.365704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.365710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.365717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.366330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:39.366714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:39.366763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:39.366943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.366968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:39.366978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.367032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:39.367040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.367068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.367080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:39.367492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.367500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.367541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.367547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:39.367614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.367622Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:39.367634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:39.367638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.367644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:39.367649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.367653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T17:50:39.367657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:39.367669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:39.367674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:39.367679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:39.367984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.367999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.368004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:39.368009Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:39.368014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.368027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:39.368881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:39.368981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:39.369120Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:39.370294Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:272:2264]) 2024-11-21T17:50:39.370831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:39.374471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:39.374501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:39.374504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:39.374516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:39.374524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:39.374528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:39.374531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:39.374535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
101, publications: 1, subscribers: 0 2024-11-21T17:50:39.374538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:39.374708Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:39.375733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:39.375774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T17:50:39.375811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.375816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.375852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.375857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:39.376056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:39.376070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:39.376075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:39.376083Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:39.376088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.376104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:39.376154Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:39.376540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T17:50:39.376620Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T17:50:39.376891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T17:50:39.376896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T17:50:39.431405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjU0NjM5LCJpYXQiOjE3MzIyMTE0MzksInN1YiI6InVzZXIxIn0.Sv4D8CD3lK1TEV-VJ9KTW_f6-JBGOBLWFapw9uud9fwitD2LHTxT0swa6L3Xd0T5uxCggQkb4q15Ev4Q0w4HUNDJCqtYjZPHHkvuhUg4k7hPdm2cpwkj0MQCaTzIMABkKfDnbnuwXpV8VuIS1-WoqfYaYPw31JWgkp5cWJh_CNy_sEeoyIFLehBhXy3W_ZFa1JT-j9o7El85lt3RMIztd7jFeo_HbLVywgznnTsZpiFAul-QZSqnSDZoDe_PSHy08PMchcVgj9DmjxITltZ4MAWMpN7hyvtIkbczclwDODtWPs9pWg7ztNm6m4tkAMu6jlhzHXBy6FVDrWdHqzepIw", at schemeshard: 72057594046678944 2024-11-21T17:50:39.431477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.431484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.431548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.431553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T17:50:39.431928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T17:50:39.432065Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:39.432091Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 30us result status StatusSuccess 2024-11-21T17:50:39.432183Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2gfSq0BIF0FAc8VrViHb\ndBQQpq9A7YalP7EvnxOn2USQESukJK35dtWaxdHs04jsOQDyT3Ce/mvDGTaZx0mJ\n4NU5MPLpCQqUSoHoOJRMUlYv8SKdxynY9/LcrZ5fhVsXl8cL6ys7FvSoBVSZ4xz4\nh6lPxyuxvvacbPNjxaR5t3PVzk4BZhNX6AeIZ628NttR34j9ef2v0xE3zpSgMqHt\ngpjhVXWiCfonAD7oiSjbrivLmAfb1ifj5xSzUdf8Tt4ghPVthSBcEQeBGGTWQrVq\n8apEpCAA/6+bRhNC38afpRjTJCFO9k9Ek+Q/kfsgmOVeLUa5k6FeWeJ2or+/XuV/\npwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732297839427 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:39.432273Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2024-11-21T17:50:39.432281Z node 1 :HTTP ERROR: Logout: No ydb_session_id cookie 2024-11-21T17:50:39.432315Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2024-11-21T17:50:39.432657Z node 1 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2024-11-21T17:50:39.432667Z node 1 :HTTP ERROR: Logout: Token is not in correct format 2024-11-21T17:50:39.432709Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2024-11-21T17:50:39.364828Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:39.375768Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T17:50:39.431763Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 2024-11-21T17:50:39.432881Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJh****epIw (58B1B10C), operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2024-11-21T17:50:39.432881Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJh****epIw (58B1B10C), operation=LOGOUT, status=SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:39.242627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:39.242649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.242653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:39.242657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:39.242669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:39.242673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2024-11-21T17:50:39.242680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.242746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:39.260957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:39.260978Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:39.267538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:39.267566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:39.267592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:39.273111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:39.273168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:39.273274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.273333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:39.274098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.274347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.274359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.274396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:39.274403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.274409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:39.274423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.275702Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:39.296387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:39.296494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.296586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:39.296652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:39.296663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:39.297687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.297735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:39.297800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.297813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:39.297817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:39.297822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:39.298325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.298336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:39.298340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:39.298740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.298753Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.298759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.298767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.299358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:39.299846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:39.299909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:39.300113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.300142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:39.300153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.300219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:39.300245Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.300285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.300299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:39.300809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.300820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.300869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.300875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:39.300957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.300964Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:39.300978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:39.300983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.300989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:39.300995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.301000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:39.301004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:39.301016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:39.301022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:39.301026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:39.301339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.301359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.301364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:39.301369Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:39.301374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.301392Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:39.302152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:39.302266Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.302785Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:39.303656Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T17:50:39.303701Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T17:50:39.303771Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:7070, port: 7070 2024-11-21T17:50:39.304168Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:50:39.315508Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2024-11-21T17:50:39.356375Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2024-11-21T17:50:39.356821Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:39.357702Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:39.357867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T17:50:39.357874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T17:50:39.574501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjU0NjM5LCJleHRlcm5hbF9hdXRoZW50aWNhdGlvbiI6ImxkYXAiLCJpYXQiOjE3MzIyMTE0MzksInN1YiI6InVzZXIxIn0.jL648woD-hpnytMZe1K1klYPENxKWx98B3gG6el8EYIElaLRlcembfxmUkEyjvG9Fi5kQB4KaNkp0Yly9vkWPcXU6S0aGCGZl6lX1_qa5phWNzjDzDjnToncRvHWVElmJ1MxDqCqNpDeOLObqOrQTmcAE7qlg2kxUZB5V6b6oyY01LEzwT4Sxpd2aXREHOmQJ4_J8_8ZWAiFWgt9WJyPPHztGxVVrznQuN4OD46Q1TQ0hCmKeU3YG5oc-54z__JhM9KepSIVtdjYtT_tJtwTCPQ0daaQCG9MkPlPF-9EKspRaXoOttB9M5g_OUYHwSsQKxS1s3rRJUy2MfUiBMEbmQ", at schemeshard: 72057594046678944 2024-11-21T17:50:39.574715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.574725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.574774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.574779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T17:50:39.575015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(2): 2024-11-21T17:50:39.297728Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:39.574667Z: 
component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T17:50:39.574667Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2024-11-21T17:50:39.596309Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A8E30EBA-5864-43F9-A65B-9657CA5DD4C3, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:19248 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2B217F21-C945-43F0-BAA9-21963C9EA704 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T17:50:39.597587Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A8E30EBA-5864-43F9-A65B-9657CA5DD4C3, response# No response body. >> TS3WrapperTests::GetUnknownObject >> TS3WrapperTests::CopyPartUpload [GOOD] >> TS3WrapperTests::AbortUnknownUpload ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2024-11-21T17:50:39.624177Z node 1 :S3_WRAPPER NOTICE: Request: uuid# EE310B87-3A74-4D9E-9695-64769254D8F1, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:19801 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A64CB00A-9930-48B8-801A-B0A43E6DEFC7 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2024-11-21T17:50:39.625781Z node 1 :S3_WRAPPER NOTICE: Response: uuid# EE310B87-3A74-4D9E-9695-64769254D8F1, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2024-11-21T17:50:39.625908Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 7D30F354-F15E-405F-B670-09591DFFF93C, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19801 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 51A4B5D8-2078-480E-A7EA-2330F3D290D0 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2024-11-21T17:50:39.626500Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 7D30F354-F15E-405F-B670-09591DFFF93C, response# AbortMultipartUploadResult { } 2024-11-21T17:50:39.626571Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4F9ABFFB-D30E-47B4-8BDF-CBCADE3BBD0D, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:19801 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A155C600-D830-4D4B-B777-2B70F2A88EB0 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 
2006-03-01 2024-11-21T17:50:39.627160Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4F9ABFFB-D30E-47B4-8BDF-CBCADE3BBD0D, response# No response body. >> TS3WrapperTests::GetUnknownObject [GOOD] >> TS3WrapperTests::AbortUnknownUpload [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:39.214393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:39.214417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.214422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:39.214427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:39.214441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:39.214445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:39.214453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:39.214528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:39.224977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:39.224997Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:39.227571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:39.228082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:39.228104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:39.230671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:39.231081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:39.231214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.231306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:39.232585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.232920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.232933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.232977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:39.232985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.232991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:39.233010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.234465Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:39.255249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:39.255356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.255435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:39.255507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:39.255515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.256532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.256566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:39.256624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.256635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:39.256640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:39.256646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:39.257130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.257144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:39.257150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:39.257524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.257535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.257542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.257550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.258186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:39.258609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:39.258668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:39.258863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.258892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:39.258902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.258965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:39.258974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:39.259005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.259019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:39.259468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.259478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.259525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.259532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:39.259615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.259624Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:39.259636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:39.259640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:39.259646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:39.259651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T17:50:39.259656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:39.259660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:39.259672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:39.259678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:39.259683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:39.260013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.260032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:39.260037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:39.260042Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:39.260048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.260066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ags: 2 } ExecLevel: 0 TxId: 175 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:39.641403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.641416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.641420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:39.641424Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2024-11-21T17:50:39.641430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:39.641543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.641553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.641556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:39.641560Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2024-11-21T17:50:39.641563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T17:50:39.641576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T17:50:39.642212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2024-11-21T17:50:39.642246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2024-11-21T17:50:39.642419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:39.642441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:39.642449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2024-11-21T17:50:39.642455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.642459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T17:50:39.642486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2024-11-21T17:50:39.642517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.642525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T17:50:39.642808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:39.642892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2024-11-21T17:50:39.643609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:39.643619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:39.643652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T17:50:39.643676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:39.643681Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2024-11-21T17:50:39.643686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2024-11-21T17:50:39.643698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T17:50:39.643704Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2024-11-21T17:50:39.643713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2024-11-21T17:50:39.643720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T17:50:39.643725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2024-11-21T17:50:39.643729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T17:50:39.643733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2024-11-21T17:50:39.643737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2024-11-21T17:50:39.643750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T17:50:39.643756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2024-11-21T17:50:39.643760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 2024-11-21T17:50:39.643764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2024-11-21T17:50:39.643949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.643961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.643966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:39.643971Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2024-11-21T17:50:39.643975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:39.644137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.644150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T17:50:39.644154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2024-11-21T17:50:39.644159Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2024-11-21T17:50:39.644163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T17:50:39.644178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2024-11-21T17:50:39.644432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:39.644443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T17:50:39.644467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2024-11-21T17:50:39.644531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:39.644539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T17:50:39.644552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:39.645094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:39.645696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T17:50:39.645731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:39.645748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2024-11-21T17:50:39.646036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2024-11-21T17:50:39.646047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2024-11-21T17:50:39.646248Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2024-11-21T17:50:39.646285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2024-11-21T17:50:39.646290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2463:4455] TestWaitNotification: OK eventTxId 175 |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2024-11-21T17:50:40.007920Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 02DC5C99-3CBD-45A0-9B48-73F42656CAEB, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:10392 Accept: */* Connection: Upgrade, 
HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F74917EB-BD0C-4DA0-B5B2-96AD6E1A1B9C amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T17:50:40.010564Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 02DC5C99-3CBD-45A0-9B48-73F42656CAEB, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T17:50:40.010675Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 8EA5D757-3BA1-4BEB-95A0-DECDC6038752, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:10392 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4288A239-93D1-466C-9CF5-838A1CD04E5D amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2024-11-21T17:50:40.011480Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 8EA5D757-3BA1-4BEB-95A0-DECDC6038752, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2024-11-21T17:50:40.011584Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 9DF07565-38BF-4F4A-90E2-4103CEA39580, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10392 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C8B7E02B-ABDE-401F-80C8-0096F57D4A65 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2024-11-21T17:50:40.012110Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 9DF07565-38BF-4F4A-90E2-4103CEA39580, response# UploadPartCopyResult { } 2024-11-21T17:50:40.012191Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 27BBE22B-A4E7-442D-9469-1ABB6C9DCFE6, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10392 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E36252A2-4454-4BD3-8052-08153D255D95 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2024-11-21T17:50:40.012915Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 27BBE22B-A4E7-442D-9469-1ABB6C9DCFE6, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2024-11-21T17:50:40.012990Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A2E2C2B5-1741-42FC-A4A8-0EB836155AF0, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:10392 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c 
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 69D3884B-262E-4984-BB0D-CCAE01036D6E amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2024-11-21T17:50:40.013513Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A2E2C2B5-1741-42FC-A4A8-0EB836155AF0, response# GetObjectResult { } |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2024-11-21T17:50:40.183267Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B1ABC389-2655-4015-9A97-0A58BFEF555D, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:1700 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 61B9574B-DB6B-4649-9B9A-9C9471C68C4D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T17:50:40.184698Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B1ABC389-2655-4015-9A97-0A58BFEF555D, response# No response body. ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2024-11-21T17:50:40.216584Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 40A44F37-78B2-4768-9727-45456C703E46, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:26629 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FE339ADC-C5CA-4094-8D6F-EAB1B1DC1D5B amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2024-11-21T17:50:40.220971Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 40A44F37-78B2-4768-9727-45456C703E46, response# >> TS3WrapperTests::CompleteUnknownUpload >> TS3WrapperTests::PutObject [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TWebLoginService::AuditLogLdapLoginBadUser >> TWebLoginService::AuditLogLoginBadPassword >> TS3WrapperTests::GetObject |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginSuccess >> TS3WrapperTests::MultipartUpload [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown >> TBoardSubscriberTest::SimpleSubscriber ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2024-11-21T17:50:40.711516Z node 1 :S3_WRAPPER NOTICE: Request: uuid# F5C450B6-C648-4DD4-805A-59A12BE9D82D, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: 
localhost:25806 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4FE4269C-272F-405C-A987-B68DD1D68D03 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2024-11-21T17:50:40.712890Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F5C450B6-C648-4DD4-805A-59A12BE9D82D, response# ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2024-11-21T17:50:40.728055Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 6093C5DE-CA2D-451C-9B5F-A6976EA90E10, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:12393 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DDF6CD04-6017-4A5A-8A40-367EB6FDF4E0 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T17:50:40.729383Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 6093C5DE-CA2D-451C-9B5F-A6976EA90E10, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TS3WrapperTests::GetObject [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TSchemeShardLoginTest::BasicLogin |81.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TS3WrapperTests::UploadUnknownPart >> TBoardSubscriberTest::DropByDisconnect >> TWebLoginService::AuditLogLoginBadPassword [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2024-11-21T17:50:41.068128Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 373A966D-802F-4697-B246-02FF52B33A17, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:2317 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 02FD18A2-E0A2-4D26-A2BD-B305ABDA65C0 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2024-11-21T17:50:41.069615Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 373A966D-802F-4697-B246-02FF52B33A17, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2024-11-21T17:50:41.069744Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 515049F6-68CE-4B2A-A04B-1DF76863D50A, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2317 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 730B6196-5BDF-4863-BE31-D62C39F82EE9 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream 
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2024-11-21T17:50:41.070361Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 515049F6-68CE-4B2A-A04B-1DF76863D50A, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T17:50:41.070440Z node 1 :S3_WRAPPER NOTICE: Request: uuid# C31DA804-17F5-4C45-BF5D-AB694A4FFD00, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2317 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6F2B036D-728A-44F5-B6BC-EABB42716D93 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2024-11-21T17:50:41.071326Z node 1 :S3_WRAPPER NOTICE: Response: uuid# C31DA804-17F5-4C45-BF5D-AB694A4FFD00, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T17:50:41.071413Z node 1 :S3_WRAPPER NOTICE: Request: uuid# E5D5FC0F-455E-4E08-B822-B8DEF789D308, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:2317 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 52DCF8E5-1375-41FE-A61A-22B01CF012CA amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T17:50:41.071908Z node 1 :S3_WRAPPER NOTICE: Response: uuid# E5D5FC0F-455E-4E08-B822-B8DEF789D308, response# GetObjectResult { } |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:41.060091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:41.060114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.060118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:41.060121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:41.060135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:41.060138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:41.060145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.060212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:41.070270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:41.070291Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:41.073333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:41.074112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:41.074146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:41.075796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:41.075978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:41.076076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.076146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:41.077410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.077621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.077629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.077656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:41.077661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.077665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:41.077676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.078971Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:41.096809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.096894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.096967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.097027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.097037Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.099513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.099559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:41.099626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.099641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:41.099646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:41.099652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:41.104694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.104741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:41.104750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:41.105358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.105369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.105375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.105382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.106085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:41.106540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:41.106596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:41.106773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.106799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.106809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:50:41.106870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:41.106878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.106917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.106931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:41.107366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.107374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.107416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.107421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:41.107501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.107508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:41.107520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:41.107524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.107531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:41.107535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.107540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:41.107544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:41.107556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:41.107561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:41.107565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:41.107903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.107917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.107922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:41.107927Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:41.107932Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.107948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:41.108840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:41.108966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:41.109095Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:41.110412Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:41.111110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.111180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Login authentication is disabled, at schemeshard: 72057594046678944 2024-11-21T17:50:41.111389Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:41.112096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Login authentication is disabled" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.112133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Login authentication is disabled, operation: CREATE USER, path: /MyRoot 2024-11-21T17:50:41.112314Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T17:50:41.112378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T17:50:41.112383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T17:50:41.135364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Login authentication is disabled", at schemeshard: 72057594046678944 2024-11-21T17:50:41.135402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.135409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.135458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.135463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T17:50:41.135594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T17:50:41.135659Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:41.135683Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 29us result status StatusSuccess 2024-11-21T17:50:41.135811Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3jkz8FIiKfIlaJBrBlkQ\nArCLl6HxWoo0xVs8HXlyQFrNMkdPW2gh5+bwUaDK7Yw7Fa9aZbC4srYSPQiphCxN\nLdF50jaOIzFqWNWyMb7ooUDxrHhLIivzVktbgj7qRiXc3pwkwxPUjZWGCVn93fN/\n/98fLBJBzKZy8Sp3Vqxfjdymj+173NcKlu6n497SH/+sTSXa4hnNp9/ZLB6P4gcO\nxnaJ1AtcMKFba0e8RG/DH1c+iovIAkB7mrHAU1yqB5HfazXi5UKxDKwUjuCe2tax\nqGe6o6KUzO+vX2YTyg8yDAec4+IbLouvUCMysyKGk72wFNVb1bDnnISOqf3omBYV\nPwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732297841134 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2024-11-21T17:50:41.136188Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 96858480-6A74-4295-AB49-2275A72ACC1D, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:19174 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 74308B25-93DB-47A3-8314-184E6345C5AF amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T17:50:41.137588Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 96858480-6A74-4295-AB49-2275A72ACC1D, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T17:50:41.137734Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 57C10C7F-F8D8-401F-B4B8-20B1890897ED, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:19174 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c 
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 571C073F-243B-4EF1-828C-BEA74D521A3E amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T17:50:41.138388Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 57C10C7F-F8D8-401F-B4B8-20B1890897ED, response# GetObjectResult { } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:41.088861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:41.088887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.088892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:41.088898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:41.088911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:41.088915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:41.088923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.089008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:41.099672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:41.099696Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:41.102351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:41.102385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:41.102418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:41.105458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:41.105515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:41.105613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.105665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:41.106422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.106685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.106695Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.106730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:41.106737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.106743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:41.106755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.108026Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:41.130082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.130176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.130244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.130302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.130310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.131083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.131121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:41.131184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.131194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:41.131198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:41.131202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:41.131727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.131742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:41.131747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:41.132357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.132369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:41.132374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.132381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.132919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:41.133301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:41.133352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:41.133534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.133553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.133560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.133611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:41.133618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.133654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.133663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:41.134092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.134103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.134145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.134150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:41.134221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.134229Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:41.134239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:41.134243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T17:50:41.134248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:41.134252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.134257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:41.134260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:41.134272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:41.134278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:41.134281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:41.134586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.134603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.134607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:41.134611Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:41.134616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.134631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:41.135270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:41.135378Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.135908Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:41.137066Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T17:50:41.137120Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T17:50:41.137185Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29495, port: 29495 2024-11-21T17:50:41.137559Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T17:50:41.163209Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=bad_user, attributes: 1.1 2024-11-21T17:50:41.163341Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user bad_user does not exist. 
LDAP search for filter uid=bad_user on server ldap://localhost:29495 return no entries 2024-11-21T17:50:41.163577Z node 1 :HTTP ERROR: Login fail for bad_user@ldap: Could not login via LDAP 2024-11-21T17:50:41.163694Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:41.164278Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T17:50:41.131115Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:41.163538Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:29495 return no entries, login_user=bad_user@ldap AUDIT LOG checked line: 2024-11-21T17:50:41.163538Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:29495 return no entries, login_user=bad_user@ldap >> TBoardSubscriberTest::ReconnectReplica >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TSchemeShardLoginTest::BasicLogin [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2024-11-21T17:50:41.555769Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 5415011D-98A3-4FCC-BBCF-0B5227892F4F, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:29962 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 003BB379-F5EA-435D-B52B-C542DA4A39DC amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2024-11-21T17:50:41.557452Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 5415011D-98A3-4FCC-BBCF-0B5227892F4F, response# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:41.115518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:41.115542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T17:50:41.115547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:41.115551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:41.115566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:41.115570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:41.115579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.115655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:41.125736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:41.125759Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:41.129412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:41.129450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:41.129482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:41.139213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:41.139304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:41.139421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.139490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:41.140572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.140925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.140940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.140984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:41.140992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.140999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:41.141017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.142458Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:41.159027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.159119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.159208Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.159262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.159270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.160154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.160195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:41.160268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.160280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:41.160285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:41.160290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:41.160712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.160723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:41.160727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:41.161031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.161041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.161046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.161053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.161568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:41.161960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:41.162021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:41.162210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.162233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.162242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.162298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:41.162304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.162338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.162349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:41.162709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.162716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.162758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.162764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:41.162835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.162841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:41.162852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:41.162857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.162862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:41.162867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.162871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:41.162875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:41.162884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:41.162890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:41.162893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:41.163180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.163193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2024-11-21T17:50:41.163198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:41.163202Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:41.163207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.163220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:41.163930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:41.164016Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:41.164112Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:41.165222Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:41.165770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.169017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.169070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:41.169076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:41.169094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.169105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:41.169110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:41.169114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:41.169120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T17:50:41.169124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:41.169344Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:41.170373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.170421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T17:50:41.170486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.170492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.170530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.170534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:41.170644Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:41.170667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:41.170676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:41.170683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:41.170688Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:41.170694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.170710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:41.171074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T17:50:41.171185Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T17:50:41.171565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T17:50:41.171573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T17:50:41.301921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2024-11-21T17:50:41.302015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.302024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.302090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.302097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T17:50:41.302469Z node 1 :HTTP ERROR: Login fail for user1: Invalid password 2024-11-21T17:50:41.302584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2024-11-21T17:50:41.160188Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 
2024-11-21T17:50:41.170414Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T17:50:41.302403Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1 AUDIT LOG checked line: 2024-11-21T17:50:41.302403Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T17:50:41.127972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:41.127998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.128003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:41.128007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:41.128021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:41.128025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:41.128033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.128109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:41.138107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:41.138124Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:41.142644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:41.142673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:41.142703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:41.145660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:41.145718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:41.145819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.145870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:41.146988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
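The AUDIT LOG entries shown above use a flat "timestamp: key=value, key=value, ..." layout. A minimal parsing sketch under that assumption (stdlib only; parse_audit_line is a made-up helper name, and the sketch assumes no value contains a ", " sequence):

def parse_audit_line(line):
    """Split one audit record of the form '<timestamp>: key=value, ...' into a dict."""
    timestamp, _, rest = line.partition(": ")
    fields = {"timestamp": timestamp}
    for part in rest.split(", "):
        key, _, value = part.partition("=")
        fields[key] = value
    return fields

# Sample copied from the audit buffer above.
sample = ("2024-11-21T17:50:41.302403Z: component=grpc-login, remote_address=localhost, "
          "database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, "
          "reason=Invalid password, login_user=user1")
record = parse_audit_line(sample)
assert record["component"] == "grpc-login"
assert record["reason"] == "Invalid password"
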
2024-11-21T17:50:41.147293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.147308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.147347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:41.147355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.147361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:41.147377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.148970Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:41.166549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.166657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.166724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.166774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.166781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.167731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.167769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:41.167821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.167829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:41.167833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:41.167837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:41.168348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.168362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:41.168366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:41.169812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:50:41.169825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.169830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.169837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.170362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:41.170743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:41.170788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:41.170952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.170977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.170986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.171045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:41.171051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.171077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.171088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:41.171495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.171503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.171541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.171546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:41.171609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.171615Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2024-11-21T17:50:41.171626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:41.171630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.171635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:41.171640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.171644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:41.171647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:41.171657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:41.171662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:41.171665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:41.171951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.171965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.171970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:41.171974Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:41.171979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.171993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:41.174191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:41.174296Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:41.174413Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:41.175711Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:41.176397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.179782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.179827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:41.179833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:41.179848Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.179859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:41.179865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:41.179870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:41.179875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T17:50:41.179879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:41.180101Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:41.183350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.183403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T17:50:41.183487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.183496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.183537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.183543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:41.183670Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:41.183718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:41.183731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:41.183740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:41.183745Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:41.183751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.183774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:41.184426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T17:50:41.184553Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 
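The successful login in the next record prints a JWT in its "TTxLogin DoComplete" message. A minimal sketch for inspecting such a token's header and claims without verifying the signature (stdlib only; peek_jwt is a made-up helper name, and the commented usage assumes you paste the token string from the log):

import base64
import json

def b64url_decode(segment):
    # JWT segments are base64url without padding; restore the padding first.
    return base64.urlsafe_b64decode(segment + "=" * (-len(segment) % 4))

def peek_jwt(token):
    """Return (header, claims) of a JWT WITHOUT verifying its signature.
    Only suitable for inspecting tokens you already trust, e.g. from a test log."""
    header_b64, payload_b64, _signature_b64 = token.split(".")
    return json.loads(b64url_decode(header_b64)), json.loads(b64url_decode(payload_b64))

# Usage (token_from_log is the string printed by "TTxLogin DoComplete, result: Token: ..."):
# header, claims = peek_jwt(token_from_log)
# print(header.get("alg"), claims.get("sub"), claims.get("aud"), claims.get("exp"))
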
2024-11-21T17:50:41.184957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T17:50:41.184968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T17:50:41.374404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjU0NjQxLCJpYXQiOjE3MzIyMTE0NDEsInN1YiI6InVzZXIxIn0.AmayrxlUICMUiqFXjHcJ1Z_HikKzyJu_bvGsKg7S_19jUgI-Lw5zxZ8tg34qoZTvBYKgd2k4i4h2w4Dr5spD5p3ap8e8ZN6X-zmwF4z7oI93D0rt-QA6fYA2YymvspxGaK1bnb-2YdgKm2jaFM-z41qBLftB6SkHr7AlOn8PyfRfeNvU0rLelEsTowG-mDCx0KwaQO9sY33hffBDL1tGb-DVNiha_yRYdxRcM7jT_NQE5utVrMKQAoJg_yAoLu9HqFPRX85O4E9uiqLY2q9bhhLwDw_4iQkGvPwy4hdjSbBwhMfhTxDpAyWvh0tgVQy5IakAjOChVw5LiTsT3sEH1g", at schemeshard: 72057594046678944 2024-11-21T17:50:41.374601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.374613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.374697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.374703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T17:50:41.375417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2024-11-21T17:50:41.167762Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T17:50:41.183394Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T17:50:41.375034Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 AUDIT LOG checked line: 2024-11-21T17:50:41.375034Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |81.8%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |81.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BasicLogin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:41.430601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:41.430627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.430632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:41.430636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:41.430648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:41.430651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:41.430658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:41.430735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:41.442708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:41.442731Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:41.446351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:41.447362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:41.447399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:41.449503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:41.449700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:41.449794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.449866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:41.450714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.450981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.450993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.451031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:41.451038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.451044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:41.451059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.452330Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:41.468647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.468740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.468815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.468873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.468881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.471921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.471963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:41.472022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.472033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:41.472038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:41.472044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:41.476570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.476592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:41.476597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:41.479362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.479375Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.479381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.479390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.479842Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:41.481385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:41.481436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:41.481572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:41.481599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.481604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.481663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:41.481670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:41.481696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.481706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:41.482090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.482095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.482129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.482133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:41.482194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:41.482199Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:41.482209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:41.482214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.482217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:41.482220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:41.482223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:41.482226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:41.482234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:41.482239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:41.482243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:41.482485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.482494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:41.482497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:41.482500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:41.482503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.482511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:41.483168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:41.483247Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:41.483338Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:41.484224Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:41.484788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:41.487836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:41.487886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:41.487893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:41.487909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:41.487920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:41.487926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:41.487930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:41.487936Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T17:50:41.487943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:41.488164Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:41.489599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:41.489634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T17:50:41.489694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.489698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.489731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.489736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:41.489897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:41.489909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:41.489914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:41.489918Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:41.489924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:41.489947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:41.490042Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:41.493394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T17:50:41.493488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T17:50:41.493493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T17:50:41.581524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjU0NjQxLCJpYXQiOjE3MzIyMTE0NDEsInN1YiI6InVzZXIxIn0.I65y27gIRHLMjxVgUS4vEzz_uQURl2jZDxjdJUs4kD2Pp5n2Vxomc9TBMsk2SyZQPY0C5riJeFDu5J2TujDZuEUVufsrd6T3-sD0FVWOjSWQlrpIaiFnC33TTSHKR1Vzz2xA-I99-qV6fDUrhEVjbpr5ipV6_Gbw_coP1FFwQ59Wrn5aaQO45S0ttOWAI5QH29Zn-VkdCpdprYqIKl99vuEOWUnYCTyR3Eto5eSWbjRaXTytpWgPJVfEIhOZvlSu_Job88Qc8T9R5u7zMP2RdVDaLPxPuyYt1MdejqXM0Rw1haTXd-WhTBrN4mFzvzvDookatMoLrR2f70NETXLqag", at schemeshard: 72057594046678944 2024-11-21T17:50:41.581593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:41.581598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:41.581642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:41.581646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T17:50:41.581735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T17:50:41.581793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:41.581809Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2024-11-21T17:50:41.581907Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2+8nCsnTlrUbN++gDJG2\no7bGQTkUp8oty0gtidGVtEORzCsIOPRnLBcSIT9WtKB79NQuxJub/3GLuz7Sz/xq\nxdSGlbjfMO7TIfpKSMTn4J3yFxttu4d4ODYA0Mqu+XdWIjuKzD8H8JNr4iYfqn0l\nXrm7iXBos6ETg/aUnr2+kL8NqRoPFdO0hyNg5hQloi2hl7CkfcPrMVcA12ms5JYM\neFo+mi8qmbej6ORsLeFuWSQSvFekYO7l/Iim3nngXITj55va8HCGoAQwcISaX8PB\na+Keit9v2h9IYhY2jzWdRkcBYYlSdiuHCpsp4AYSedfWeWTlJrgfgcRsau2eDqr4\nYwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732297841577 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TSchemeShardTTLTestsWithReboots::CreateTable >> TSchemeShardTTLTests::ConditionalErase >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |81.8%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |81.8%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |81.8%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest |81.9%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::LockedWriteBulkUpsertConflict >> DataShardSnapshots::VolatileSnapshotMerge >> DataShardSnapshots::MvccSnapshotAndSplit >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:42.928756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:42.928781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.928787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:42.928793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:42.928798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:42.928803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:42.928812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.928887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:42.940548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:42.940566Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:42.943456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:42.944157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:42.944189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:42.945939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:42.946177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:42.946301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.946376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2024-11-21T17:50:42.947387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.947634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.947644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.947681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:42.947688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.947694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:42.947706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.949172Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:42.965626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:42.965700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.965762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:42.965802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:42.965810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.966563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.966590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:42.966648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.966659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:42.966663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:42.966668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:42.967097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.967109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:42.967114Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2024-11-21T17:50:42.967520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.967531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.967536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.967542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.968094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:42.968542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:42.968596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:42.968766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.968791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.968798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.968853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:42.968860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.968891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.968902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:42.969414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.969426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.969475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.969480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:42.969584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:50:42.969592Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:42.969604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:42.969608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.969613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:42.969619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.969623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:42.969628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:42.969640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:42.969646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:42.969650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:42.969920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.969933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.969937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:42.969942Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:42.969946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.969959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
UG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:43.057028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2024-11-21T17:50:43.057120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.057132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.057137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:50:43.057180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:50:43.057197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:50:43.057673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.057680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:43.057724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.057727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:43.057733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.057737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:43.057847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:43.057857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:43.057859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:43.057863Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:50:43.057866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:50:43.057873Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:50:43.058328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:43.069014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 175 } } 2024-11-21T17:50:43.069029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:43.069046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 175 } } 2024-11-21T17:50:43.069066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 175 } } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:43.069195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:43.069200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:43.069210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:43.069214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:43.069229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:43.069241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.069245Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.069249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:43.069256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:50:43.069767Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.069831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.069876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.069883Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:43.069895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:43.069899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:43.069905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:50:43.069916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 102 2024-11-21T17:50:43.069921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:43.069926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:43.069930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:43.069950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:43.070295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:43.070302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:451:2415] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:43.070386Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:43.070433Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 51us result status StatusSuccess 2024-11-21T17:50:43.070530Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } 
IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:12.669632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:12.669657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.669662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:12.669667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:12.669673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:12.669677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:12.669686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:12.669759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:12.680754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:12.680773Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:12.683804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:12.684384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:12.684420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:12.688395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:12.688595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState 
with owners number: 0 2024-11-21T17:50:12.688714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.688792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:12.689790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.690065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.690080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:12.690122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:12.690130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.690136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:12.690149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.691616Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:12.707634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:12.707698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.707754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:12.707788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:12.707793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.709330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.709355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:12.709395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.709405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:12.709408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:12.709412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:12.709821Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.709836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:12.709839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:12.711658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.711673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.711680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.711687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.712312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:12.713355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:12.713410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:12.713585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:12.713611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:12.713617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.713670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:12.713677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:12.713706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.713716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:12.714472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:12.714481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:12.714524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T17:50:12.714529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:12.714606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:12.714612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:12.714622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:12.714626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.714631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:12.714635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:12.714640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:12.714643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:12.714654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:12.714658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:12.714662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:12.714926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.714955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:12.714959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:12.714964Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:12.714969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:12.714981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T17:50:34.906681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T17:50:34.906688Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T17:50:34.906723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:34.906728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T17:50:34.906731Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T17:50:34.916914Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:37.357153Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0011 2024-11-21T17:50:37.388187Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2024-11-21T17:50:37.429166Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T17:50:37.429247Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T17:50:37.429269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T17:50:37.429279Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T17:50:37.429330Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:37.429337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T17:50:37.429342Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T17:50:37.439500Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:39.862193Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0011 2024-11-21T17:50:39.883100Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2024-11-21T17:50:39.906833Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T17:50:39.906927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T17:50:39.906953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T17:50:39.906965Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T17:50:39.907003Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:39.907012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T17:50:39.907017Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T17:50:39.917232Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:50:42.285730Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [3:560:2521], attempt# 1 2024-11-21T17:50:42.288758Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [3:559:2520] 2024-11-21T17:50:42.290018Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [3:560:2521], sender# [3:559:2520] 2024-11-21T17:50:42.290039Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [3:559:2520] 2024-11-21T17:50:42.290063Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [3:560:2521], sender# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } 2024-11-21T17:50:42.290126Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [3:560:2521], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:26960 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E5E77E44-9FB9-44F1-B394-E993B64B3B1A amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2024-11-21T17:50:42.291163Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle 
TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [3:560:2521], result# 2024-11-21T17:50:42.291218Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T17:50:42.294104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:50:42.294127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:42.294152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:50:42.294165Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T17:50:42.294180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.294184Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.294189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:42.294197Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T17:50:42.294243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.294842Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.294925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.294934Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T17:50:42.294946Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T17:50:42.294950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:42.294957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready 
parts: 1/1, is published: true 2024-11-21T17:50:42.294970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T17:50:42.294989Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T17:50:42.294995Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T17:50:42.295000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T17:50:42.295024Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:42.295693Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T17:50:42.295710Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T17:50:42.296125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:42.296135Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:579:2537] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:42.470043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:42.470075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.470080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:42.470086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:42.470092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:42.470096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:42.470105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.470219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:42.481737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:42.481768Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:42.485567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:42.486441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:42.486490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T17:50:42.488134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:42.488341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:42.488474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.488552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:42.489564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.489861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.489874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.489915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:42.489924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.489930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:42.489947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.491375Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:42.511275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:42.511368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.511439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:42.511486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:42.511496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.512400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.512432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:42.512500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.512512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:42.512517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T17:50:42.512523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:42.515680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.515707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:42.515716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:42.520000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.520030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.520038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.520050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.520789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:42.524748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:42.524839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:42.525082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.525135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.525148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.525231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:42.525242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.525281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.525296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:42.525988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.525998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.526041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.526045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:42.526119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.526126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:42.526138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:42.526142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.526147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:42.526151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.526154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:42.526156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:42.526168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:42.526173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:42.526175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:42.526527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.526546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.526550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:42.526556Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:42.526561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.526578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
dSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:42.603444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:42.603473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:50:42.603685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.603704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.603711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:50:42.603784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:50:42.603806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:50:42.604999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.605011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:42.605080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.605085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:42.605182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.605190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:42.605422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:42.605435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:42.605440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:42.605446Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:50:42.605451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:42.605467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:50:42.605539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 214 } } 2024-11-21T17:50:42.605545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:42.605560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 214 } } 2024-11-21T17:50:42.605572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 214 } } 2024-11-21T17:50:42.605679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:42.605684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:42.605695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:42.605700Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:42.605707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:42.605718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.605722Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.605729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:42.605735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 102:0 129 -> 240 2024-11-21T17:50:42.606594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:42.606705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.606724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.606776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.606783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:42.606796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:42.606801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:42.606807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:50:42.606823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:50:42.606830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:42.606835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:42.606839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:42.606860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:42.607273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:42.607286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:386:2360] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:42.607409Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:42.607463Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 63us result status StatusSuccess 2024-11-21T17:50:42.607597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } 
IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:42.829895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:42.829921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.829925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:42.829929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:42.829933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:42.829936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:42.829942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.830024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:42.839337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:42.839361Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:42.843763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:42.844508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:42.844548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:42.846636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:42.846910Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:42.847043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.847130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:42.848382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.848678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.848691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.848732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:42.848739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.848746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:42.848761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.850444Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:42.868768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:42.868847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.868912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:42.868953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:42.868960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.872030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.872083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:42.872148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.872161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:42.872166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:42.872171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T17:50:42.872959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.872978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:42.872983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:42.873472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.873483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.873489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.873496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.874126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:42.874636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:42.874692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:42.874862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.874890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.874896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.874955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:42.874963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.874996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.875008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:42.875545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.875554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.875600Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.875605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:42.875686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.875692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:42.875703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:42.875711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.875717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:42.875722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.875726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:42.875730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:42.875743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:42.875749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:42.875753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:42.876039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.876052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.876056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:42.876061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:42.876066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.876079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
D DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2024-11-21T17:50:42.939601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.939627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.939636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2024-11-21T17:50:42.939666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2024-11-21T17:50:42.939692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.939705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T17:50:42.941367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.941382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.941425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:42.941471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.941477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:42.941483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:50:42.941498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.941506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:42.941859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:42.941874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:42.941880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:42.941886Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:42.941892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:50:42.942065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:42.942072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:42.942074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:42.942076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:50:42.942079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:42.942086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T17:50:42.942128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 417 } } 2024-11-21T17:50:42.942134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:42.942150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 417 } } 2024-11-21T17:50:42.942164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 417 } } 2024-11-21T17:50:42.942329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:42.942337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:42.942352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:42.942357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:42.942364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:42.942376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.942380Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.942384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:42.942388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T17:50:42.943258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:42.943301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:42.943345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.943591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.943663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.943672Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:42.943683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:42.943688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:42.943695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T17:50:42.943709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 101 2024-11-21T17:50:42.943720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:42.943726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:42.943731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:42.943751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:42.944178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:42.944191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:343:2318] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T17:50:42.945019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { 
Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:42.945071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.945148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2024-11-21T17:50:42.945677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.945714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:42.956297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:42.956324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.956330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:42.956335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:42.956341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:42.956345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:42.956354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.956455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:42.967690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:42.967713Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:42.972329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:42.973167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:42.973201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T17:50:42.974576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:42.974793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:42.974898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.974972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:42.976171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.976462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.976477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.976513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:42.976522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.976530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:42.976543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.977805Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:42.995555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:42.995628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.995686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:42.995726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:42.995734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.996444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.996480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:42.996525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.996534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:42.996538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:42.996543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:42.996964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.996979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:42.996983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:42.997331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.997341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.997347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.997352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.997853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:42.998255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:42.998305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:42.998456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.998479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.998486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.998537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:42.998543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.998570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.998581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:42.999013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.999022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.999065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.999069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:42.999162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.999169Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:42.999180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:42.999187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.999192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:42.999197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.999201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:42.999204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:42.999216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:42.999222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:42.999226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:42.999488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.999504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.999509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:42.999514Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:42.999518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.999531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
:BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:50:43.181073Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:43.181354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T17:50:43.181392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:43.181430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T17:50:43.181436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T17:50:43.181441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T17:50:43.181509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.181533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.181541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T17:50:43.181547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T17:50:43.181994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.182005Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T17:50:43.182016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T17:50:43.182019Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T17:50:43.182023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T17:50:43.182033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T17:50:43.182037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T17:50:43.182040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T17:50:43.182043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T17:50:43.182054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:43.182485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T17:50:43.182506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T17:50:43.182517Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T17:50:43.182529Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:43.182961Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:50:43.182981Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload 
rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:43.182989Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T17:50:43.183405Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:50:43.183424Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:381:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:50:43.183428Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T17:50:43.183448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:43.183453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:468:2432] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:43.183573Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:43.183632Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 70us result status StatusSuccess 2024-11-21T17:50:43.183758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" 
SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:43.208359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:43.208384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.208390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:43.208395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:43.208401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:43.208405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:43.208413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.208499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:43.218766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:43.218789Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:43.221543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:43.222336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:43.222371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:43.223751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:43.223938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:43.224044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.224113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:43.225139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.225451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.225467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.225509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:43.225518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.225525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:43.225543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.227498Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:43.241651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:43.241706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.241744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:43.241773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:43.241777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.242406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.242432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:43.242483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.242492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:43.242496Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:43.242501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:43.242987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.243000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:43.243005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:43.243439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.243451Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.243457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.243463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.244003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:43.244433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:43.244485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:43.244645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.244669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.244680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.244731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:43.244737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.244763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.244774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:43.245196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.245207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.245246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.245251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:43.245312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.245319Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:43.245330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:43.245334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.245340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:43.245344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.245349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:43.245352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:43.245365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:43.245369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:43.245373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:43.245657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.245669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.245673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:43.245677Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:43.245680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.245691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:43.246263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:43.246360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:43.246663Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:43.247879Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:43.248433Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:43.248524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.248660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2024-11-21T17:50:43.248830Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:43.249474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.249503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2024-11-21T17:50:43.249628Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T17:50:43.250230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:43.250280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.250337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2024-11-21T17:50:43.250782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.250805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 |81.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:43.364608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:43.364630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.364634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:43.364638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:43.364643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:43.364645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:43.364652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.364735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:43.374990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:43.375018Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:43.378316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:43.378942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:43.378977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:43.380449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:43.380702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:43.380819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.380900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:43.381813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.382071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.382079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.382109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:43.382114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.382119Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:43.382131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.383653Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:43.397937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:43.398013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.398070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:43.398105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:43.398111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.398826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.398848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:43.398905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.398916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:43.398921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:43.398926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:43.399319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.399328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:43.399331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:43.399606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.399614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.399619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.399624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.400050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { 
TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:43.400437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:43.400482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:43.400633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.400653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.400662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.400707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:43.400712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.400740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.400754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:43.401271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.401284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.401332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.401338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:43.401420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.401427Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:43.401440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:43.401444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.401450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:43.401455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.401460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:43.401464Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:43.401476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:43.401482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:43.401487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:43.401812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.401827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.401833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:43.401839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:43.401844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.401862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... atedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:43.479516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 212 } } 2024-11-21T17:50:43.479525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 212 } } 2024-11-21T17:50:43.479720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:43.479729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T17:50:43.479744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:43.479749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:43.479758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:43.479767Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.479770Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:43.479773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:43.479777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T17:50:43.479846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:43.479851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T17:50:43.479859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:43.479863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:43.479869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:50:43.479874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.479876Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.479880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:43.479883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T17:50:43.481003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:43.481032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:43.481041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:43.482286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:43.482340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:43.482405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.482460Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:43.482542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.482598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T17:50:43.482607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T17:50:43.482622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T17:50:43.482627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T17:50:43.482633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T17:50:43.482701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.482706Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:43.482711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T17:50:43.482715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T17:50:43.482719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T17:50:43.482739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T17:50:43.482744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T17:50:43.482750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:43.482755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:43.482834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:43.482841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T17:50:43.482844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T17:50:43.482849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:50:43.482853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T17:50:43.482856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T17:50:43.482863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:43.483560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:43.483571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:43.483698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2024-11-21T17:50:43.483752Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 65us result status StatusSuccess 2024-11-21T17:50:43.483891Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.9%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |81.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 9701, MsgBus: 18667 2024-11-21T17:50:27.698005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791430243273748:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:27.698213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b32/r3tmp/tmpK4GhrM/pdisk_1.dat 2024-11-21T17:50:27.733022Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9701, node 1 2024-11-21T17:50:27.742763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:27.742773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:27.742774Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:27.742799Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18667 TClient is connected to server localhost:18667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:27.798765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:27.798788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:27.799895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:27.820129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.824792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.883725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:27.902769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.914163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.942809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791430243275302:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:27.942848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:27.963636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.969422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.975643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.983159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.989781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.996878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.005588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791434538243090:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.005621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.005628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791434538243095:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.006239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:28.010496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791434538243097:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:28.150396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.205279Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd7xhjvb71z1vj0tm1h2hpjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY0NzEzNTctYTJiOGM1Y2MtYTMzZmU2MDYtODA1MjNkOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.205291Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd7xhjvb30pq7knfrerj8r0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNmNjAyZDktMjAyNTk5N2ItYTg1NTc5MjktZjM1ZTE4Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.205447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd7xhjvbd46vsayzx2qat47n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThkMTEzNjYtZmI0OWRiNS1hMWY4OTNkZC0zMDZkYzUwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.206382Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd7xhjvb9m8t9b3qqd22csb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWEzMzM0OWUtMzZkYzMxY2EtZmJlNWNhZWUtOTBmODExN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.206481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7xhjvb6zmzj5jtg7k08pmq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJmOTE3ZTAtNWViODYyNTYtOWQwZDU0YjAtYmJmMmNiMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.208054Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd7xhjvb5zjqk7gmkre8013m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA2OTdiY2YtNzI0NzMyYzItYjQzMmNkOWQtMmZjNDBiNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.208137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd7xhjvbf5024abhkmr5dnsf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQ3YTczYzQtOTYzMTZkNzQtMjE3YmM3YTItMzEwZGFhN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.208194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd7xhjvbbhnvbjg4cqs1vy52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWFjNmFhNDAtNWRiMjg2MzItY2NlMDBhYjktZWE4MmIxYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.208280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd7xhjvb863a1dbyw5mp7t9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZiNWY2ZWYtMjBkOTUxMDktNmZlZTJlYzUtNWRmOWZkY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.208363Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. 
Ctx: { TraceId: 01jd7xhjvb6rz4anmrzqkdm72s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThjYjU2ODgtOGExYjBmYjItMWQ3MzQzODctZTE2NGVmN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.209767Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd7xhjvbd46vsayzx2qat47n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThkMTEzNjYtZmI0OWRiNS1hMWY4OTNkZC0zMDZkYzUwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.210189Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd7xhjvb71z1vj0tm1h2hpjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY0NzEzNTctYTJiOGM1Y2MtYTMzZmU2MDYtODA1MjNkOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.210878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd7xhjvb6zmzj5jtg7k08pmq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJmOTE3ZTAtNWViODYyNTYtOWQwZDU0YjAtYmJmMmNiMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.211124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd7xhjvb30pq7knfrerj8r0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNmNjAyZ ... sion/3?node_id=3&id=OTJhN2I2YTQtODg0ZTg5MGYtMjg5MjUxYzktZWYzOTQ1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.272895Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721475. Ctx: { TraceId: 01jd7xj1j77prhmxs3m8ct18d6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmNiYTg4MTEtMzg3Y2VmODUtZGZmYjQxM2MtZTk5MzcxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.273888Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721477. Ctx: { TraceId: 01jd7xj1j7dw2mc4zg70qxatvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTJhN2I2YTQtODg0ZTg5MGYtMjg5MjUxYzktZWYzOTQ1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.273964Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721479. Ctx: { TraceId: 01jd7xj1j77prhmxs3m8ct18d6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmNiYTg4MTEtMzg3Y2VmODUtZGZmYjQxM2MtZTk5MzcxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.274092Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721478. Ctx: { TraceId: 01jd7xj1j9d717zmgvtbwwbbth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.274377Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721480. Ctx: { TraceId: 01jd7xj1j7dw2mc4zg70qxatvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTJhN2I2YTQtODg0ZTg5MGYtMjg5MjUxYzktZWYzOTQ1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.274395Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721481. 
Ctx: { TraceId: 01jd7xj1j77prhmxs3m8ct18d6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmNiYTg4MTEtMzg3Y2VmODUtZGZmYjQxM2MtZTk5MzcxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.274808Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721482. Ctx: { TraceId: 01jd7xj1j9d717zmgvtbwwbbth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.274886Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721483. Ctx: { TraceId: 01jd7xj1j7dw2mc4zg70qxatvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTJhN2I2YTQtODg0ZTg5MGYtMjg5MjUxYzktZWYzOTQ1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.275380Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721484. Ctx: { TraceId: 01jd7xj1ja4753e9zcw92d3263, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGIxNDA3MTEtNmMzNzcyMGQtNDYwYWJiYmQtMzA5YzU1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.275691Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721485. Ctx: { TraceId: 01jd7xj1jb3pwdas2na04btrjj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzZlOGRlOTctNDE5ODYzYmUtYjMxMjljMmUtOGRlMTAzODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.276042Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721486. Ctx: { TraceId: 01jd7xj1ja4753e9zcw92d3263, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGIxNDA3MTEtNmMzNzcyMGQtNDYwYWJiYmQtMzA5YzU1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.276079Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721487. Ctx: { TraceId: 01jd7xj1jb3pwdas2na04btrjj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzZlOGRlOTctNDE5ODYzYmUtYjMxMjljMmUtOGRlMTAzODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.277499Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721488. Ctx: { TraceId: 01jd7xj1jc2wpbqv1w78ccz7zf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjY1MzAyNTMtNWI1ODliOGYtNDQzYWJjNTItM2UxNTMzODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.278087Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721489. Ctx: { TraceId: 01jd7xj1jc0nxzptjbpwtnfzts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmNiYTg4MTEtMzg3Y2VmODUtZGZmYjQxM2MtZTk5MzcxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.278103Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721490. Ctx: { TraceId: 01jd7xj1jdc0fjxwxv2bx74w6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.278380Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721492. 
Ctx: { TraceId: 01jd7xj1jc2wpbqv1w78ccz7zf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjY1MzAyNTMtNWI1ODliOGYtNDQzYWJjNTItM2UxNTMzODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.278825Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721493. Ctx: { TraceId: 01jd7xj1jc0nxzptjbpwtnfzts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmNiYTg4MTEtMzg3Y2VmODUtZGZmYjQxM2MtZTk5MzcxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.279042Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721494. Ctx: { TraceId: 01jd7xj1jdc0fjxwxv2bx74w6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.279083Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721491. Ctx: { TraceId: 01jd7xj1jd9saydsbzv5j9zs46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTJhN2I2YTQtODg0ZTg5MGYtMjg5MjUxYzktZWYzOTQ1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.279188Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721495. Ctx: { TraceId: 01jd7xj1jc0nxzptjbpwtnfzts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmNiYTg4MTEtMzg3Y2VmODUtZGZmYjQxM2MtZTk5MzcxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.279573Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721496. Ctx: { TraceId: 01jd7xj1jef2y66k4zr0efqavb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzZlOGRlOTctNDE5ODYzYmUtYjMxMjljMmUtOGRlMTAzODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.279966Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721497. Ctx: { TraceId: 01jd7xj1jdc0fjxwxv2bx74w6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.280195Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721498. Ctx: { TraceId: 01jd7xj1jd9saydsbzv5j9zs46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTJhN2I2YTQtODg0ZTg5MGYtMjg5MjUxYzktZWYzOTQ1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.280551Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721499. Ctx: { TraceId: 01jd7xj1jef2y66k4zr0efqavb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzZlOGRlOTctNDE5ODYzYmUtYjMxMjljMmUtOGRlMTAzODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.280564Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721500. Ctx: { TraceId: 01jd7xj1jd9saydsbzv5j9zs46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTJhN2I2YTQtODg0ZTg5MGYtMjg5MjUxYzktZWYzOTQ1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.281863Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721501. 
Ctx: { TraceId: 01jd7xj1jg2m9tp0ns4vnsrn78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGIxNDA3MTEtNmMzNzcyMGQtNDYwYWJiYmQtMzA5YzU1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2024-11-21T17:50:43.282447Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721502. Ctx: { TraceId: 01jd7xj1jg2m9tp0ns4vnsrn78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGIxNDA3MTEtNmMzNzcyMGQtNDYwYWJiYmQtMzA5YzU1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.282670Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721504. Ctx: { TraceId: 01jd7xj1jg2m9tp0ns4vnsrn78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGIxNDA3MTEtNmMzNzcyMGQtNDYwYWJiYmQtMzA5YzU1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.282940Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721506. Ctx: { TraceId: 01jd7xj1jg2m9tp0ns4vnsrn78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGIxNDA3MTEtNmMzNzcyMGQtNDYwYWJiYmQtMzA5YzU1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.283130Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721505. Ctx: { TraceId: 01jd7xj1jje2d68v0z0dznq8nz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.283132Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721503. Ctx: { TraceId: 01jd7xj1jhcgaz7gn84jyzkwm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjY1MzAyNTMtNWI1ODliOGYtNDQzYWJjNTItM2UxNTMzODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.283810Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721507. Ctx: { TraceId: 01jd7xj1jje2d68v0z0dznq8nz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.283819Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721508. Ctx: { TraceId: 01jd7xj1jhcgaz7gn84jyzkwm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjY1MzAyNTMtNWI1ODliOGYtNDQzYWJjNTItM2UxNTMzODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.284011Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721509. Ctx: { TraceId: 01jd7xj1jje2d68v0z0dznq8nz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjJmM2YxNTYtMTEzMDY3MWItYzk4ZjMxY2QtNGJlZDExNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:43.284062Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721510. Ctx: { TraceId: 01jd7xj1jhcgaz7gn84jyzkwm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjY1MzAyNTMtNWI1ODliOGYtNDQzYWJjNTItM2UxNTMzODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |81.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log}
|81.9%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log}
>> YdbIndexTable::OnlineBuild [GOOD]
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource
>> DataShardSnapshots::LockedWriteReuseAfterCommit
>> YdbIndexTable::OnlineBuildWithDataColumn
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet
>> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD]
>> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD]
>> YdbIndexTable::MultiShardTableTwoIndexes
>> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD]
>> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD]
>> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD]
|81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD]
>> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps
>> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD]
>> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD]
>> TTopicReaderTests::TestRun_ReadOneMessage
>> TSchemeShardUserAttrsTest::VariousUse
>> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited
>> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD]
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD]
>> TSchemeShardUserAttrsTest::MkDir
>> DataShardSnapshots::LockedWriteBulkUpsertConflict [GOOD]
|81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
|81.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
>> TSchemeShardUserAttrsTest::Boot
>> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD]
>> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD]
>> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD]
>> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD]
>> TSchemeShardUserAttrsTest::VariousUse [GOOD]
>> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD]
>> TExternalDataSourceTest::PreventDeletionOfDependentDataSources
>> DataShardSnapshots::LockedWriteDistributedCommitAborted
>> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD]
>> TSchemeShardUserAttrsTest::MkDir [GOOD]
>> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD]
>> TSchemeShardUserAttrsTest::Boot [GOOD]
>> DataShardSnapshots::VolatileSnapshotMerge [GOOD]
>> DataShardSnapshots::VolatileSnapshotSplit
|81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest
|81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD]
|81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD]
|81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest
|81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD]
|81.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD]
|82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> DataShardSnapshots::LockedWriteReuseAfterCommit [GOOD] >> TSchemeShardTTLTests::ConditionalErase [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict >> DataShardSnapshots::MvccSnapshotLockedWrites >> DataShardSnapshots::LockedWriteDistributedCommitSuccess >> TSchemeShardUserAttrsTest::SetAttrs >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot |82.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:44.590236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:44.590264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.590269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:44.590274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:44.590289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:44.590292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:44.590302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.590375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:44.599198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:44.599218Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:44.602280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:44.603105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:44.603141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:44.604650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:44.604837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:44.604944Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.605019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:44.606349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.606543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.606552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.606576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:44.606581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.606585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:44.606593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.607548Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:44.619825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:44.619887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.619938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:44.619977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:44.619983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.620805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.620846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:44.620901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.620914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:44.620917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:44.620921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:44.621584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.621597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:44.621601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:44.622017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.622027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.622033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.622039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.622493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:44.622873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:44.622917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:44.623089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.623111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.623119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.623170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:44.623174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.623198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.623206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:44.623534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.623539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.623571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.623576Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:44.623669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.623676Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:44.623686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:44.623691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.623695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:44.623699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.623702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:44.623704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:44.623712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:44.623716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:44.623719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:44.623952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.623961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.623964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:44.623967Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:44.623970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.623979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:44.624514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:44.624584Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:44.624751Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:44.625584Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:44.626019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } 
TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:44.626056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2024-11-21T17:50:44.626065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T17:50:44.626068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T17:50:44.626178Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:44.626726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.626751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2024-11-21T17:50:44.626827Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:50:44.626856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:50:44.626860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:50:44.626914Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:44.626929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:44.626933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:281:2273] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:44.626988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:44.627011Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 28us result status StatusPathDoesNotExist 2024-11-21T17:50:44.627050Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check 
failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:45.112447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:45.112473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:45.112478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:45.112482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:45.112488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:45.112491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:45.112500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:45.112583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:45.122980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:45.123005Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:45.125899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:45.126659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:45.126692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:45.128173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:45.128412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:45.128511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:45.128587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:45.129596Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:45.129859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:45.129871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:45.129923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:45.129931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:45.129937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:45.129954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.131235Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:45.148207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:45.148335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.148397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:45.148447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:45.148456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.149281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:45.149313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:45.149359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.149369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:45.149374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:45.149379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:45.149813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.149826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:45.149831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:45.150186Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.150198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.150204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:45.150212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:45.150818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:45.151257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:45.151314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:45.151494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:45.151520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:45.151527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:45.151579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:45.151587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:45.151622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:45.151635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:45.152032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:45.152040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:45.152084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:45.152091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:45.152178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.152186Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:45.152197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:45.152202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:45.152208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:45.152214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:45.152219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:45.152223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:45.152256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:45.152262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:45.152266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:45.152577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:45.152593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:45.152599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:45.152604Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:45.152609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:45.152623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:45.153265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:45.153373Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:43.220993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:43.221017Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.221022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:43.221026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:43.221032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:43.221036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:43.221044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.221133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:43.231603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:43.231622Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:43.234256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:43.235045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:43.235075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:43.236532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:43.236794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:43.236897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.236970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:43.237986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.238235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.238245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.238281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:43.238287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.238294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:43.238307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.239588Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:43.254528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:43.254586Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.254637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:43.254668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:43.254672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.255343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.255369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:43.255406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.255413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:43.255415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:43.255418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:43.255728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.255735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:43.255738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:43.255993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.255999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.256004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.256010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.256396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:43.256812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:43.256865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:43.257034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.257059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.257068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.257121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:43.257128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.257155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.257167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:43.257622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.257630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.257671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.257676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:43.257745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.257751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:43.257761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:43.257765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.257770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:43.257775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.257780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:43.257784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:43.257794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:43.257799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:43.257803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:43.258070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.258088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.258092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:43.258097Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:43.258101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.258116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... Size 619 rowCount 2 cpuUsage 0 2024-11-21T17:50:45.760436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.760517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.760554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040229500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640229500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:50:45.760977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 
2024-11-21T17:50:45.761051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:45.761112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:45.761122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T17:50:45.761154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:50:45.761169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.761176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:50:45.761529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.761539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:50:45.762609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.762620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T17:50:45.762764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.762770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:45.762795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.762818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.762849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.762856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.229500Z, at schemeshard: 72057594046678944 2024-11-21T17:50:45.762863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.762867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.229500Z, at schemeshard: 72057594046678944 2024-11-21T17:50:45.762880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.762883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:50:45.763254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.231500Z, at schemeshard: 72057594046678944 2024-11-21T17:50:45.763286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 
2024-11-21T17:50:45.763291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.231500Z, at schemeshard: 72057594046678944 2024-11-21T17:50:45.763326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.763336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.231500Z, at schemeshard: 72057594046678944 2024-11-21T17:50:45.763339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.825301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2024-11-21T17:50:45.825380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2024-11-21T17:50:45.825399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0 2024-11-21T17:50:45.825408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 5: RowCount 1, DataSize 43 2024-11-21T17:50:45.825446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2024-11-21T17:50:45.825452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0 2024-11-21T17:50:45.825455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 2, DataSize 603 2024-11-21T17:50:45.825466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T17:50:45.825470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0 2024-11-21T17:50:45.825473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T17:50:45.825482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T17:50:45.825486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0 2024-11-21T17:50:45.825488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 0, DataSize 0 2024-11-21T17:50:45.825496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 619 row count 2 2024-11-21T17:50:45.825499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0 2024-11-21T17:50:45.825503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 6: RowCount 2, DataSize 619, with borrowed parts 2024-11-21T17:50:45.836326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.836352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T17:50:45.836789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:45.836807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:45.836813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.233500Z, at schemeshard: 72057594046678944 2024-11-21T17:50:45.836821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:44.997339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:44.997363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.997367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:44.997370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:44.997375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:44.997378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:44.997383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.997448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:45.006485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:45.006509Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:45.008968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:45.009600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:45.009638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:45.010996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:45.011191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:45.011262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:45.011313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:45.012311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:45.012538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:45.012546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:45.012584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:45.012589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:45.012593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:45.012604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.013658Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:45.027596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:45.027702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.027770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:45.027817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:45.027825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:50:45.028671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:45.028702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:45.028755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.028765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:45.028770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:45.028775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:45.029325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.029344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:45.029351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:45.029741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.029751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.029757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:45.029764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:45.030380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:45.030817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:45.030876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:45.031106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:45.031136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:45.031143Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:45.031216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:45.031224Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:45.031255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:45.031267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:45.031671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:45.031678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:45.031735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:45.031742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:45.031829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.031836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:45.031849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:45.031853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:45.031859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:45.031864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:45.031868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:45.031872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:45.031884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:45.031890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:45.031894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:45.032262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:45.032278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:45.032282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:45.032287Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:45.032292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2024-11-21T17:50:45.032304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... Subscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T17:50:45.061434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:50:45.061437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:50:45.061516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:50:45.061545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:371:2363] 2024-11-21T17:50:45.061571Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061588Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:45.061597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:371:2363] 2024-11-21T17:50:45.061609Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:45.061618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:371:2363] 2024-11-21T17:50:45.061630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:45.061633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:371:2363] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:45.061698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061719Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 30us result status StatusSuccess 2024-11-21T17:50:45.061808Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061866Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061879Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 15us result status StatusSuccess 2024-11-21T17:50:45.061918Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.061979Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 12us result status StatusSuccess 2024-11-21T17:50:45.062009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:45.062046Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.062056Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 11us result status StatusSuccess 2024-11-21T17:50:45.062090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:45.062131Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:45.062144Z node 1 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 15us result status StatusSuccess 2024-11-21T17:50:45.062172Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:03.032257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.032277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.032280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.032283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.032286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.032289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.032302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.032376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.043365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.043384Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.045421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.045520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.045544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.047603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.047664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.047755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.047927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.048584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.048835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.048845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.048853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.048858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.048861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.048887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:03.049722Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.064594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.064656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.064700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.064736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.064742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.065330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.065353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.065385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.065393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.065398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.065402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.065844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.065856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.065860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.066211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.066222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.066227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.066232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.066820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.067248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.067294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.067460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.067483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.067490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.067535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.067542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.067569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.067578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.067958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.067969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.067996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.068000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.068053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.068059Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.068068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.068071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.068076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.068080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.068084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.068087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.068097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.068101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.068105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
date queue, at schemeshard: 72057594046678944 2024-11-21T17:50:45.081555Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:50:45.081563Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:50:45.081730Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.081741Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.081744Z node 146 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:45.081748Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:50:45.081751Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:45.081764Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:50:45.081908Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:50:45.081914Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:50:45.081919Z node 146 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:50:45.081935Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T17:50:45.081955Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T17:50:45.082009Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:45.082024Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 627065227369 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:45.082033Z node 146 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T17:50:45.082052Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T17:50:45.082061Z node 146 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T17:50:45.082064Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:50:45.082073Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:45.082080Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:45.082085Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T17:50:45.082091Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:50:45.082095Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T17:50:45.082098Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T17:50:45.082105Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:45.082110Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T17:50:45.082114Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:50:45.082116Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:50:45.082718Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:50:45.082731Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:50:45.082890Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.082905Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:50:45.082911Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:50:45.082951Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:45.083020Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.083065Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:45.083070Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:45.083098Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:45.083115Z node 146 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:45.083120Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [146:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T17:50:45.083122Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [146:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T17:50:45.083263Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.083274Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.083277Z node 146 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:45.083281Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:45.083284Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:45.083347Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.083354Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.083357Z node 146 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:45.083360Z node 146 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:50:45.083363Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:45.083371Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T17:50:45.083376Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [146:122:2148] 2024-11-21T17:50:45.083419Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:45.083423Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:45.083431Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:45.083841Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2024-11-21T17:50:45.084160Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:45.084194Z node 146 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T17:50:45.084205Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T17:50:45.084284Z node 146 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T17:50:45.084697Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:50:45.084706Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:50:45.084770Z node 146 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:50:45.084785Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:45.084789Z node 146 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [146:863:2797] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:44.593075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:44.593098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.593101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:44.593105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:44.593117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:44.593120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:44.593127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.593185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:44.600627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:44.600648Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:44.603074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:44.603588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:44.603610Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:44.604676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:44.604828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:44.604913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.604967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:44.605696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.605909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.605915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.605945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:44.605950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.605954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:44.605962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.606842Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:44.617191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:44.617261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.617338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:44.617384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:44.617389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.618029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.618061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:44.618100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.618108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T17:50:44.618111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:44.618115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:44.618391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.618399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:44.618401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:44.618598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.618603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.618607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.618612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.618966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:44.619238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:44.619280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:44.619408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.619423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.619430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.619465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:44.619469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.619491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.619500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:44.619745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.619749Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.619779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.619783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:44.619851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.619856Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:44.619864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:44.619866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.619870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:44.619873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.619876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:44.619878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:44.619884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:44.619888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:44.619891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:44.620076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.620085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.620088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:44.620091Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:44.620094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.620103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
95Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:44.863600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:44.863605Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:44.863608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:44.863619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:50:44.863623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:44.863628Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2024-11-21T17:50:44.863632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:50:44.863636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T17:50:44.863639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:50:44.863790Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:44.863801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:44.863806Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:44.863810Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:50:44.863814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:44.863943Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:44.863956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:44.863960Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:44.863964Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:50:44.863967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:44.864224Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T17:50:44.864258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:44.864263Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:44.864267Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:44.864271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:44.864283Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:44.864714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:44.864957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:44.865315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:50:44.865370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:50:44.865377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:50:44.865444Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:44.865461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:44.865466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:331:2323] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:44.865530Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:44.865556Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 34us result status StatusSuccess 2024-11-21T17:50:44.865622Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 
0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T17:50:44.866206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:44.866241Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2024-11-21T17:50:44.866253Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2024-11-21T17:50:44.866708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.866736Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:50:44.866784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:50:44.866789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:50:44.866842Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:50:44.866856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:44.866860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:339:2331] TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:44.866918Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:44.866944Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 31us result status StatusSuccess 2024-11-21T17:50:44.866999Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: 
"ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:44.900024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:44.900058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.900064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:44.900068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:44.900075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:44.900079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:44.900089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.900190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:44.911218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:44.911247Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:44.914566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:44.915382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:44.915430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T17:50:44.917069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:44.917267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:44.917385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.917469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:44.918543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.918844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.918859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.918907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:44.918916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.918922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:44.918941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.920319Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:44.938133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:44.938265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.938343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:44.938398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:44.938407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.939312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.939339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:44.939383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.939391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:44.939395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:44.939398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:44.939733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.939741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:44.939744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:44.939983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.939988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.939992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.939997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.940418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:44.940853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:44.940904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:44.941084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.941105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.941110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.941154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:44.941159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.941186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.941196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:44.941571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.941577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.941612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.941616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:44.941687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.941692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:44.941700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:44.941703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.941707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:44.941710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.941713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:44.941716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:44.941725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:44.941729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:44.941732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:44.941957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.941966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.941969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:44.941972Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:44.941975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.941984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T17:50:44.970301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:44.970309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T17:50:44.970826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2024-11-21T17:50:44.970855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000010 2024-11-21T17:50:44.970945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:44.970966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.970985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.970995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 112:0, step: 5000010, at schemeshard: 72057594046678944 2024-11-21T17:50:44.971016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 112:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.971023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2024-11-21T17:50:44.971027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T17:50:44.971035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:44.971042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:44.971047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2024-11-21T17:50:44.971054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T17:50:44.971057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2024-11-21T17:50:44.971061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2024-11-21T17:50:44.971070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:44.971075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2024-11-21T17:50:44.971079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2024-11-21T17:50:44.971081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:50:44.971339Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:44.971621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.971630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:44.971651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:44.971672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.971676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 3 2024-11-21T17:50:44.971680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 4 FAKE_COORDINATOR: Erasing txId 112 2024-11-21T17:50:44.971765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:44.971774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:44.971778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:44.971783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2024-11-21T17:50:44.971790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:44.971856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:44.971864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T17:50:44.971867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2024-11-21T17:50:44.971871Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:50:44.971874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:44.971884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2024-11-21T17:50:44.971916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:44.971920Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:44.971928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:44.972429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:44.972494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T17:50:44.972504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2024-11-21T17:50:44.972554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2024-11-21T17:50:44.972558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2024-11-21T17:50:44.972626Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2024-11-21T17:50:44.972638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2024-11-21T17:50:44.972641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:488:2480] TestWaitNotification: OK eventTxId 112 2024-11-21T17:50:44.972716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:44.972737Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 30us result status StatusSuccess 2024-11-21T17:50:44.972788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2024-11-21T17:50:44.973240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:44.973261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.973272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:44.973633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.973652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:44.792898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:44.792917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.792920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:44.792923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:44.792927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:44.792929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:44.792935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:44.793000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:44.800792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:44.800811Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:44.803049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:44.803643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:44.803669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:44.805239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:44.805473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:44.805571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.805645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:44.806586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.806802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.806810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.806836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:44.806840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.806844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:44.806854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.807740Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:44.820386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:44.820471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.820522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:44.820571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:44.820577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.821389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.821419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:44.821469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.821479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:44.821483Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:44.821488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:44.821998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.822012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:44.822017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:44.822398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.822408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.822414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.822421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.822864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:44.823219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:44.823274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:44.823427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.823446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.823454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.823495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:44.823499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:44.823523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.823535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:44.824002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.824012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.824056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.824062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:44.824152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.824158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:44.824170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:44.824174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.824179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:44.824185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:44.824190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:44.824193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:44.824205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:44.824210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:44.824214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:44.824542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.824556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:44.824560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:44.824563Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:44.824568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:44.824583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
BUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T17:50:44.842260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2024-11-21T17:50:44.842278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2024-11-21T17:50:44.842454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.842469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.842473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T17:50:44.842487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.842494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T17:50:44.842497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:44.842503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:44.842507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:44.842511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T17:50:44.842515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:50:44.842518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:50:44.842521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:50:44.842526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:44.842529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2024-11-21T17:50:44.842532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:50:44.842534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:50:44.842595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:50:44.842624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:50:44.842848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.842853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.842867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:44.842882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.842885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 1 2024-11-21T17:50:44.842888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2024-11-21T17:50:44.842945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:44.842951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:44.842956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:50:44.842959Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:44.842961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:44.842998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:44.843005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:50:44.843008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:50:44.843012Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:50:44.843016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:44.843024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2024-11-21T17:50:44.843069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:44.843073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:44.843078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 3 2024-11-21T17:50:44.843388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:50:44.843540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:50:44.843550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:50:44.843586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:50:44.843593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:50:44.843658Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:50:44.843677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:50:44.843682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:396:2388] TestWaitNotification: OK eventTxId 105 2024-11-21T17:50:44.843744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:44.843759Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 21us result status StatusPathDoesNotExist 2024-11-21T17:50:44.843784Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:50:44.843836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:44.843846Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 11us result status StatusSuccess 2024-11-21T17:50:44.843885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 
11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:02.989344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:02.989364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:02.989369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:02.989373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:02.989379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:02.989382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:02.989398Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:02.989477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.000779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.000803Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.003144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.003256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.003290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.006003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.006079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.006169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.006369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.007050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.007306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.007315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.007326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.007332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.007337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.007371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:03.008695Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.025639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.025721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.025781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.025821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.025829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.026560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.026585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.026631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.026642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.026646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.026652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.027088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.027101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.027106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.027481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.027490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.027496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.027503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.028138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.028555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.028605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.028795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.028823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 
4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.028831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.028888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.028895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.028923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.028934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.029325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.029334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.029371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.029376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.029452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.029458Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.029469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.029473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.029480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.029485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.029489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.029493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.029504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.029509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.029513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
94046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.978393Z node 155 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:44.978397Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T17:50:44.978401Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:44.978510Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.978518Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.978524Z node 155 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:44.978528Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:50:44.978531Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:44.978538Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:50:44.978966Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:50:44.978977Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:50:44.978982Z node 155 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:50:44.979000Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T17:50:44.979023Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T17:50:44.979184Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:44.979204Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 665719933034 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:44.979211Z node 155 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 
281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T17:50:44.979234Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T17:50:44.979244Z node 155 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T17:50:44.979248Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:50:44.979257Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:44.979266Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:44.979271Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T17:50:44.979277Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:50:44.979282Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T17:50:44.979286Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T17:50:44.979294Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:50:44.979302Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T17:50:44.979307Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:50:44.979310Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:50:44.979756Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:50:44.979769Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:50:44.979810Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:44.979819Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.979833Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T17:50:44.980183Z node 155 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:44.980190Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:44.980222Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:50:44.980262Z node 155 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:44.980268Z node 155 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [155:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T17:50:44.980273Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [155:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 2024-11-21T17:50:44.980409Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.980419Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.980424Z node 155 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:44.980429Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:50:44.980433Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:44.980523Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.980532Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.980538Z node 155 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:50:44.980541Z node 155 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:50:44.980544Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:50:44.980553Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T17:50:44.980558Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [155:121:2147] 2024-11-21T17:50:44.980607Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:44.980611Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:50:44.980620Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:44.981028Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.981313Z node 
155 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:50:44.981335Z node 155 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T17:50:44.981345Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T17:50:44.981402Z node 155 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T17:50:44.981716Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:50:44.981724Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:50:44.981779Z node 155 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:50:44.981794Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:50:44.981799Z node 155 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [155:713:2673] TestWaitNotification: OK eventTxId 1003 >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:46.855735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:46.855753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:46.855756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:46.855759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:46.855763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:46.855765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:46.855770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:46.855854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:46.862438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:46.862455Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:46.864568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2024-11-21T17:50:46.865046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:46.865068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:46.866061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:46.866218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:46.866287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:46.866329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:46.866927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:46.867119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:46.867128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:46.867159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:46.867163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:46.867167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:46.867186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.867911Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:46.878406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:46.878485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.878527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:46.878557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:46.878562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.879130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:46.879150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:46.879192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:50:46.879199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:46.879202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:46.879205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:46.879507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.879513Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:46.879516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:46.879746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.879751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.879754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:46.879760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:46.880121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:46.880371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:46.880412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:46.880526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:46.880542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:46.880546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:46.880577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:46.880581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:46.880600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:46.880607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2024-11-21T17:50:46.880879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:46.880884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:46.880912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:46.880915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:46.880977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.880982Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:46.880992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:46.880995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:46.881000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:46.881004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:46.881008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:46.881012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:46.881020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:46.881025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:46.881028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:46.881207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:46.881215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:46.881218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:46.881221Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:46.881225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:46.881232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
CHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:46.888704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:46.888706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:317:2309] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:46.888745Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:46.888766Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 26us result status StatusSuccess 2024-11-21T17:50:46.888850Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T17:50:46.889213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:46.889225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:46.889250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:46.889254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889596Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2024-11-21T17:50:46.889615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T17:50:46.889634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:46.889866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T17:50:46.889881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T17:50:46.889917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2024-11-21T17:50:46.889950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:50:46.889952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:46.889959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:46.889963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T17:50:46.889967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:46.889970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:46.889972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:50:46.889974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:50:46.889978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:46.889981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2024-11-21T17:50:46.889983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 
72057594046678944, LocalPathId: 1], 6 2024-11-21T17:50:46.890238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:46.890244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:46.890263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:46.890268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T17:50:46.890316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:46.890321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:46.890324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:50:46.890326Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T17:50:46.890329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:46.890336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T17:50:46.890554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:50:46.890593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:50:46.890597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:50:46.890638Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:50:46.890651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:46.890655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:336:2328] TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:46.890693Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:46.890708Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 20us result status StatusSuccess 2024-11-21T17:50:46.890760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::MvccSnapshotLockedWrites [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:47.384445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:47.384462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:47.384465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:47.384468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:47.384471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:47.384474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:47.384479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:47.384535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:47.391257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:47.391276Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:47.393776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:47.394520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:47.394554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:47.395866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:47.396026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:47.396108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:47.396165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:47.397006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:47.397253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:47.397263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:47.397298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:47.397304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:47.397309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:47.397321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.398442Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:47.410790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:47.410871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.410914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:47.410949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:47.410954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.411594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:47.411614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:47.411648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.411655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:47.411658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:47.411660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:47.412092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.412113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:47.412116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:47.412607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.412623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.412629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:47.412636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:47.413168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:47.413690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:47.413743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:47.413907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:47.413932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:47.413938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:47.413987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:47.413995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:50:47.414025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:47.414036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:47.414494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:47.414503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:47.414542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:47.414547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:47.414623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.414630Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:47.414640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:47.414644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:47.414649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:47.414654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:47.414659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:47.414663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:47.414676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:47.414681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:47.414684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:47.414960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:47.414974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:47.414978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:47.414983Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:47.414987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:47.414998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:50:47.425325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:47.425330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:50:47.425379Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:47.425393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:47.425397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:327:2319] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:47.425450Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:47.425466Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 21us result status StatusSuccess 2024-11-21T17:50:47.425511Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T17:50:47.425943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:47.425962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.425977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:50:47.425999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:47.426004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.426491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:47.426539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2024-11-21T17:50:47.426577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.426583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:47.426591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T17:50:47.426612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:47.427035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T17:50:47.427065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T17:50:47.427130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:47.427149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:47.427157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2024-11-21T17:50:47.427209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:50:47.427214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:47.427225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:47.427234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T17:50:47.427243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:47.427248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:47.427253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:50:47.427257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:50:47.427272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:47.427278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2024-11-21T17:50:47.427282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:50:47.427685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:47.427695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:47.427724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:47.427730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T17:50:47.427821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:47.427831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:47.427836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:50:47.427841Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:50:47.427846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:47.427860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T17:50:47.428173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:50:47.428227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:50:47.428249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:50:47.428313Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:50:47.428329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:47.428334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:344:2336] TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:47.428402Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:47.428426Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/DirA" took 33us result status StatusSuccess 2024-11-21T17:50:47.428487Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> Cache::Test5 >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] >> EntityId::Order >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> Cache::Test4 [GOOD] >> SplitterBasic::LimitExceed [GOOD] |82.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> BsControllerConfig::MoveGroups [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] |82.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] |82.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2845:2115] 2024-11-21T17:50:31.358901Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:31.359772Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:31.360211Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:31.360331Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:31.360505Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:31.360514Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:31.360560Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:31.361513Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:31.361548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:31.361577Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:31.361595Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:31.361607Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:31.361617Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2024-11-21T17:50:31.374542Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:31.374607Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:31.384993Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:31.385050Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:31.385080Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:31.385092Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:31.385121Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:31.385131Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:31.385136Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:31.385145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:31.395483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:31.395552Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:31.395728Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:31.395737Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:31.395766Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:31.397953Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host 
{ Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 100 PDiskFilter { Property { Type: ROT } } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "second storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } } 2024-11-21T17:50:31.398245Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1000 Path# /dev/disk2 2024-11-21T17:50:31.398258Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1001 Path# /dev/disk1 2024-11-21T17:50:31.398262Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1000 Path# /dev/disk3 2024-11-21T17:50:31.398268Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1001 Path# /dev/disk2 2024-11-21T17:50:31.398271Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1002 Path# /dev/disk1 2024-11-21T17:50:31.398275Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1000 Path# /dev/disk2 2024-11-21T17:50:31.398279Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1001 Path# /dev/disk1 2024-11-21T17:50:31.398283Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1000 Path# /dev/disk2 2024-11-21T17:50:31.398286Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1001 Path# /dev/disk1 2024-11-21T17:50:31.398290Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1000 Path# /dev/disk2 2024-11-21T17:50:31.398293Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1001 Path# /dev/disk1 2024-11-21T17:50:31.398296Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create 
new pdisk PDiskId# 45:1000 Path# /dev/disk2 2024-11-21T17:50:31.398300Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 45:1001 Path# /dev/disk1 2024-11-21T17:50:31.398304Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1000 Path# /dev/disk2 2024-11-21T17:50:31.398314Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1001 Path# /dev/disk1 2024-11-21T17:50:31.398318Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1000 Path# /dev/disk2 2024-11-21T17:50:31.398322Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1001 Path# /dev/disk1 2024-11-21T17:50:31.398325Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1000 Path# /dev/disk2 2024-11-21T17:50:31.398329Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1001 Path# /dev/disk1 2024-11-21T17:50:31.398332Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1000 Path# /dev/disk2 2024-11-21T17:50:31.398336Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1001 Path# /dev/disk1 2024-11-21T17:50:31.398339Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1000 Path# /dev/disk2 2024-11-21T17:50:31.398344Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1001 Path# /dev/disk1 2024-11-21T17:50:31.398347Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisk ... disks.cpp:355} Create new pdisk PDiskId# 72:1001 Path# /dev/disk1 2024-11-21T17:50:40.591190Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1000 Path# /dev/disk1 2024-11-21T17:50:40.591194Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1000 Path# /dev/disk2 2024-11-21T17:50:40.591199Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1000 Path# /dev/disk3 2024-11-21T17:50:40.591204Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1001 Path# /dev/disk1 2024-11-21T17:50:40.591208Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1000 Path# /dev/disk1 2024-11-21T17:50:40.591213Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1000 Path# /dev/disk1 2024-11-21T17:50:40.591217Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1002 Path# /dev/disk2 2024-11-21T17:50:40.591222Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1000 Path# /dev/disk3 2024-11-21T17:50:40.591227Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1001 Path# /dev/disk3 2024-11-21T17:50:40.591231Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1001 Path# /dev/disk2 2024-11-21T17:50:40.591237Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1002 Path# /dev/disk1 2024-11-21T17:50:40.591242Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1000 Path# /dev/disk2 
2024-11-21T17:50:40.591247Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1001 Path# /dev/disk3 2024-11-21T17:50:40.591252Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1000 Path# /dev/disk2 2024-11-21T17:50:40.591260Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1002 Path# /dev/disk2 2024-11-21T17:50:40.591265Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 62:1001 Path# /dev/disk3 2024-11-21T17:50:40.591269Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1000 Path# /dev/disk3 2024-11-21T17:50:40.591274Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1001 Path# /dev/disk1 2024-11-21T17:50:40.591279Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1002 Path# /dev/disk2 2024-11-21T17:50:40.591284Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1000 Path# /dev/disk3 2024-11-21T17:50:40.591288Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1001 Path# /dev/disk3 2024-11-21T17:50:40.591293Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1001 Path# /dev/disk2 2024-11-21T17:50:40.591298Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1002 Path# /dev/disk1 2024-11-21T17:50:40.591302Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1002 Path# /dev/disk2 2024-11-21T17:50:40.591307Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 63:1001 Path# /dev/disk3 2024-11-21T17:50:40.591311Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 62:1002 Path# /dev/disk1 2024-11-21T17:50:40.591316Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 63:1002 Path# /dev/disk1 2024-11-21T17:50:40.591320Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1001 Path# /dev/disk3 2024-11-21T17:50:40.591325Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1002 Path# /dev/disk2 2024-11-21T17:50:40.591329Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 57:1001 Path# /dev/disk1 2024-11-21T17:50:40.591334Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1002 Path# /dev/disk2 2024-11-21T17:50:40.591338Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1001 Path# /dev/disk3 2024-11-21T17:50:40.591342Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1001 Path# /dev/disk1 2024-11-21T17:50:40.591346Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1002 Path# /dev/disk1 2024-11-21T17:50:40.591351Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1002 Path# /dev/disk2 2024-11-21T17:50:40.591355Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1002 Path# /dev/disk3 2024-11-21T17:50:40.591361Z node 51 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2024-11-21T17:50:40.591365Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1002 Path# /dev/disk1 2024-11-21T17:50:40.591370Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1002 Path# /dev/disk2 2024-11-21T17:50:40.591375Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 59:1001 Path# /dev/disk3 2024-11-21T17:50:40.591379Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 58:1001 Path# /dev/disk1 2024-11-21T17:50:40.591384Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 59:1002 Path# /dev/disk1 2024-11-21T17:50:40.591390Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1002 Path# /dev/disk2 2024-11-21T17:50:40.591395Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 58:1002 Path# /dev/disk3 2024-11-21T17:50:40.591399Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 53:1001 Path# /dev/disk1 2024-11-21T17:50:40.591404Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 56:1001 Path# /dev/disk1 2024-11-21T17:50:40.591410Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1002 Path# /dev/disk2 2024-11-21T17:50:40.591414Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 56:1002 Path# /dev/disk3 2024-11-21T17:50:40.591419Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1001 Path# /dev/disk1 2024-11-21T17:50:40.591423Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1001 Path# /dev/disk3 2024-11-21T17:50:40.591428Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1000 Path# /dev/disk1 2024-11-21T17:50:40.591432Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1002 Path# /dev/disk1 2024-11-21T17:50:40.591437Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1001 Path# /dev/disk3 2024-11-21T17:50:40.591441Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1000 Path# /dev/disk1 2024-11-21T17:50:40.591446Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1001 Path# /dev/disk2 2024-11-21T17:50:40.591451Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1002 Path# /dev/disk2 2024-11-21T17:50:40.591456Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1001 Path# /dev/disk3 2024-11-21T17:50:40.591460Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1002 Path# /dev/disk1 2024-11-21T17:50:40.591465Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1002 Path# /dev/disk2 2024-11-21T17:50:40.591470Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1002 Path# /dev/disk2 2024-11-21T17:50:40.591474Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create 
new pdisk PDiskId# 57:1002 Path# /dev/disk3 2024-11-21T17:50:40.591479Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1000 Path# /dev/disk2 2024-11-21T17:50:40.591484Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 53:1002 Path# /dev/disk3 2024-11-21T17:50:40.591488Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 52:1001 Path# /dev/disk1 2024-11-21T17:50:40.591495Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 52:1002 Path# /dev/disk3 2024-11-21T17:50:40.591500Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1002 Path# /dev/disk1 2024-11-21T17:50:40.591504Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1002 Path# /dev/disk3 2024-11-21T17:50:40.591510Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1001 Path# /dev/disk3 2024-11-21T17:50:40.591514Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1002 Path# /dev/disk2 2024-11-21T17:50:40.591518Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1002 Path# /dev/disk3 2024-11-21T17:50:40.591523Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1002 Path# /dev/disk1 2024-11-21T17:50:40.654959Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T17:50:40.681180Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2024-11-21T17:50:40.694539Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T17:50:40.707662Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2024-11-21T17:50:40.721322Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T17:50:40.741594Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2024-11-21T17:50:40.755306Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } |82.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] |82.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |82.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> Cache::Test4 [GOOD] |82.0%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> Cache::Test2 [GOOD] |82.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit >> DataShardSnapshots::MvccSnapshotLockedWritesRestart [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable |82.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] |82.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes >> TExternalDataSourceTest::DropTableTwice >> DataShardSnapshots::LockedWriteDistributedCommitFreeze [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:42.893234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:42.893253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.893257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:42.893260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:42.893264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:42.893266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:42.893271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:42.893335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:42.901881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:42.901899Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2024-11-21T17:50:42.905432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:42.906300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:42.906333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:42.907735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:42.908020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:42.908103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.908180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:42.909473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.909755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.909770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.909808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:42.909816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.909822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:42.909837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.911214Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:42.927462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:42.927532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.927596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:42.927635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:42.927643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.928496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.928537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:42.928591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.928603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:42.928608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:42.928613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:42.929138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.929153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:42.929158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:42.929591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.929604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.929610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.929618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.930271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:42.930777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:42.930840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:42.931084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:42.931115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:42.931129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.931205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:42.931212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:42.931248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.931261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:42.931759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:42.931770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:42.931817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:42.931823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:42.931896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:42.931903Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:42.931915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:42.931919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.931925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:42.931930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:42.931934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:42.931938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:42.931970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:42.931977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:42.931980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:42.932321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.932344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:42.932349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:42.932355Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:42.932360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:42.932378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.368163Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369432Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369467Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369484Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369502Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369531Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369545Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369560Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369694Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369708Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369720Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369734Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369753Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369784Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.369903Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370448Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370470Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370481Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370494Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370505Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370525Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 
72057594046678944 2024-11-21T17:50:49.370553Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.370562Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:49.370578Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:49.370582Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:49.370587Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T17:50:49.370602Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2579:3845] message: TxId: 101 2024-11-21T17:50:49.370607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:49.370622Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:49.370625Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:49.370770Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2024-11-21T17:50:49.370949Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.372270Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:49.372287Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2580:3846] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:49.372428Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:49.372492Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 71us result status StatusSuccess 2024-11-21T17:50:49.372625Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardUserAttrsTest::SpecialAttributes 
[GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |82.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:49.907927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:49.907949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:49.907952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:49.907956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:49.907960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:49.907963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:49.907970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:49.908043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:49.915706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:49.915728Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:49.918343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:49.919083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:49.919124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:49.920566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:49.920790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:49.920893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:49.920964Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:49.921963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.922245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:49.922257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.922304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:49.922312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:49.922318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:49.922332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.923545Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:49.937709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:49.937819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.937887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:49.937936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:49.937943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.938799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:49.938826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:49.938866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.938874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:49.938877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:49.938880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:49.939186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.939192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:49.939208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:49.939442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.939448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.939452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:49.939457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:49.939844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:49.940068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:49.940106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:49.940262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:49.940284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:49.940292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:49.940329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:49.940333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:49.940356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:49.940366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:49.940705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:49.940710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:49.940743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.940746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2024-11-21T17:50:49.940820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.940827Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:49.940842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:49.940847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:49.940852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:49.940857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:49.940861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:49.940865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:49.940877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:49.940883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:49.940886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:49.941110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:49.941119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:49.941123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:49.941126Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:49.941129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:49.941138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
hard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:49.945328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.945331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:50:49.945334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:49.945383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.945387Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:49.945392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T17:50:49.945407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:49.945468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.945474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.945477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:49.945480Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:49.945483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:49.945535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.945540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.945542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:49.945544Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:49.945546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:49.945551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:50:49.945809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:49.945829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2024-11-21T17:50:49.945995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:49.946007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:49.946013Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2024-11-21T17:50:49.946032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T17:50:49.946052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:49.946058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:49.946166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:49.946191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:49.946366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:49.946371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:49.946388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:49.946397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.946399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:50:49.946402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:49.946421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.946424Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:49.946431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:49.946434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:49.946437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:50:49.946440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:49.946443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:49.946445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:49.946452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:49.946455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:50:49.946458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:50:49.946460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:50:49.946552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.946560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.946563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:49.946567Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:49.946571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:49.946719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.946729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:49.946732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:49.946735Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:50:49.946739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:49.946748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:50:49.947100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:49.947130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2024-11-21T17:50:49.947457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { 
Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:49.947479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.947487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2024-11-21T17:50:49.947787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:49.947804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 |82.1%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:49.929523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:49.929545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:49.929550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:49.929554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:49.929567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:49.929571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:49.929579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:49.929643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:49.937479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:49.937497Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:49.939535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:49.940014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2024-11-21T17:50:49.940036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:49.941061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:49.941181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:49.941254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:49.941299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:49.942014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.942208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:49.942214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.942239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:49.942244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:49.942248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:49.942256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.943060Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:49.953234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:49.953299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.953360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:49.953399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:49.953404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.953978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:49.954002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:49.954033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.954039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:49.954042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:49.954046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:49.954299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.954305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:49.954307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:49.954500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.954505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.954508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:49.954512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:49.954875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:49.955151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:49.955185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:49.955316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:49.955330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:49.955335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:49.955368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:49.955372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:49.955395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:49.955404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:49.955667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T17:50:49.955671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:49.955697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:49.955700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:49.955755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:49.955759Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:49.955767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:49.955769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:49.955773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:49.955776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:49.955779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:49.955781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:49.955788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:49.955792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:49.955795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:49.955968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:49.955975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:49.955978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:49.955981Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:49.955984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:49.955992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
125 TestWaitNotification wait txId: 126 2024-11-21T17:50:50.194484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2024-11-21T17:50:50.194486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2024-11-21T17:50:50.194540Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2024-11-21T17:50:50.194560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:337:2329] 2024-11-21T17:50:50.194577Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2024-11-21T17:50:50.194589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:337:2329] 2024-11-21T17:50:50.194596Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2024-11-21T17:50:50.194604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:337:2329] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2024-11-21T17:50:50.194652Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194679Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 38us result status StatusSuccess 2024-11-21T17:50:50.194753Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: 
"ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194827Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194835Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 10us result status StatusSuccess 2024-11-21T17:50:50.194855Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194896Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194903Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 7us result status StatusSuccess 2024-11-21T17:50:50.194946Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 
72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194981Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:50.194988Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 8us result status StatusSuccess 2024-11-21T17:50:50.195012Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:50.195038Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:50.195045Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 8us result status StatusSuccess 2024-11-21T17:50:50.195062Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { 
Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23567, MsgBus: 21554 2024-11-21T17:50:27.670950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791430470169496:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:27.671130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b3d/r3tmp/tmpK5M7Fz/pdisk_1.dat 2024-11-21T17:50:27.714461Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23567, node 1 2024-11-21T17:50:27.719404Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:27.719415Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:27.719416Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:27.719440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21554 TClient is connected to server localhost:21554 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:27.772286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:27.772321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:27.773353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:27.794697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.802753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.816699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.829587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.837577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:27.918089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791430470171040:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:27.918121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:27.936337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.942299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.951696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.962020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.968298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.975820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:27.984600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791430470171534:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:27.984629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:27.984643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791430470171539:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:27.985150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:27.988886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791430470171541:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:28.157953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.245388Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd7xhjwk3dx5edn8761sagky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmZTQyZmMtZDU3ZTQ4MDUtNWE4OTVlMTctMjg3ZTEyMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.245389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd7xhjwk975awk49gy2yeywx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzEyZTFlZTctZmFiODYxZTctOGE1MDE0YzItOWU4MjRiN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.245508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd7xhjwj9nsh5n751gkkffsw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM3Y2UyN2MtYmVhYmZiNzQtOGFjYjU2NTktNzcyOTE2ZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.245508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7xhjwm5ymqbdvyg9a96vt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJkYjk3MzktNTRkZDkxZTgtMTc3Y2FkOC00ZmNjZTc0OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.245574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd7xhjwm31jy8bgks12e0fbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZlMzI5YWYtZDA0MzdmZjAtY2U4MGJkYjYtZGEzZjllMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.245933Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd7xhjwm359c8jw9cvmdr2hm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTU1MjE4Y2ItNTM3NTY0ODUtODkwM2VhYjMtNTAwMTI3NTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.246905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd7xhjwm9c3nygfk80ds8489, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc5NzE5YzMtOGY3ZjYwYTQtN2M1ZTYzY2ItZDAzNTY5NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.247016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd7xhjwm8e5gsg4rachznytj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q0MTU0MWQtNjZjMGYzOTctODg0N2U4YTItMTJiODMzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.247119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd7xhjwmey9hfte7td0g4bbs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAzMDM4MTItOTE3YjgxZDgtNTk0NGE0NDItZjQxNDIzNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.247396Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. 
Ctx: { TraceId: 01jd7xhjwkdaw7dx0d9ygne4x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc5ZTAwZDktMmQ1MGUwMTYtODAyMWZhOTQtODA0YjZiMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.247595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd7xhjwj9nsh5n751gkkffsw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM3Y2UyN2MtYmVhYmZiNzQtOGFjYjU2NTktNzcyOTE2ZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.248597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd7xhjwm31jy8bgks12e0fbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZlMzI5YWYtZDA0MzdmZjAtY2U4MGJkYjYtZGEzZjllMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.249186Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jd7xhjwk975awk49gy2yeywx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzEyZTFlZTctZmFiODYxZTctOGE1MDE0YzItOWU4MjRiN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.249360Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd7xhjwk3dx5edn8761sagky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmZTQ ... sion/3?node_id=2&id=ZjU1Yzc3ODktODk2ZTBkYTEtYzQxNDdkOWYtOWYxNzQyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.742662Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731629. Ctx: { TraceId: 01jd7xj7wab7nf3eg0sdaq4pgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.743323Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731630. Ctx: { TraceId: 01jd7xj7w9cgf1xt6aet83evre, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFlYWYxYTEtMWFmMzkzOWUtZGFlMzk5ZTEtMTI3NDU1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.743616Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731632. Ctx: { TraceId: 01jd7xj7wbdbw5kb1m2cacn7b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGVlZWY5MmUtNGIzNmNhNjktMjQ3YWJlNTUtYWIwZTc4ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.743864Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731631. Ctx: { TraceId: 01jd7xj7wb2m1hz6hp9t7a15jb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU1Yzc3ODktODk2ZTBkYTEtYzQxNDdkOWYtOWYxNzQyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.744326Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731633. Ctx: { TraceId: 01jd7xj7wbdbw5kb1m2cacn7b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGVlZWY5MmUtNGIzNmNhNjktMjQ3YWJlNTUtYWIwZTc4ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.744623Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731634. 
Ctx: { TraceId: 01jd7xj7wfap6n91dd2azv634a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTJhNGQyNjQtYmFmZTdiNDAtNzY4NDk1ZGItMzlhNzkwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.745072Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731635. Ctx: { TraceId: 01jd7xj7wbdbw5kb1m2cacn7b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGVlZWY5MmUtNGIzNmNhNjktMjQ3YWJlNTUtYWIwZTc4ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.745663Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731636. Ctx: { TraceId: 01jd7xj7wfap6n91dd2azv634a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTJhNGQyNjQtYmFmZTdiNDAtNzY4NDk1ZGItMzlhNzkwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.746253Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731637. Ctx: { TraceId: 01jd7xj7wh230ncyba0hd6gg7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTM3YzliOGMtYzdkMjhhZGMtYjczYTcwY2QtOTQ5MGNmN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.746563Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731638. Ctx: { TraceId: 01jd7xj7wjd2fb2ybjph5zab87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFlYWYxYTEtMWFmMzkzOWUtZGFlMzk5ZTEtMTI3NDU1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.747082Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731639. Ctx: { TraceId: 01jd7xj7wh230ncyba0hd6gg7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTM3YzliOGMtYzdkMjhhZGMtYjczYTcwY2QtOTQ5MGNmN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.747597Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731643. Ctx: { TraceId: 01jd7xj7wh230ncyba0hd6gg7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTM3YzliOGMtYzdkMjhhZGMtYjczYTcwY2QtOTQ5MGNmN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.747770Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731640. Ctx: { TraceId: 01jd7xj7wj4t9grenfjzhpatdh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.747831Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731641. Ctx: { TraceId: 01jd7xj7wjd2fb2ybjph5zab87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFlYWYxYTEtMWFmMzkzOWUtZGFlMzk5ZTEtMTI3NDU1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.747886Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731642. Ctx: { TraceId: 01jd7xj7wkd3y0tf2j3525n2pq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU1Yzc3ODktODk2ZTBkYTEtYzQxNDdkOWYtOWYxNzQyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.748003Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731644. 
Ctx: { TraceId: 01jd7xj7wh230ncyba0hd6gg7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTM3YzliOGMtYzdkMjhhZGMtYjczYTcwY2QtOTQ5MGNmN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.748612Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731646. Ctx: { TraceId: 01jd7xj7wj4t9grenfjzhpatdh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.748675Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731647. Ctx: { TraceId: 01jd7xj7wh230ncyba0hd6gg7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTM3YzliOGMtYzdkMjhhZGMtYjczYTcwY2QtOTQ5MGNmN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.748841Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731648. Ctx: { TraceId: 01jd7xj7wkd3y0tf2j3525n2pq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU1Yzc3ODktODk2ZTBkYTEtYzQxNDdkOWYtOWYxNzQyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.749341Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731649. Ctx: { TraceId: 01jd7xj7wj4t9grenfjzhpatdh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.749752Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731645. Ctx: { TraceId: 01jd7xj7wjd2fb2ybjph5zab87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFlYWYxYTEtMWFmMzkzOWUtZGFlMzk5ZTEtMTI3NDU1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.749837Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731650. Ctx: { TraceId: 01jd7xj7wkd3y0tf2j3525n2pq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU1Yzc3ODktODk2ZTBkYTEtYzQxNDdkOWYtOWYxNzQyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.750167Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731651. Ctx: { TraceId: 01jd7xj7wjd2fb2ybjph5zab87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFlYWYxYTEtMWFmMzkzOWUtZGFlMzk5ZTEtMTI3NDU1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.750314Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731652. Ctx: { TraceId: 01jd7xj7wkd3y0tf2j3525n2pq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjU1Yzc3ODktODk2ZTBkYTEtYzQxNDdkOWYtOWYxNzQyZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.750821Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731653. Ctx: { TraceId: 01jd7xj7wjd2fb2ybjph5zab87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFlYWYxYTEtMWFmMzkzOWUtZGFlMzk5ZTEtMTI3NDU1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.751135Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731654. 
Ctx: { TraceId: 01jd7xj7wjd2fb2ybjph5zab87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFlYWYxYTEtMWFmMzkzOWUtZGFlMzk5ZTEtMTI3NDU1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS 2024-11-21T17:50:49.752821Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731655. Ctx: { TraceId: 01jd7xj7wq6hz386q7dr5ntxm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTJhNGQyNjQtYmFmZTdiNDAtNzY4NDk1ZGItMzlhNzkwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.753311Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731656. Ctx: { TraceId: 01jd7xj7wre89jdvy1bnrysn8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.753626Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731657. Ctx: { TraceId: 01jd7xj7wq6hz386q7dr5ntxm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTJhNGQyNjQtYmFmZTdiNDAtNzY4NDk1ZGItMzlhNzkwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.754049Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731658. Ctx: { TraceId: 01jd7xj7wre89jdvy1bnrysn8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.754238Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731659. Ctx: { TraceId: 01jd7xj7wq6hz386q7dr5ntxm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTJhNGQyNjQtYmFmZTdiNDAtNzY4NDk1ZGItMzlhNzkwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.754287Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731660. Ctx: { TraceId: 01jd7xj7wre89jdvy1bnrysn8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.754540Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731661. Ctx: { TraceId: 01jd7xj7wq6hz386q7dr5ntxm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTJhNGQyNjQtYmFmZTdiNDAtNzY4NDk1ZGItMzlhNzkwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.754642Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731662. Ctx: { TraceId: 01jd7xj7wre89jdvy1bnrysn8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJmY2Q5M2ItZTYzZDkwNmUtMTNhMmIwODYtOTY4NzM5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.754947Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731663. Ctx: { TraceId: 01jd7xj7wq6hz386q7dr5ntxm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTJhNGQyNjQtYmFmZTdiNDAtNzY4NDk1ZGItMzlhNzkwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 26646, MsgBus: 24398 2024-11-21T17:50:28.758160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791433749370857:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:28.758507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b17/r3tmp/tmpt5fuBd/pdisk_1.dat 2024-11-21T17:50:28.812505Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26646, node 1 2024-11-21T17:50:28.821012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:28.821028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:28.821030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:28.821070Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24398 TClient is connected to server localhost:24398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:50:28.859338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:28.859361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:28.860489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:28.887933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.895122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.909240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:50:28.926061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.935782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:29.013537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791438044339699:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.013564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.045523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.051412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.061420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.116205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.171001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.225278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.237527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791438044340224:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.237562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.237592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791438044340229:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:29.238277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:29.242342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791438044340233:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:29.391580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:29.488126Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd7xhm3e2qy0wpp4mtzebs76, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA1NzFkM2UtYzUwMjM0Ny0zM2ExYmZhYy1jYzVmZWRmYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.488447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd7xhm3ecsa195z9rykexvhf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZmZjRkZmUtYzIxNTc2MmUtMjg3NWE3ZmYtYzhkNWM5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.488555Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd7xhm3e8qwbm15zdvajmp3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU5MDhlZDUtY2JiM2M2Y2UtNGYzM2IyM2UtOGU3ZGExNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.488596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7xhm3e2zs0x3frtmfq8bkh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUyNTQ1NGEtN2I3ZTZiNmItOTM1OTU0MjItNjVlODk5Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.488773Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd7xhm3e7jqt3a7z6z24tmhm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzljZTAxOTQtMmIyMzkxMWYtZGVhYTk4MDMtYjkzZTFlNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.490071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd7xhm3e2jp3bxy82gx26y40, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQ4NTRkZjEtOTA1N2VmNzMtYzY1M2FhMDUtMWJiMzdhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.490164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd7xhm3e6p4efmc8b5qxxk1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZlYzljNWEtY2RmNzgxZmEtNDcwYjNlMDMtNGY1MGNjNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.490227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd7xhm3e771pdx9c0n8nd5gx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM0ODU2NTQtNTg4ZmJkOTYtNzhhY2JmOGUtNDJiNDg1YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.491108Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd7xhm3e2zs0x3frtmfq8bkh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUyNTQ1NGEtN2I3ZTZiNmItOTM1OTU0MjItNjVlODk5Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.491436Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. 
Ctx: { TraceId: 01jd7xhm3ecsa195z9rykexvhf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZmZjRkZmUtYzIxNTc2MmUtMjg3NWE3ZmYtYzhkNWM5ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.491551Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd7xhm3ecnxn92pm2s67wwws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc0OGVjYmYtZWJlMGNkMTctZTkxMzUxOGUtNDhkNDFmZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.491669Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jd7xhm3ef3027gtvr4d7zsyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZiMTJhOTctZmVhMTdiYmItMjczZjU1YjgtMjFlYzRlZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.491674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd7xhm3e2qy0wpp4mtzebs76, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA1NzFkM2UtYzUwMjM0Ny0zM2ExYmZhYy1jYzVmZWRmYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:29.492208Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd7xhm3e8qwbm15zdvajmp3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU5MDh ... sion/3?node_id=2&id=ZTU2MGU0ODItOTI3ZmY2YTgtODAwZmQ3NS02ODY3MzQ0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.905669Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jd7xj81fdxd815mbm2nyc4hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzFlNGJhZTAtYzFiMWZhZDktYjM3NTZjNTMtYjhmMTYwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.905980Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jd7xj81eb58bajes49hf12ge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYxNDE5NjMtYjhjYmVjNmQtMjQzYzE2NDAtYmY3Njk2MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.906337Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jd7xj81eb58bajes49hf12ge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYxNDE5NjMtYjhjYmVjNmQtMjQzYzE2NDAtYmY3Njk2MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.906398Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jd7xj81dbc08r2y2qwe7h5k3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2MGU0ODItOTI3ZmY2YTgtODAwZmQ3NS02ODY3MzQ0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.907528Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jd7xj81eb58bajes49hf12ge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYxNDE5NjMtYjhjYmVjNmQtMjQzYzE2NDAtYmY3Njk2MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.908457Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. 
Ctx: { TraceId: 01jd7xj81ke9bb1b4ycw25v67d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzdmNWNhMTItMmRhNzY2NDItNDM0Nzg4NmItMTMxOTg1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.908534Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jd7xj81kbse2041b87am3pee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhNDkwYjQtNWRmYmUzN2ItYjY5YTg2YWQtMjI0MjNkZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.909578Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jd7xj81ke9bb1b4ycw25v67d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzdmNWNhMTItMmRhNzY2NDItNDM0Nzg4NmItMTMxOTg1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.909718Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. Ctx: { TraceId: 01jd7xj81k7p4htqy4t4vm1t57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWFkN2Y1MmYtMzFmMWRkOGYtOGM0YTgxOTItOWFiYTQwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.909742Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. Ctx: { TraceId: 01jd7xj81kbse2041b87am3pee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhNDkwYjQtNWRmYmUzN2ItYjY5YTg2YWQtMjI0MjNkZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.910280Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jd7xj81ke9bb1b4ycw25v67d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzdmNWNhMTItMmRhNzY2NDItNDM0Nzg4NmItMTMxOTg1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.910523Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jd7xj81kbse2041b87am3pee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhNDkwYjQtNWRmYmUzN2ItYjY5YTg2YWQtMjI0MjNkZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.910644Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. Ctx: { TraceId: 01jd7xj81k7p4htqy4t4vm1t57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWFkN2Y1MmYtMzFmMWRkOGYtOGM0YTgxOTItOWFiYTQwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.911346Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jd7xj81k7p4htqy4t4vm1t57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWFkN2Y1MmYtMzFmMWRkOGYtOGM0YTgxOTItOWFiYTQwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.911434Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. Ctx: { TraceId: 01jd7xj81n54nrqdsf9xzvqpfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzFlNGJhZTAtYzFiMWZhZDktYjM3NTZjNTMtYjhmMTYwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.912067Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. 
Ctx: { TraceId: 01jd7xj81n3t6xmy2fbp31fk8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYxNDE5NjMtYjhjYmVjNmQtMjQzYzE2NDAtYmY3Njk2MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.912153Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jd7xj81k7p4htqy4t4vm1t57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWFkN2Y1MmYtMzFmMWRkOGYtOGM0YTgxOTItOWFiYTQwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.912757Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721661. Ctx: { TraceId: 01jd7xj81n54nrqdsf9xzvqpfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzFlNGJhZTAtYzFiMWZhZDktYjM3NTZjNTMtYjhmMTYwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.913413Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721662. Ctx: { TraceId: 01jd7xj81n54nrqdsf9xzvqpfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzFlNGJhZTAtYzFiMWZhZDktYjM3NTZjNTMtYjhmMTYwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.913576Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721663. Ctx: { TraceId: 01jd7xj81n3t6xmy2fbp31fk8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYxNDE5NjMtYjhjYmVjNmQtMjQzYzE2NDAtYmY3Njk2MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.913965Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721664. Ctx: { TraceId: 01jd7xj81n3t6xmy2fbp31fk8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYxNDE5NjMtYjhjYmVjNmQtMjQzYzE2NDAtYmY3Njk2MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.914847Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721666. Ctx: { TraceId: 01jd7xj81t66z5b5c5hanwksb9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2MGU0ODItOTI3ZmY2YTgtODAwZmQ3NS02ODY3MzQ0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.915423Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721667. Ctx: { TraceId: 01jd7xj81t66z5b5c5hanwksb9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2MGU0ODItOTI3ZmY2YTgtODAwZmQ3NS02ODY3MzQ0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.915635Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721665. Ctx: { TraceId: 01jd7xj81tekg1jm5p0q6ajw93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzdmNWNhMTItMmRhNzY2NDItNDM0Nzg4NmItMTMxOTg1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T17:50:49.916362Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721669. Ctx: { TraceId: 01jd7xj81tekg1jm5p0q6ajw93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzdmNWNhMTItMmRhNzY2NDItNDM0Nzg4NmItMTMxOTg1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.916574Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721668. 
Ctx: { TraceId: 01jd7xj81v332xqg588cxxkq8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhNDkwYjQtNWRmYmUzN2ItYjY5YTg2YWQtMjI0MjNkZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.916714Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721671. Ctx: { TraceId: 01jd7xj81tekg1jm5p0q6ajw93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzdmNWNhMTItMmRhNzY2NDItNDM0Nzg4NmItMTMxOTg1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.917194Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721670. Ctx: { TraceId: 01jd7xj81ve384jvje9y51c3gc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzFlNGJhZTAtYzFiMWZhZDktYjM3NTZjNTMtYjhmMTYwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.917442Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721672. Ctx: { TraceId: 01jd7xj81tekg1jm5p0q6ajw93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzdmNWNhMTItMmRhNzY2NDItNDM0Nzg4NmItMTMxOTg1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.917730Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721673. Ctx: { TraceId: 01jd7xj81w2a231c1jy4nb3dxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWFkN2Y1MmYtMzFmMWRkOGYtOGM0YTgxOTItOWFiYTQwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2024-11-21T17:50:49.918141Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721674. Ctx: { TraceId: 01jd7xj81v332xqg588cxxkq8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhNDkwYjQtNWRmYmUzN2ItYjY5YTg2YWQtMjI0MjNkZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.918366Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721675. Ctx: { TraceId: 01jd7xj81ve384jvje9y51c3gc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzFlNGJhZTAtYzFiMWZhZDktYjM3NTZjNTMtYjhmMTYwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.918577Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721676. Ctx: { TraceId: 01jd7xj81w2a231c1jy4nb3dxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWFkN2Y1MmYtMzFmMWRkOGYtOGM0YTgxOTItOWFiYTQwYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.918681Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721677. Ctx: { TraceId: 01jd7xj81v332xqg588cxxkq8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhNDkwYjQtNWRmYmUzN2ItYjY5YTg2YWQtMjI0MjNkZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:49.918750Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721678. Ctx: { TraceId: 01jd7xj81ve384jvje9y51c3gc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzFlNGJhZTAtYzFiMWZhZDktYjM3NTZjNTMtYjhmMTYwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |82.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit |82.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardViewTest::AsyncCreateDifferentViews >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> TSchemeShardViewTest::EmptyQueryText >> TSchemeShardViewTest::DropView >> TSchemeShardViewTest::AsyncDropSameView >> TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::ReadOnlyMode >> TSchemeShardViewTest::EmptyName |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> DataShardSnapshots::LockedWriteCleanupOnCopyTable [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey |82.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TSchemeShardViewTest::DropView [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> TSchemeShardServerLess::BaseCase |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TSchemeShardServerLess::Fake [GOOD] |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116699Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.252991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.253008Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.282993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.283682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:53.287960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:53.297213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.304091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354362Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.371239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.373955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.388268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.395796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:53.395865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.396731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.414036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.414625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.415572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.423689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: 
Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.431573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.431743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.431805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.432764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.432848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.432957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432969Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.432989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.432994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.433001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.433008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.433014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:53.433018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.433047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.433054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.433058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.433556Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.433589Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.433595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.433617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:53.434774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:53.440537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:53.440680Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:53.455537Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:53.456135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.456173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2024-11-21T17:50:53.456180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2024-11-21T17:50:53.456193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:53.456204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:50:53.456211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.456440Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:53.457231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T17:50:53.457257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T17:50:53.457317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at 
schemeshard: 72057594046678944 2024-11-21T17:50:53.457325Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2024-11-21T17:50:53.457332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T17:50:53.457354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.457520Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:53.457821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:50:53.457841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T17:50:53.457883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.457897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.457907Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T17:50:53.457921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:50:53.457938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.457948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.458275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.458284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.458303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:53.458315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.458318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:53.458321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:50:53.458364Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.458368Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:53.458374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:53.458377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:53.458379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:53.458382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:53.458385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:53.458387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:53.458395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:53.458398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:50:53.458412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:53.458414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T17:50:53.458484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.458491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.458493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:53.458508Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:53.458511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.458621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.458651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.458654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:53.458657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:53.458659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.458665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, 
at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:53.459247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:53.459563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 |82.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.253005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.253026Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.282955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.283631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:53.287969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:53.297201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.304022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:50:53.352914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354352Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.365325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.373953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.388379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.395803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:53.395835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.396741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.414063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.414795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.416679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.423731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.431574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.431765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.431829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.432682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.432731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.432823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432831Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.432846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.432850Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.432856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.432861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.432865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:53.432870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.432885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.432891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.432895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.433256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.433282Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.433287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.433303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Count reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:53.463416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:50:53.463418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:50:53.463420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:50:53.463502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:50:53.463508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.463512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:53.463515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T17:50:53.463715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.463724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.463726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:53.463729Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:50:53.463731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:53.463791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.463796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.463798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:53.463801Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:50:53.463803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:53.463807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:50:53.464038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:53.464051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2024-11-21T17:50:53.464089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:50:53.464093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T17:50:53.464108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:53.464109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T17:50:53.464115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:50:53.464116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:50:53.464162Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464173Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.464180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:330:2322] 2024-11-21T17:50:53.464190Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.464196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:330:2322] 2024-11-21T17:50:53.464205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.464207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:330:2322] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:53.464269Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464288Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 27us result status StatusSuccess 2024-11-21T17:50:53.464373Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464420Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464433Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 15us result status StatusSuccess 2024-11-21T17:50:53.464491Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464523Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:53.464531Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 9us result status StatusSuccess 2024-11-21T17:50:53.464549Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.253022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.253040Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.283505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.284148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:53.287981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T17:50:53.297183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.304033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354362Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.371494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.373955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.385944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.395760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:53.395814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.396794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.414679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.415404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.416508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.423711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.435569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.435611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.435620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.435685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.435694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.435718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.435732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.438931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.438946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.438983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.438988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.439060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.439068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.439081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.439084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.439089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.439094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.439100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:53.439104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.439119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.439124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.439127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.439479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.439499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.439504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.439509Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.439515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.439571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:53.477158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T17:50:53.477508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:50:53.477543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:53.477573Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T17:50:53.477624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.477640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.477646Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T17:50:53.477669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:50:53.477693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.477704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:50:53.478052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.478058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.478087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:53.478101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.478104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:50:53.478110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:50:53.478116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.478121Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:50:53.478128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:50:53.478131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:53.478135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:50:53.478137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:50:53.478140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:50:53.478143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:50:53.478151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:53.478155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:50:53.478157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:53.478159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T17:50:53.478274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.478281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.478284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:53.478287Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:53.478290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.478384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.478390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:50:53.478392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:50:53.478394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:53.478396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.478402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:50:53.478996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:50:53.479036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2024-11-21T17:50:53.479089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:50:53.479094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T17:50:53.479114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:53.479116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T17:50:53.479121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:50:53.479123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:50:53.479169Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:53.479181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.479184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:300:2292] 2024-11-21T17:50:53.479210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:53.479215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.479217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:300:2292] 2024-11-21T17:50:53.479236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:50:53.479241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.479243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:300:2292] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T17:50:53.479287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:53.479306Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 28us result status StatusSuccess 2024-11-21T17:50:53.479373Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.252975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.252994Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.282999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.283631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:53.287946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T17:50:53.297182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.304119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354360Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.365650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.373980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.385947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.395777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T17:50:53.395827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.396741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.414062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.414831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.416021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.423707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.431574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.431755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.431800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.433118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.433130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.433166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.433172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.433247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.433256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.433268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.433272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.433278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.433283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.433287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:53.433291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.433305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.433310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.433316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.433690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.433717Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.433721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.433738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:50:53.435407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:50:53.440567Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:50:53.440760Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:50:53.455208Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:50:53.455760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.455791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2024-11-21T17:50:53.455798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2024-11-21T17:50:53.460944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150, at schemeshard: 72057594046678944 2024-11-21T17:50:53.475389Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:53.476339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.476374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:150, operation: CREATE VIEW, path: /MyRoot/ 2024-11-21T17:50:53.476501Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> TInterconnectTest::TestCrossConnect [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126666Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.252984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.253004Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.282980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.283658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:53.287959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:53.297294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.304100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354362Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.371494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.373973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.385940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.395794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:53.395836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.396741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.415180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.415836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.417531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.423729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.431575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.431757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.431806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.432656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.432723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.432823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.432846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.432851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.432856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.432861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.432868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:53.432871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.432885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.432892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.432895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.433296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.433321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.433325Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.433330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.433347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
r ... ecute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "Some query" } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.455761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2024-11-21T17:50:53.455766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2024-11-21T17:50:53.455778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:53.455795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:50:53.455801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.456017Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:53.456829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T17:50:53.456865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T17:50:53.456936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.456943Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2024-11-21T17:50:53.456968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T17:50:53.456992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.457157Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:53.457468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:50:53.457499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:50:53.457578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.457598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 
Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.457610Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T17:50:53.457643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:50:53.457668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.457679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.458126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.458137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.458164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:53.458182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.458187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:50:53.458191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:50:53.458251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.458261Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T17:50:53.458269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:50:53.458273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:50:53.458278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:50:53.458282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:50:53.458286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:50:53.458289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:50:53.458300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:53.458304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T17:50:53.458307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T17:50:53.458310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T17:50:53.458419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:50:53.458432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:50:53.458436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:50:53.458440Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T17:50:53.458445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.458563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:50:53.458574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:50:53.458577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:50:53.458581Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:50:53.458584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.458593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T17:50:53.459274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:50:53.459583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2024-11-21T17:50:53.459637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:50:53.459645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:50:53.459704Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:50:53.459722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.459726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:296:2288] TestWaitNotification: OK eventTxId 101 2024-11-21T17:50:53.459782Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:53.459808Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 37us result status StatusSuccess 
2024-11-21T17:50:53.459910Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TInterconnectTest::TestManyEventsWithReconnect >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> TSchemeShardServerLess::BaseCase [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> BsControllerConfig::DeleteStoragePool [GOOD] |82.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |82.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |82.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.253078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.253098Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.282990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.283651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:53.287927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:53.297192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.303987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354339Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.363999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.373939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.388295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.388305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.396584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:53.396619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.397294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.397307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.397313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.416362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.417050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.424060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.424150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.431575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.431760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.431805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.436748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.436765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.436801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.436807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.436879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.436888Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.436901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.436905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.436910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.436916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.436920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all 
the parts is done, operation id: 1:0 2024-11-21T17:50:53.436924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.436943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.436949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.436955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.437328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.437344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.437348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.437352Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.437357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.437373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... nKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T17:50:53.460317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.460342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2024-11-21T17:50:53.460354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:50:53.460359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.460737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T17:50:53.460759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T17:50:53.460779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.460797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView 
TPropose, opId: 102:0 ProgressState 2024-11-21T17:50:53.460806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T17:50:53.460824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.461114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:53.461136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T17:50:53.461193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.461212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.461220Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2024-11-21T17:50:53.461241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T17:50:53.461269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.461277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.461617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.461626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.461645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:53.461674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.461678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:50:53.461681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:53.461742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.461748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:53.461756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2024-11-21T17:50:53.461759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:53.461763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:50:53.461768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:53.461771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:53.461774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:53.461782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:53.461787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:50:53.461790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:50:53.461793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:50:53.461868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.461881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.461885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:53.461888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:53.461892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.461974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.461982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.461985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:53.461989Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:50:53.461992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.462003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:50:53.462049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2024-11-21T17:50:53.462054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:53.462062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.462610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:53.462645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:53.462822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:50:53.462862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:50:53.462867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:50:53.462919Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:53.462934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.462938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:320:2312] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:53.462993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:53.463016Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 32us result status StatusPathDoesNotExist 2024-11-21T17:50:53.463052Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestGetMultipart >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> 
TBlobStorageProxyTest::TestPartialGetBlock >> TBlobStorageProxyTest::TestDoubleFailure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:54.995907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:54.997449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.997456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:54.997460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:54.997463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:54.997466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:54.997472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.999566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:55.027329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:55.027349Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:55.036180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:55.036855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:55.036875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:55.046898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:55.059230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:55.059334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.087940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:55.102244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:55.155960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2024-11-21T17:50:55.155965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:55.155978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.163012Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:55.184935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.203601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:55.219112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:55.219125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:55.219915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:55.219925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:55.219928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:55.220284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:55.220586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.220607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.221133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:55.221501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:55.221548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:55.221681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:55.221773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.221802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:55.222166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.222197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:55.231200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.231222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:55.231245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:55.231248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:55.231257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T17:50:55.231263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:55.231278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:55.231282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:55.231284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:55.231618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:55.231640Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:55.231644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.231655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... DEBUG: Free shard 72057594046678944:6 hive 72075186233409546 at ss 72057594046678944 2024-11-21T17:50:55.707625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.707630Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2024-11-21T17:50:55.707637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T17:50:55.707640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:50:55.707643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2024-11-21T17:50:55.707647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:50:55.707651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T17:50:55.707655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T17:50:55.707684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:55.707941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:50:55.708012Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 2024-11-21T17:50:55.708258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:55.708592Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2024-11-21T17:50:55.708627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK 
Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:50:55.708667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186234409546 2024-11-21T17:50:55.708881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2024-11-21T17:50:55.708898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:50:55.708970Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 Forgetting tablet 72075186234409547 2024-11-21T17:50:55.709376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:50:55.709403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:55.709642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:55.709648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:55.709662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:55.709690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:50:55.709715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:55.709718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:55.709725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:55.709985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:50:55.709992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2024-11-21T17:50:55.710000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T17:50:55.710002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2024-11-21T17:50:55.710254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:50:55.710260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2024-11-21T17:50:55.710282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:55.710289Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:50:55.710332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:50:55.710337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:50:55.710385Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:50:55.710397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:50:55.710400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:912:2778] TestWaitNotification: OK eventTxId 106 2024-11-21T17:50:55.710452Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:55.710476Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 33us result status StatusPathDoesNotExist 2024-11-21T17:50:55.710506Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:50:55.710554Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:55.710565Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 15us result status StatusPathDoesNotExist 2024-11-21T17:50:55.710575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:50:55.710601Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:55.710614Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 14us result status StatusSuccess 2024-11-21T17:50:55.710660Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409546 is deleted wait until 72075186234409547 is deleted wait until 72075186234409548 is deleted wait until 72075186234409549 is deleted 2024-11-21T17:50:55.710715Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409546 2024-11-21T17:50:55.710727Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409547 2024-11-21T17:50:55.710732Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409548 2024-11-21T17:50:55.710737Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 Deleted tabletId 72075186234409546 Deleted tabletId 72075186234409547 Deleted tabletId 72075186234409548 Deleted tabletId 72075186234409549 |82.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:50:01.594316Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:01.594339Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.598864Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:01.601584Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:50:01.601801Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:50:01.602366Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:50:01.602837Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:50:01.603264Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.605039Z node 1 :PERSQUEUE INFO: new Cookie default|55527327-ef7c4e4c-7bf6585c-a3d87a97_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured 
TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.606070Z node 1 :PERSQUEUE INFO: new Cookie default|ea91e8d7-5e14aa52-611f1df5-87b65d4a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.610190Z node 1 :PERSQUEUE INFO: new Cookie default|51e620f9-9c78a2f6-3dc00c35-980578ed_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.611234Z node 1 :PERSQUEUE INFO: new Cookie default|bed7a5a5-a079392-bba5f59c-286c0b44_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.612221Z node 1 :PERSQUEUE INFO: new Cookie default|21228667-dbdfe520-a17c62b4-9511ad44_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.613517Z node 1 :PERSQUEUE INFO: new Cookie default|f113db68-6286436f-c1a5711c-5a1cd712_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T17:50:01.850005Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:01.850032Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:179:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:182:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:183:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:01.858236Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:01.858251Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:184:2194] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup 
to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:261:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:03.317339Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:03.317524Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T17:50:03.317681Z node 2 :PERSQUEU ... 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [47:290:2283] sender: [47:392:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:106:2057] recipient: [48:99:2133] 2024-11-21T17:50:54.407870Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:54.407891Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] Leader for TabletID 72057594037927938 is [48:151:2172] sender: [48:152:2057] recipient: [48:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:177:2057] recipient: [48:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.411632Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:54.411816Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:175:2190] txId 12345 config: CacheSize: 
10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2024-11-21T17:50:54.411922Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:184:2197] 2024-11-21T17:50:54.412392Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:50:54.412694Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:185:2198] 2024-11-21T17:50:54.412991Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.414324Z node 48 :PERSQUEUE INFO: new Cookie default|2a6857dd-82763333-a28c5ab-cf7bcbea_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.414923Z node 48 :PERSQUEUE INFO: new Cookie default|7a7bad66-222841ae-99854480-2d557fdb_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.418925Z node 48 :PERSQUEUE INFO: new Cookie default|ce5c443d-f733100d-65d89e31-53163590_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.419953Z node 48 :PERSQUEUE INFO: new Cookie default|7518c400-4a99284f-d535e40c-1cc4de90_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.421088Z node 48 :PERSQUEUE INFO: new Cookie default|c7444b4e-af8f5c46-fa9131da-204aa0b2_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.422030Z node 48 :PERSQUEUE INFO: new Cookie default|289abf94-8fd3b52b-2157bb09-660a31d_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] 
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:106:2057] recipient: [49:99:2133] 2024-11-21T17:50:54.647107Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:54.647129Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] Leader for TabletID 72057594037927938 is [49:151:2172] sender: [49:152:2057] recipient: [49:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:177:2057] recipient: [49:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.650811Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:50:54.650992Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2024-11-21T17:50:54.651110Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:184:2197] 2024-11-21T17:50:54.651589Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:184:2197] 2024-11-21T17:50:54.651833Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:185:2198] 2024-11-21T17:50:54.652113Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.653683Z node 49 :PERSQUEUE INFO: new Cookie default|8a135ee6-a84dcb79-a0dc6af9-cc0d28e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.654298Z node 49 :PERSQUEUE INFO: new Cookie default|e612f8ed-c2623be-b76ac273-1ed2575d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.657212Z node 49 :PERSQUEUE INFO: new Cookie default|52d08fdf-482cbde8-b90658a6-a3af3e7f_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.658075Z node 49 :PERSQUEUE INFO: new Cookie default|7b8266e8-dd61bb77-d3b3e676-fd946d1e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.658926Z node 49 :PERSQUEUE INFO: new Cookie default|c3f573c0-75030f35-973b4116-bb95721c_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:50:54.659747Z node 49 :PERSQUEUE INFO: new Cookie default|4b8bf380-7f5c0d38-a018717b-917af954_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:54.995915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:54.997447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.997453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:54.997457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:54.997461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:54.997464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:54.997471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.999607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:55.027329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:55.027348Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:55.036268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:55.036958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:55.036985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T17:50:55.046621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:55.059255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:55.059374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.087946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:55.102319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:55.155980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.155985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:55.155995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.163079Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:55.185209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.203616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:55.219139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:55.219150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:55.219920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:55.219941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:55.219945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:55.220314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:55.220589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.220614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.221206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:55.221606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:55.221644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:55.221756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:55.221820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.221842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:55.222121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.222153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:55.231212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.231235Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:55.231258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:55.231262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:55.231270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:55.231276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:55.231292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:55.231296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:55.231299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:55.231607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:55.231626Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:55.231629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.231640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
024-11-21T17:50:55.383423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.383426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet 72057594046678944 2024-11-21T17:50:55.383430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2024-11-21T17:50:55.383449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:55.383753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T17:50:55.383774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2024-11-21T17:50:55.383828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.383843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.383847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T17:50:55.383889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T17:50:55.383895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T17:50:55.383907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:55.383924Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:599:2528], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T17:50:55.384271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:55.384304Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T17:50:55.384354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384361Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2024-11-21T17:50:55.384365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2024-11-21T17:50:55.384448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:55.384458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:55.384463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:55.384467Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:50:55.384472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T17:50:55.384484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:50:55.385030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.385041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:50:55.385050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:50:55.385054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:55.385059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T17:50:55.385064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:55.385069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:50:55.385072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:50:55.385104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:55.385210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:50:55.385548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:50:55.385557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:50:55.385613Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 
72057594046678944 2024-11-21T17:50:55.385626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:55.385630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:753:2637] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T17:50:55.386166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.386184Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2024-11-21T17:50:55.386187Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2024-11-21T17:50:55.386208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2024-11-21T17:50:55.386212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2024-11-21T17:50:55.395015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.395045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2024-11-21T17:50:55.395578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.395595Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2024-11-21T17:50:55.395598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2024-11-21T17:50:55.395621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject 
Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2024-11-21T17:50:55.395625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2024-11-21T17:50:55.395995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.396015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:54.995922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:54.997472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.997488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:54.997493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:54.997497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:54.997499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:54.997507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.999575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:55.027330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:55.027350Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:55.036189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:55.036713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:55.036736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:55.046608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:55.059231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:55.059336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.087955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:55.102267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:55.155990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.155997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:55.156009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.162992Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:55.185418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.203615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:55.219147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:55.219162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:55.219916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:55.219929Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:55.219933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:55.220301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:55.220676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.220705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.221276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:55.221664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:55.221700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:55.221838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:55.221920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.221950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:55.222330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.222359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:55.231217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.231262Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:55.231279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:55.231284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:55.231294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:55.231302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:55.231322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:55.231327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:55.231330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:55.231683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:55.231703Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:55.231707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.231718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409547, partId: 0 2024-11-21T17:50:55.381634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409547 2024-11-21T17:50:55.381641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2024-11-21T17:50:55.381648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#104:0 Got OK TEvConfigureStatus from tablet# 72075186234409547 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2024-11-21T17:50:55.382434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.382538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409548, partId: 0 2024-11-21T17:50:55.382554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2024-11-21T17:50:55.382559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2024-11-21T17:50:55.382565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#104:0 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2024-11-21T17:50:55.382569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2024-11-21T17:50:55.383087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.383629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.383657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.383662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.383667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet 72057594046678944 2024-11-21T17:50:55.383672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2024-11-21T17:50:55.383696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:55.384047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T17:50:55.384069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2024-11-21T17:50:55.384127Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T17:50:55.384199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T17:50:55.384207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T17:50:55.384221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:55.384258Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:599:2528], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T17:50:55.384632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:55.384662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T17:50:55.384705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.384711Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2024-11-21T17:50:55.384714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2024-11-21T17:50:55.384793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:55.384802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:50:55.384806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:50:55.384809Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:50:55.384818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T17:50:55.384829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:50:55.385289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.385299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:50:55.385309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:50:55.385313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:55.385317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T17:50:55.385322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:50:55.385327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:50:55.385331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:50:55.385359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:50:55.385451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:50:55.385790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:50:55.385798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:50:55.385852Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:55.385865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:55.385869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:753:2637] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T17:50:55.386508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.386530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2024-11-21T17:50:55.386534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2024-11-21T17:50:55.386556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: 
Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2024-11-21T17:50:55.386562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2024-11-21T17:50:55.395096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.395131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |82.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.252991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.253006Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.283018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.283713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:50:53.287961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:53.297183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.304111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354361Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.371762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.374058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.385939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.395802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:53.395838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.396731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.415253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.415976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.416749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.424037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.432360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.432435Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.432503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.432514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.432543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.432564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.434156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:50:53.434168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.434199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.434205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.434278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.434287Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.434300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.434304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.434309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.434314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.434319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:53.434322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.434336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.434342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.434347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.434700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.434714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.434719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.434724Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.434728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.434742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
477207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127, operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T17:50:53.477245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2024-11-21T17:50:53.477253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127, operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T17:50:53.477534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:53.477557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T17:50:53.477624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.477642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.477648Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2024-11-21T17:50:53.477676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T17:50:53.477704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.477714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.478128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.478138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.478164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: 
[OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:53.478185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.478190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:50:53.478194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:53.478270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.478276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:53.478284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:53.478288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:53.478293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:50:53.478298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:53.478302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:53.478306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:53.478316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:50:53.478321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:50:53.478324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:50:53.478328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:50:53.478401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.478411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.478416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:53.478421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:53.478425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.478530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.478539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:53.478543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:53.478546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:50:53.478550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:50:53.478560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:50:53.478753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:53.478759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:50:53.478770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.479120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:53.479417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:53.479435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2024-11-21T17:50:53.479487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:50:53.479493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2024-11-21T17:50:53.479506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:50:53.479510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:50:53.479568Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:50:53.479583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.479590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:326:2318] 2024-11-21T17:50:53.479597Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:50:53.479611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:50:53.479614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:326:2318] TestWaitNotification: OK eventTxId 103 
TestWaitNotification: OK eventTxId 104 2024-11-21T17:50:53.479678Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:53.479697Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 30us result status StatusPathDoesNotExist 2024-11-21T17:50:53.479737Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:53.116717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:53.116752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.116758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:53.116762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:53.126681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:53.126713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:53.126739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:53.126824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:53.253138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:53.253167Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:53.283014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:53.283648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2024-11-21T17:50:53.287950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:53.297245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:53.297409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:53.297470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.300272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:53.303949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.352952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.352964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.352970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.352986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.354362Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.364255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.373935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.374032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:53.384866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.384906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.385996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:53.386045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.386053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:53.395789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:53.395828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:53.396722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396750Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:53.396755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:53.407004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.407035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.414040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.414795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.415539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:53.423685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:53.431573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.431640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:53.431745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:53.431781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.431799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:53.435681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T17:50:53.435706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.435740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.435744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:53.435800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.435805Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:53.435814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:53.435817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.435820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:53.435823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:53.435826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:53.435829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:53.435839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.435843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:53.435845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:53.436113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.436125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:53.436130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:53.436133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:53.436136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:53.436146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ead records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.931350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.932795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.932812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.932870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:53.932876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.932880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:53.933113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:376:2347] sender: [1:432:2058] recipient: [1:15:2062] 2024-11-21T17:50:53.964532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:53.964594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2024-11-21T17:50:53.964600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2024-11-21T17:50:53.964619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one 
child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:53.964634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T17:50:53.964641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:53.965407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2024-11-21T17:50:53.965432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2024-11-21T17:50:53.965475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.965480Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2024-11-21T17:50:53.965487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T17:50:53.965510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:53.965864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T17:50:53.965896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2024-11-21T17:50:53.966016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:53.966031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:53.966036Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2024-11-21T17:50:53.966057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2024-11-21T17:50:53.966079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:53.966088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T17:50:53.966441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:53.966447Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:53.966478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:50:53.966493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:53.966497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:425:2386], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T17:50:53.966500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:425:2386], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T17:50:53.966553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:50:53.966558Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T17:50:53.966566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:50:53.966569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:53.966573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T17:50:53.966577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:50:53.966580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:50:53.966582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:50:53.966592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:50:53.966596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T17:50:53.966599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:50:53.966601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:50:53.966699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:53.966706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:53.966709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:50:53.966713Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:50:53.966716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:50:53.966788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 
LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:53.966794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:50:53.966796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:50:53.966798Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:50:53.966801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:50:53.966806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T17:50:53.967532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:50:53.967711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2024-11-21T17:46:56.127197Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.127204Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.127208Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:46:56.127330Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:46:56.127345Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.127347Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.127984Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006817s 2024-11-21T17:46:56.128136Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:46:56.128157Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.128161Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.128183Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008815s 2024-11-21T17:46:56.128333Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T17:46:56.128346Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.128349Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:46:56.128366Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005507s 2024-11-21T17:46:56.170315Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1732211216170308 2024-11-21T17:46:56.281047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790525516893068:2214];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.281149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:46:56.282673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439790522821990608:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:56.282953Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d5d/r3tmp/tmpKQZmr2/pdisk_1.dat 2024-11-21T17:46:56.305871Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:56.306908Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:46:56.333544Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27075, node 1 2024-11-21T17:46:56.352908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000d5d/r3tmp/yandexx2JSbK.tmp 2024-11-21T17:46:56.352925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000d5d/r3tmp/yandexx2JSbK.tmp 2024-11-21T17:46:56.352974Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000d5d/r3tmp/yandexx2JSbK.tmp 2024-11-21T17:46:56.353004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:46:56.356417Z INFO: TTestServer started on Port 18361 GrpcPort 27075 TClient is connected to server localhost:18361 PQClient connected to localhost:27075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:46:56.379864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.379888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.381542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:46:56.406870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:46:56.407080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:56.407103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:46:56.408120Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:46:56.408408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T17:46:56.575946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790525516893777:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.575965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790525516893796:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.575971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.576735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T17:46:56.578203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439790525516893832:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.578239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:46:56.580929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439790525516893806:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T17:46:56.606202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:46:56.610093Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439790522821990925:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:56.610211Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTAzM2YxMzEtNzM3ZDg1NjItOWYzNDA4YjQtNWI3NjJhZTU=, ActorId: [2:7439790522821990885:2277], ActorState: ExecuteState, TraceId: 01jd7xb46xb6jfbtwpx6wzj5y3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:56.610691Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:56.665180Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439790525516893974:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:46:56.665261Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThiYjcyNTctZTM3NGJkOTgtMWNhYjZlMDUtZDMyMTk0OGU=, ActorId: [1:7439790525516893774:2299], ActorState: ExecuteState, TraceId: 01jd7xb45z3r2te1797ejjz6zq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:46:56.665393Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:46:56.672622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:46:56.739390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27075", true, true, 1000); 2024-11-21T17:46:56.776695Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd7xb4bn1rbx6fgs851zekx3, Database: , ... o 9 partNo 0 2024-11-21T17:50:54.965058Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2024-11-21T17:50:54.965060Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2024-11-21T17:50:54.965062Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2024-11-21T17:50:54.965100Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1732211454964 2024-11-21T17:50:54.965125Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:50:54.965828Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 2024-11-21T17:50:54.965839Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:50:54.965849Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:50:54.965852Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965855Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T17:50:54.965856Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965859Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T17:50:54.965870Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965874Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T17:50:54.965876Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965881Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T17:50:54.965882Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965887Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T17:50:54.965889Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965893Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2024-11-21T17:50:54.965895Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965901Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2024-11-21T17:50:54.965908Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:50:54.965912Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2024-11-21T17:50:54.965919Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:50:54.965923Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2024-11-21T17:50:54.965927Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:50:54.965937Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:50:54.965939Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:50:54.965943Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:50:54.965967Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2024-11-21T17:50:54.965976Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:50:54.966005Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T17:50:54.966012Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:50:54.966026Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732211454964 queuesize 0 startOffset 0 2024-11-21T17:50:54.966120Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { } 2024-11-21T17:50:54.966127Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 1 2024-11-21T17:50:54.966131Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 2 2024-11-21T17:50:54.966133Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 3 2024-11-21T17:50:54.966135Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 4 2024-11-21T17:50:54.966137Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 5 2024-11-21T17:50:54.966138Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 6 2024-11-21T17:50:54.966142Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 7 2024-11-21T17:50:54.966144Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 8 2024-11-21T17:50:54.966145Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 9 2024-11-21T17:50:54.966147Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: acknoledged message 10 2024-11-21T17:50:54.966222Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:50:54.966229Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session will now close 2024-11-21T17:50:54.966233Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: aborting 2024-11-21T17:50:54.966330Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:50:54.966334Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0] Write session: destroy 2024-11-21T17:50:54.966452Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0 grpc read done: success: 0 data: 2024-11-21T17:50:54.966461Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0 grpc read failed 2024-11-21T17:50:54.966465Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0 grpc closed 2024-11-21T17:50:54.966469Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|9a755748-83f2a5e4-bb7cd724-f6dca679_0 is DEAD 2024-11-21T17:50:54.966655Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:50:54.966685Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:50:54.966699Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7439791547847523231:2625] destroyed 2024-11-21T17:50:54.966716Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
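The session teardown above follows a simple invariant: the client side reports "gracefully shut down, all writes complete" only after every in-flight sequence number has been acknowledged, after which the server sees the grpc stream close, marks the session DEAD and drops the partition owner. A small illustrative sketch of that client-side ack tracking follows; WriteSessionModel and its methods are assumed names for this sketch, not the actual YDB SDK interface.

#include <cstdint>
#include <iostream>
#include <set>

// Illustrative ack tracker for a write session: Close() reports a graceful
// shutdown only once every sequence number handed to Send() has been
// acknowledged, mirroring the log messages above.
class WriteSessionModel {
public:
    void Send(uint64_t seqNo) { InFlight.insert(seqNo); }
    void Acknowledge(uint64_t seqNo) { InFlight.erase(seqNo); }
    bool Close() const {
        if (InFlight.empty()) {
            std::cout << "Write session: gracefully shut down, all writes complete\n";
            return true;
        }
        std::cout << "Write session: aborting, " << InFlight.size()
                  << " writes still in flight\n";
        return false;
    }

private:
    std::set<uint64_t> InFlight;  // sequence numbers sent but not yet acknowledged
};

int main() {
    WriteSessionModel session;
    for (uint64_t seqNo = 1; seqNo <= 10; ++seqNo) session.Send(seqNo);
    for (uint64_t seqNo = 1; seqNo <= 10; ++seqNo) session.Acknowledge(seqNo);
    session.Close();  // all ten writes acknowledged, so the close is graceful
}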
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T17:50:35.297304Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:35.297852Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:35.298157Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:35.298231Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:35.298323Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:35.298327Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:35.298346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:35.298964Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:35.298982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:35.299002Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:35.299012Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:35.299019Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:35.299024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2024-11-21T17:50:35.309375Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:35.309412Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:35.319691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:35.319730Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:35.319743Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:35.319751Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:35.319768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:35.319773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:35.319778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:35.319782Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:35.330041Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:35.330098Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:35.330246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:35.330252Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:35.330271Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:35.331595Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T17:50:37.110576Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:37.110740Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:37.110957Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:37.111044Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:37.111179Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:37.111186Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:37.111217Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:37.112084Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:37.112114Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:37.112138Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:37.112152Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:37.112163Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:37.112170Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2024-11-21T17:50:37.122606Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:37.122657Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:37.132991Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:37.133044Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:37.133060Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:37.133070Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:37.133099Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:37.133109Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:37.133115Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:37.133123Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:37.143457Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:37.143501Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:37.143630Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:37.143634Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:37.143655Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:37.143744Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] recipient: [21:2865:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] recipient: [21:2865:2115] Leader for TabletID 72057594037932033 is [21:2966:2117] sender: [21:2967:2106] recipient: [21:2865:2115] 2024-11-21T17:50:39.103943Z node 21 
:BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:39.104162Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:39.104433Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:39.104520Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:39.104670Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:39.104680Z node 21 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:39.104724Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:39.105578Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:39.105616Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:39.105642Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:39.105659Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:39.105671Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx ... pp:355} Create new pdisk PDiskId# 87:1002 Path# /dev/disk1 2024-11-21T17:50:47.753850Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1000 Path# /dev/disk1 2024-11-21T17:50:47.753855Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1000 Path# /dev/disk3 2024-11-21T17:50:47.753860Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2024-11-21T17:50:47.753865Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1000 Path# /dev/disk1 2024-11-21T17:50:47.753870Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2024-11-21T17:50:47.753875Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1000 Path# /dev/disk1 2024-11-21T17:50:47.753880Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2024-11-21T17:50:47.753885Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1000 Path# /dev/disk1 2024-11-21T17:50:47.753890Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1001 Path# /dev/disk2 2024-11-21T17:50:47.753895Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1000 Path# /dev/disk3 2024-11-21T17:50:47.753921Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2024-11-21T17:50:47.753926Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1001 Path# /dev/disk1 2024-11-21T17:50:47.753939Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1000 Path# /dev/disk3 
2024-11-21T17:50:47.753943Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2024-11-21T17:50:47.753948Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1001 Path# /dev/disk1 2024-11-21T17:50:47.753952Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1002 Path# /dev/disk2 2024-11-21T17:50:47.753956Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1001 Path# /dev/disk2 2024-11-21T17:50:47.753961Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1000 Path# /dev/disk3 2024-11-21T17:50:47.753978Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2024-11-21T17:50:47.753984Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1001 Path# /dev/disk1 2024-11-21T17:50:47.753989Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1002 Path# /dev/disk3 2024-11-21T17:50:47.753993Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1001 Path# /dev/disk3 2024-11-21T17:50:47.753997Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1001 Path# /dev/disk2 2024-11-21T17:50:47.754002Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3 2024-11-21T17:50:47.754007Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1002 Path# /dev/disk1 2024-11-21T17:50:47.754012Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1002 Path# /dev/disk2 2024-11-21T17:50:47.754018Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1001 Path# /dev/disk2 2024-11-21T17:50:47.754022Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1000 Path# /dev/disk3 2024-11-21T17:50:47.754027Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1002 Path# /dev/disk3 2024-11-21T17:50:47.754032Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1001 Path# /dev/disk3 2024-11-21T17:50:47.754036Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1001 Path# /dev/disk2 2024-11-21T17:50:47.754040Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1001 Path# /dev/disk2 2024-11-21T17:50:47.754045Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1000 Path# /dev/disk1 2024-11-21T17:50:47.754049Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1002 Path# /dev/disk2 2024-11-21T17:50:47.754054Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1000 Path# /dev/disk3 2024-11-21T17:50:47.754058Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1002 Path# /dev/disk3 2024-11-21T17:50:47.754063Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1001 Path# /dev/disk2 2024-11-21T17:50:47.754067Z node 71 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1002 Path# /dev/disk2 2024-11-21T17:50:47.754072Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1002 Path# /dev/disk2 2024-11-21T17:50:47.754077Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1000 Path# /dev/disk3 2024-11-21T17:50:47.754083Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1001 Path# /dev/disk3 2024-11-21T17:50:47.754088Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1001 Path# /dev/disk2 2024-11-21T17:50:47.754093Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1001 Path# /dev/disk2 2024-11-21T17:50:47.754098Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 94:1002 Path# /dev/disk2 2024-11-21T17:50:47.754102Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1000 Path# /dev/disk3 2024-11-21T17:50:47.754107Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1001 Path# /dev/disk3 2024-11-21T17:50:47.754111Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1001 Path# /dev/disk2 2024-11-21T17:50:47.754117Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1002 Path# /dev/disk2 2024-11-21T17:50:47.754121Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1000 Path# /dev/disk3 2024-11-21T17:50:47.754126Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1001 Path# /dev/disk2 2024-11-21T17:50:47.754131Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1000 Path# /dev/disk3 2024-11-21T17:50:47.754136Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1001 Path# /dev/disk1 2024-11-21T17:50:47.754141Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2024-11-21T17:50:47.754146Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1002 Path# /dev/disk1 2024-11-21T17:50:47.754150Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1002 Path# /dev/disk3 2024-11-21T17:50:47.754155Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1002 Path# /dev/disk3 2024-11-21T17:50:47.754162Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1000 Path# /dev/disk2 2024-11-21T17:50:47.754167Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2024-11-21T17:50:47.754172Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1002 Path# /dev/disk1 2024-11-21T17:50:47.754177Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1002 Path# /dev/disk2 2024-11-21T17:50:47.754182Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1001 Path# /dev/disk3 2024-11-21T17:50:47.754188Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1001 Path# /dev/disk3 2024-11-21T17:50:47.754192Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2024-11-21T17:50:47.754198Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1002 Path# /dev/disk1 2024-11-21T17:50:47.754203Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2024-11-21T17:50:47.754208Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1002 Path# /dev/disk1 2024-11-21T17:50:47.754213Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 93:1002 Path# /dev/disk3 2024-11-21T17:50:47.754218Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1001 Path# /dev/disk2 2024-11-21T17:50:47.754224Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1002 Path# /dev/disk2 2024-11-21T17:50:47.754229Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2024-11-21T17:50:47.754233Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1002 Path# /dev/disk1 2024-11-21T17:50:47.754239Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2024-11-21T17:50:47.754244Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1002 Path# /dev/disk1 2024-11-21T17:50:47.754249Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1002 Path# /dev/disk3 2024-11-21T17:50:47.754254Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1002 Path# /dev/disk2 2024-11-21T17:50:47.765747Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T17:50:47.794341Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2024-11-21T17:50:47.815899Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:54.995915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:54.997442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.997452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:54.997456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:54.997461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:54.997463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:54.997470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.999575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:55.027328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:55.027348Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:55.036180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:55.036710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:55.036736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:55.046718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:55.059214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:55.059334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.087965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:55.102234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:55.155963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.155972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:55.155983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.162980Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:55.186652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.203600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:55.219152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:55.219169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:55.219916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:55.219929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:55.219933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:55.220301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:55.220689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.220714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.221141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:55.221508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:55.221547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:55.221670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:55.221737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.221764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:55.222084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.222120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:55.231186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.231209Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:55.231223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:55.231226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:55.231247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:55.231255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:55.231275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:55.231279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:55.231282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:55.231611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:55.231627Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:55.231630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.231640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... hemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 3 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2024-11-21T17:50:55.412443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:55.412458Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:595:2527], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:50:55.412505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2024-11-21T17:50:55.412509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2024-11-21T17:50:55.412527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2024-11-21T17:50:55.412531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:690:2594], at schemeshard: 72075186234409546, txId: 0, path id: 1 2024-11-21T17:50:55.412785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2024-11-21T17:50:55.412853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2024-11-21T17:50:55.412873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2024-11-21T17:50:55.413078Z node 1 :HIVE INFO: [72075186233409546] 
TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2024-11-21T17:50:55.413095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2024-11-21T17:50:55.413099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:55.413110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2024-11-21T17:50:55.413116Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2024-11-21T17:50:55.413121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240 2024-11-21T17:50:55.413239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:50:55.413247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:50:55.413510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.413528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.413533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2024-11-21T17:50:55.413544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T17:50:55.413547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:50:55.413552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2024-11-21T17:50:55.413557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:50:55.413562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T17:50:55.413565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T17:50:55.413574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:50:55.413935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:50:55.413943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:50:55.414014Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:50:55.414030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:50:55.414035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:830:2714] TestWaitNotification: OK eventTxId 106 2024-11-21T17:50:55.414105Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:55.414129Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 32us result status StatusSuccess 2024-11-21T17:50:55.414192Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.414245Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2024-11-21T17:50:55.414256Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/ServerLess0" took 11us result status StatusSuccess 2024-11-21T17:50:55.414284Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 
SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 2024-11-21T17:50:55.414323Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:50:55.414331Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 9us result status StatusSuccess 2024-11-21T17:50:55.414357Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.414394Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] Test command err: 2024-11-21T17:50:43.876129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:43.876647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:43.876681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001541/r3tmp/tmpmcBnvS/pdisk_1.dat 2024-11-21T17:50:43.980076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:43.997046Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:44.039174Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:44.039464Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:44.039508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:44.039524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:44.050036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:44.153547Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:50:44.153573Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:50:44.153607Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:50:44.163363Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:50:44.163601Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:50:44.163619Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:50:44.163679Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:50:44.163724Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:50:44.163738Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:50:44.163809Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:50:44.164172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.164488Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:50:44.164502Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:50:44.178379Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:44.178611Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:44.178691Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:44.178749Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:44.186873Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:44.187066Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:44.187095Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:44.187288Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:44.187297Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:44.187305Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:44.187350Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:44.191315Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:44.191388Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:44.191413Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:50:44.191419Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:44.191423Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:44.191428Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:44.191565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:44.191574Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:44.191713Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:44.191735Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:50:44.191750Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:44.191755Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:44.191762Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:50:44.191769Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:44.191775Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:44.191782Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:50:44.191786Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:44.191790Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:44.191795Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:44.191800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:44.191819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:50:44.191824Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:44.191845Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:44.191889Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:50:44.191899Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:44.191915Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:44.191923Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:44.191927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:50:44.191933Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:50:44.191938Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:44.191981Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:50:44.191986Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:50:44.191992Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:44.192003Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:44.192014Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:50:44.192018Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:44.192021Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:50:44.192025Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:50:44.192030Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:50:44.192187Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:44.192194Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:44.192199Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:44.192206Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:50:44.192216Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:50:44.192678Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:50:44.192694Z ... ing: false } RuntimeSettings { TimeoutMs: 300000 ExecType: DATA UseSpilling: false StatsMode: DQ_STATS_MODE_NONE } } TxBody: cleared Tasks TxBody: injected Locks 2024-11-21T17:50:54.942950Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [7:987:2792], Recipient [7:631:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 987 RawX2: 30064773864 } TxBody: " \0018\000`\200\200\200\005jI\010\001\0329\n!\tY\001\000\000\000\000\000\000\021\000\000\001\000\000\020\000\001\030\001 \004)\000\001\205\000\000\000\000\0010\002\020\200\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T17:50:54.942957Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:54.942982Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [7:631:2536], Recipient [7:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:50:54.942987Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:50:54.942997Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:54.943017Z node 7 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:50:54.943022Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:50:54.943031Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:50:54.943039Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T17:50:54.943041Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:50:54.943044Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:50:54.943046Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 
on unit BuildAndWaitDependencies 2024-11-21T17:50:54.943050Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T17:50:54.943057Z node 7 :TX_DATASHARD TRACE: Activated operation [0:281474976715669] at 72075186224037888 2024-11-21T17:50:54.943059Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T17:50:54.943061Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:50:54.943063Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:50:54.943065Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:50:54.943070Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T17:50:54.943076Z node 7 :TX_DATASHARD TRACE: Operation [0:281474976715669] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193400 2024-11-21T17:50:54.943082Z node 7 :TX_DATASHARD TRACE: KqpCommitLock LockId: 345 DataShard: 72075186224037888 Generation: 1 Counter: 4 SchemeShard: 72057594046644480 PathId: 2 2024-11-21T17:50:54.943087Z node 7 :TX_DATASHARD TRACE: Committing changes lockId# 345 in localTid# 1001 shard# 72075186224037888 2024-11-21T17:50:54.943111Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T17:50:54.943116Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:54.943118Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:50:54.943120Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:54.943122Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:54.943126Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2024-11-21T17:50:54.943128Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:54.943130Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:50:54.943132Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:50:54.943137Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T17:50:54.943140Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:50:54.943143Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2024-11-21T17:50:54.943217Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:54.943222Z node 7 
:TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:54.943227Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:50:54.943249Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715669 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 65 } } ComputeActorStats { } 2024-11-21T17:50:54.954206Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T17:50:54.954222Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715670 ProcessProposeKqpTransaction 2024-11-21T17:50:54.954350Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd7xjcyz6wpbp1kbhpebw26k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjRlZDNlMDQtYzY5MTMzN2UtZDM3N2ViMjMtYzVlOGM4Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2024-11-21T17:50:54.954650Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1009:2817], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:50:54.954671Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:50:54.954678Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T17:50:54.954683Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3000/18446744073709551615 2024-11-21T17:50:54.954690Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2024-11-21T17:50:54.954702Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T17:50:54.954706Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:50:54.954710Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:50:54.954713Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:50:54.954721Z node 7 :TX_DATASHARD TRACE: Activated operation [0:11] at 72075186224037888 2024-11-21T17:50:54.954725Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T17:50:54.954728Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:50:54.954732Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:50:54.954751Z 
node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:50:54.954763Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T17:50:54.954801Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1009:2817], 0} after executionsCount# 1 2024-11-21T17:50:54.954808Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1009:2817], 0} sends rowCount# 3, bytes# 72, quota rows left# 998, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:50:54.954821Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1009:2817], 0} finished in read 2024-11-21T17:50:54.954829Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T17:50:54.954831Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:50:54.954835Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:50:54.954838Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:50:54.954844Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T17:50:54.954847Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:50:54.954850Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:11] at 72075186224037888 has finished 2024-11-21T17:50:54.954854Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:50:54.954868Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:50:54.955001Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1009:2817], Recipient [7:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:50:54.955009Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 23 } }, { items { uint32_value: 3 } items { uint32_value: 31 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting 
iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 Starting iteration 50 Starting iteration 51 Starting iteration 52 Starting iteration 53 Starting iteration 54 Starting iteration 55 Starting iteration 56 Starting iteration 57 Starting iteration 58 Starting iteration 59 Starting iteration 60 Starting iteration 61 Starting iteration 62 Starting iteration 63 Starting iteration 64 Starting iteration 65 Starting iteration 66 Starting iteration 67 Starting iteration 68 Starting iteration 69 Starting iteration 70 Starting iteration 71 Starting iteration 72 Starting iteration 73 Starting iteration 74 Starting iteration 75 Starting iteration 76 Starting iteration 77 Starting iteration 78 Starting iteration 79 Starting iteration 80 Starting iteration 81 Starting iteration 82 Starting iteration 83 Starting iteration 84 Starting iteration 85 Starting iteration 86 Starting iteration 87 Starting iteration 88 Starting iteration 89 Starting iteration 90 Starting iteration 91 Starting iteration 92 Starting iteration 93 Starting iteration 94 Starting iteration 95 Starting iteration 96 Starting iteration 97 Starting iteration 98 Starting iteration 99 Starting iteration 100 Starting iteration 101 Starting iteration 102 Starting iteration 103 Starting iteration 104 Starting iteration 105 Starting iteration 106 Starting iteration 107 Starting iteration 108 Starting iteration 109 Starting iteration 110 Starting iteration 111 Starting iteration 112 Starting iteration 113 Starting iteration 114 Starting iteration 115 Starting iteration 116 Starting iteration 117 Starting iteration 118 Starting iteration 119 Starting iteration 120 Starting iteration 121 Starting iteration 122 Starting iteration 123 Starting iteration 124 Starting iteration 125 Starting iteration 126 Starting iteration 127 Starting iteration 128 Starting iteration 129 Starting iteration 130 Starting iteration 131 Starting iteration 132 Starting iteration 133 Starting iteration 134 Starting iteration 135 Starting iteration 136 Starting iteration 137 Starting iteration 138 Starting iteration 139 Starting iteration 140 Starting iteration 141 Starting iteration 142 Starting iteration 143 Starting iteration 144 Starting iteration 145 Starting iteration 146 Starting iteration 147 Starting iteration 148 Starting iteration 149 Starting iteration 150 Starting iteration 151 Starting iteration 152 Starting iteration 153 Starting iteration 154 Starting iteration 155 Starting iteration 156 Starting iteration 157 Starting iteration 158 Starting iteration 159 Starting iteration 160 Starting iteration 161 Starting iteration 162 Starting iteration 163 Starting iteration 164 Starting iteration 165 Starting iteration 166 Starting iteration 167 Starting iteration 168 Starting iteration 169 Starting iteration 170 Starting iteration 171 Starting iteration 172 Starting iteration 173 Starting iteration 174 Starting iteration 175 Starting iteration 176 Starting iteration 177 Starting iteration 178 Starting iteration 179 Starting iteration 180 Starting iteration 181 Starting iteration 182 Starting iteration 183 Starting iteration 184 Starting iteration 185 Starting iteration 186 Starting iteration 187 Starting iteration 188 Starting iteration 189 Starting iteration 190 Starting iteration 191 Starting iteration 192 Starting iteration 193 Starting iteration 194 Starting iteration 195 Starting iteration 196 Starting 
iteration 197 Starting iteration 198 Starting iteration 199 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block |82.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |82.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |82.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Balancing::Balancing_OneTopic_TopicApi >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure |82.2%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> TopicAutoscaling::PartitionSplit_PQv1 >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage |82.2%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::InsertSingleRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2845:2115] 2024-11-21T17:50:17.158550Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:17.159342Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:17.159719Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:17.159810Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:17.159948Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:17.159956Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:17.159995Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:17.160909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:17.160937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:17.160963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:17.160980Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:17.160992Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:17.161000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2024-11-21T17:50:17.171618Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:17.171661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:17.181954Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:17.181994Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:17.182005Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:17.182013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:17.182035Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:17.182040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:17.182044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:17.182050Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:17.192325Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:17.192394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:17.192522Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:17.192527Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:17.192544Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:17.194583Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2024-11-21T17:50:17.194771Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2024-11-21T17:50:17.194778Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2024-11-21T17:50:17.194782Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1002 Path# /dev/disk1 2024-11-21T17:50:17.194787Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk2 2024-11-21T17:50:17.194791Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2024-11-21T17:50:17.194794Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk3 2024-11-21T17:50:17.194798Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2024-11-21T17:50:17.194803Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 Path# /dev/disk1 2024-11-21T17:50:17.194806Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2024-11-21T17:50:17.194810Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2024-11-21T17:50:17.194823Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk1 2024-11-21T17:50:17.194827Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk3 2024-11-21T17:50:17.194831Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2024-11-21T17:50:17.194834Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk1 2024-11-21T17:50:17.194838Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk2 2024-11-21T17:50:17.194841Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2024-11-21T17:50:17.194845Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2024-11-21T17:50:17.194850Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2024-11-21T17:50:17.194854Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2024-11-21T17:50:17.194857Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk2 2024-11-21T17:50:17.194861Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 2024-11-21T17:50:17.194864Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk1 2024-11-21T17:50:17.194868Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2024-11-21T17:50:17.194871Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2024-11-21T17:50:17.194875Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2024-11-21T17:50:17.194878Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2024-11-21T17:50:17.194882Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2024-11-21T17:50:17.194885Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2024-11-21T17:50:17.194889Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2024-11-21T17:50:17.194892Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk1 2024-11-21T17:50:17.194895Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2024-11-21T17:50:17.194899Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2024-11-21T17:50:17.194903Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2024-11-21T17:50:17.194906Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk2 2024-11-21T17:50:17.194910Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2024-11-21T17:50:17.194914Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2024-11-21T17:50:17.194917Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/ ... 
1T17:50:47.511562Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 165:1002 Path# /dev/disk2 2024-11-21T17:50:47.511566Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 169:1002 Path# /dev/disk1 2024-11-21T17:50:47.511569Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 174:1002 Path# /dev/disk2 2024-11-21T17:50:47.511574Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 164:1002 Path# /dev/disk3 2024-11-21T17:50:47.511577Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 167:1002 Path# /dev/disk3 2024-11-21T17:50:47.511579Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 170:1002 Path# /dev/disk3 2024-11-21T17:50:47.511582Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1000 Path# /dev/disk1 2024-11-21T17:50:47.511585Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1000 Path# /dev/disk1 2024-11-21T17:50:47.511588Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2024-11-21T17:50:47.511591Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1001 Path# /dev/disk2 2024-11-21T17:50:47.511596Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 172:1002 Path# /dev/disk1 2024-11-21T17:50:47.511599Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1001 Path# /dev/disk2 2024-11-21T17:50:47.511603Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1002 Path# /dev/disk1 2024-11-21T17:50:47.511606Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1002 Path# /dev/disk3 2024-11-21T17:50:47.511609Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 192:1002 Path# /dev/disk2 2024-11-21T17:50:47.511612Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1001 Path# /dev/disk3 2024-11-21T17:50:47.511617Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 189:1002 Path# /dev/disk1 2024-11-21T17:50:47.511620Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2024-11-21T17:50:47.511623Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1002 Path# /dev/disk2 2024-11-21T17:50:47.511626Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 175:1002 Path# /dev/disk3 2024-11-21T17:50:47.511629Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2024-11-21T17:50:47.511632Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1001 Path# /dev/disk2 2024-11-21T17:50:47.511635Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1002 Path# /dev/disk3 2024-11-21T17:50:47.575288Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2024-11-21T17:50:47.577014Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1000 Path# /dev/disk2 2024-11-21T17:50:47.577035Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1001 Path# /dev/disk1 2024-11-21T17:50:47.577040Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1000 Path# /dev/disk2 2024-11-21T17:50:47.577044Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1001 Path# /dev/disk1 2024-11-21T17:50:47.577049Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1000 Path# /dev/disk2 2024-11-21T17:50:47.577054Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1001 Path# /dev/disk1 2024-11-21T17:50:47.577058Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1000 Path# /dev/disk2 2024-11-21T17:50:47.577063Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1001 Path# /dev/disk1 2024-11-21T17:50:47.577067Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1000 Path# /dev/disk2 2024-11-21T17:50:47.577072Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1001 Path# /dev/disk1 2024-11-21T17:50:47.577077Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1000 Path# /dev/disk2 2024-11-21T17:50:47.577081Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1001 Path# /dev/disk1 2024-11-21T17:50:47.577086Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1000 Path# /dev/disk2 2024-11-21T17:50:47.577090Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1001 Path# /dev/disk1 2024-11-21T17:50:47.577093Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1000 Path# /dev/disk3 2024-11-21T17:50:47.577097Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2024-11-21T17:50:47.577102Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1002 Path# /dev/disk1 2024-11-21T17:50:47.577106Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1000 Path# /dev/disk3 2024-11-21T17:50:47.577110Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2024-11-21T17:50:47.577115Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1002 Path# /dev/disk1 2024-11-21T17:50:47.577120Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 
211:1000 Path# /dev/disk3 2024-11-21T17:50:47.577125Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2024-11-21T17:50:47.577129Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1002 Path# /dev/disk1 2024-11-21T17:50:47.577166Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2024-11-21T17:50:47.577172Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 2024-11-21T17:50:47.577178Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2024-11-21T17:50:47.577187Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2024-11-21T17:50:47.577198Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2024-11-21T17:50:47.577207Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2024-11-21T17:50:47.577215Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6891, MsgBus: 7900 2024-11-21T17:50:28.209958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791436255928316:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:28.210017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b28/r3tmp/tmpZD7Hq7/pdisk_1.dat 2024-11-21T17:50:28.261403Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6891, node 1 2024-11-21T17:50:28.273870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:50:28.273882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:50:28.273883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:50:28.273919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7900 TClient is connected to server localhost:7900 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:50:28.310906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:28.310929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:28.312065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:28.342537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.349777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.365162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.383732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.393006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:28.480096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791436255929858:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.480125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.502991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.557757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.563575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.570883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.624991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.634560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.642315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791436255930376:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.642343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.642368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791436255930381:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:28.642993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:50:28.647203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791436255930383:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:50:28.806431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:50:28.856753Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd7xhkfq419zvbzvj6ay1hmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMtZWI5MjUwNDYtOTQ0ZjFlYWYtMzdhMWY0ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.857944Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd7xhkfq419zvbzvj6ay1hmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMtZWI5MjUwNDYtOTQ0ZjFlYWYtMzdhMWY0ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.858381Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd7xhkfq419zvbzvj6ay1hmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMtZWI5MjUwNDYtOTQ0ZjFlYWYtMzdhMWY0ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.862863Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7xhkfy5m2qkqs3153pxfz7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNlYzk3MzgtODY0N2EzZDYtN2Q2NmY2YmMtNWI2NDBhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.863487Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd7xhkfy5m2qkqs3153pxfz7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNlYzk3MzgtODY0N2EzZDYtN2Q2NmY2YmMtNWI2NDBhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.863746Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jd7xhkfy5m2qkqs3153pxfz7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNlYzk3MzgtODY0N2EzZDYtN2Q2NmY2YmMtNWI2NDBhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.867953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd7xhkg31jkjz31gsj9h91w5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMtZWI5MjUwNDYtOTQ0ZjFlYWYtMzdhMWY0ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.868547Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd7xhkg31jkjz31gsj9h91w5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMtZWI5MjUwNDYtOTQ0ZjFlYWYtMzdhMWY0ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.868841Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd7xhkg31jkjz31gsj9h91w5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMtZWI5MjUwNDYtOTQ0ZjFlYWYtMzdhMWY0ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.872576Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. 
Ctx: { TraceId: 01jd7xhkg79g8m6teym54hx9n2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNlYzk3MzgtODY0N2EzZDYtN2Q2NmY2YmMtNWI2NDBhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.873227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd7xhkg79g8m6teym54hx9n2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNlYzk3MzgtODY0N2EzZDYtN2Q2NmY2YmMtNWI2NDBhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.873497Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jd7xhkg79g8m6teym54hx9n2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNlYzk3MzgtODY0N2EzZDYtN2Q2NmY2YmMtNWI2NDBhZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.876656Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd7xhkgc5m3q3vehgdsr8944, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMtZWI5MjUwNDYtOTQ0ZjFlYWYtMzdhMWY0ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:28.877218Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jd7xhkgc5m3q3vehgdsr8944, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdkNjFjZmMt ... : TxId: 281474976723130. Ctx: { TraceId: 01jd7xjfc3a4t6ymv4x171sf5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.412683Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723131. Ctx: { TraceId: 01jd7xjfc3a4t6ymv4x171sf5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.413169Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723132. Ctx: { TraceId: 01jd7xjfc3a4t6ymv4x171sf5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.417082Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723133. Ctx: { TraceId: 01jd7xjfc87bdxwpykypqqfapx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.417854Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723134. Ctx: { TraceId: 01jd7xjfc87bdxwpykypqqfapx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.418338Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723135. Ctx: { TraceId: 01jd7xjfc87bdxwpykypqqfapx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.422823Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723136. 
Ctx: { TraceId: 01jd7xjfcdcpz0ank56npqannk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.423923Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723137. Ctx: { TraceId: 01jd7xjfcdcpz0ank56npqannk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.424761Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723138. Ctx: { TraceId: 01jd7xjfcdcpz0ank56npqannk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.434562Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723139. Ctx: { TraceId: 01jd7xjfcr5zz584nnd62g06s7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.435738Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723140. Ctx: { TraceId: 01jd7xjfcr5zz584nnd62g06s7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.436373Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723141. Ctx: { TraceId: 01jd7xjfcr5zz584nnd62g06s7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.442234Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723142. Ctx: { TraceId: 01jd7xjfd191p0xjc1fhdpsb88, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.443581Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723143. Ctx: { TraceId: 01jd7xjfd191p0xjc1fhdpsb88, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.444291Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723144. Ctx: { TraceId: 01jd7xjfd191p0xjc1fhdpsb88, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.449923Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723145. Ctx: { TraceId: 01jd7xjfd8errzcm7j64d6zvne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.451124Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723146. 
Ctx: { TraceId: 01jd7xjfd8errzcm7j64d6zvne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.451776Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723147. Ctx: { TraceId: 01jd7xjfd8errzcm7j64d6zvne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.456807Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723148. Ctx: { TraceId: 01jd7xjfdf873bz3ma9h4bs5g3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.457767Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723149. Ctx: { TraceId: 01jd7xjfdf873bz3ma9h4bs5g3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.458288Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723150. Ctx: { TraceId: 01jd7xjfdf873bz3ma9h4bs5g3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.463718Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723151. Ctx: { TraceId: 01jd7xjfdp54a8p08crnwvqzkb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.464781Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723152. Ctx: { TraceId: 01jd7xjfdp54a8p08crnwvqzkb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.465351Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723153. Ctx: { TraceId: 01jd7xjfdp54a8p08crnwvqzkb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.470310Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723154. Ctx: { TraceId: 01jd7xjfdxb12827gqap8ed234, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.471327Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723155. Ctx: { TraceId: 01jd7xjfdxb12827gqap8ed234, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.471927Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723156. 
Ctx: { TraceId: 01jd7xjfdxb12827gqap8ed234, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.480122Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723157. Ctx: { TraceId: 01jd7xjfe7bevqekez5qjp5e8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.481078Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723158. Ctx: { TraceId: 01jd7xjfe7bevqekez5qjp5e8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.481666Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723159. Ctx: { TraceId: 01jd7xjfe7bevqekez5qjp5e8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.486262Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723160. Ctx: { TraceId: 01jd7xjfedepjaw77n0n5qyhrq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.487120Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723161. Ctx: { TraceId: 01jd7xjfedepjaw77n0n5qyhrq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.487634Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723162. Ctx: { TraceId: 01jd7xjfedepjaw77n0n5qyhrq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFlZTkzYjYtNTc5NzcwMzQtODU2YWI3OC04MDVjMWU4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.493947Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723163. Ctx: { TraceId: 01jd7xjfek5ebn6xr4yq13vbxm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.497271Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723164. Ctx: { TraceId: 01jd7xjfek5ebn6xr4yq13vbxm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:57.498163Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723165. Ctx: { TraceId: 01jd7xjfek5ebn6xr4yq13vbxm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmI4NDY1NDItNDM4ZTdmYTAtYmU1Njk3M2MtMzczNjU4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:43.190729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:43.190754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.190758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:43.190762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:43.190765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:43.190768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:43.190773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.190836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:43.200116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:43.200135Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:43.202635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:43.203204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:43.203229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:43.204474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:43.204743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:43.204869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.204958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:43.206207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.206442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.206453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.206487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:43.206494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
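The idx_test suite that finishes with SUCCESS above (YdbIndexTable::OnlineBuildWithDataColumn) exercises an online secondary-index build that carries an extra data column. For orientation only — the table and column names below are hypothetical and not taken from the test, which drives the build through its own harness — a covering index of that shape can be declared in YQL roughly like this:

```python
# Sketch only: YQL for an online-built secondary index that covers an extra
# data column. Identifiers are hypothetical; the unit test does not execute
# this exact statement.
ADD_COVERING_INDEX = """
ALTER TABLE `/Root/SomeTable`
    ADD INDEX `by_value` GLOBAL ON (`value`) COVER (`payload`);
"""
print(ADD_COVERING_INDEX)
```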
2024-11-21T17:50:43.206500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:43.206516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.207800Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:43.220973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:43.221052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.221113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:43.221154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:43.221161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.221771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.221801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:43.221846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.221853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:43.221857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:43.221862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:43.222202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.222230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:43.222234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:43.222565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.222572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.222578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.222583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.223090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:43.223425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:43.223462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:43.223591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.223611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.223618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.223656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:43.223660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.223683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.223693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:43.224041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.224046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.224079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.224082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:43.224133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.224138Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:43.224145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:43.224148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.224152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:43.224155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.224157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
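The schemeshard trace above records each sub-operation's lifecycle as explicit state transitions ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240"). When digging through a run like this one, a small self-contained regex pass is enough to pull those transitions out of the saved log (file name here is just an example):

```python
import re

# Matches schemeshard lines such as: "Change state for txid 102:0 128 -> 240"
STATE_RE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

def state_transitions(lines):
    """Yield (operation_id, from_state, to_state) tuples found in log lines."""
    for line in lines:
        for op_id, src, dst in STATE_RE.findall(line):
            yield op_id, int(src), int(dst)

if __name__ == "__main__":
    sample = ("2024-11-21T17:50:43.222234Z node 1 :FLAT_TX_SCHEMESHARD INFO: "
              "Change state for txid 1:0 3 -> 128")
    print(list(state_transitions([sample])))  # [('1:0', 3, 128)]
```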
2024-11-21T17:50:43.224160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:43.224167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:43.224171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:43.224173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:43.224386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.224396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:43.224399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:43.224402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:43.224405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.224415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... nResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 60030000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 51 } } 2024-11-21T17:50:58.243434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:50:58.243465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:50:58.243471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2024-11-21T17:50:58.246909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:58.246976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:58.246985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:58.247000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T17:50:58.247043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:58.247959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:50:58.247998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T17:50:58.248158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:58.248181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:58.248188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:50:58.248297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T17:50:58.248328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:50:58.249584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:58.249598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:50:58.249668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:58.249673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:50:58.249787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:58.249794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:50:58.249870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:58.249881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:50:58.249886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:50:58.249891Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:50:58.249895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:50:58.249907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is 
published: true 2024-11-21T17:50:58.250144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 238 } } 2024-11-21T17:50:58.250152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:58.250181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 238 } } 2024-11-21T17:50:58.250195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 238 } } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:50:58.250543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:58.250549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:50:58.250570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:58.250575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:50:58.250581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:50:58.250590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:58.250593Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:58.250597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:50:58.250601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:50:58.251236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:50:58.251285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
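Operation 102 traced above is the racy ALTER from TSchemeShardTTLTests::RacyAlterTableAndConditionalErase, which changes a TTL-enabled table while a conditional-erase round is pending. As a rough illustration only (the test builds the scheme transaction directly and its table name is not shown here), enabling row TTL on a YDB table is an ALTER of approximately this shape:

```python
# Sketch only: YQL for enabling row TTL on a timestamp column. Table and
# column names are hypothetical; the unit test proposes the schemeshard
# transaction directly rather than running YQL.
ENABLE_TTL = """
ALTER TABLE `/Root/TTLEnabledTable`
    SET (TTL = Interval("PT1H") ON `modified_at`);
"""
print(ENABLE_TTL)
```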
2024-11-21T17:50:58.251345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:58.251390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:50:58.251396Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:50:58.251407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:50:58.251410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:58.251415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:50:58.251426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T17:50:58.251434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:50:58.251438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:50:58.251442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:50:58.251460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:50:58.251810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:50:58.251818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:607:2564] TestWaitNotification: OK eventTxId 102 2024-11-21T17:50:58.251889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T17:50:58.251900Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2024-11-21T17:50:58.252285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T17:50:58.252308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:50:58.252315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.039500Z, at schemeshard: 72057594046678944 2024-11-21T17:50:58.252324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout |82.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |82.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> 
TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |82.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |82.3%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.3%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> CdcStreamChangeCollector::UpsertToSameKey |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] Test command err: 2024-11-21T17:50:43.798384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:43.798945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:43.798989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00154c/r3tmp/tmpYNu2LM/pdisk_1.dat 2024-11-21T17:50:43.909362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:43.926926Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:43.969184Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:43.969472Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:43.969519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:43.969538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:43.980124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:44.084433Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:50:44.084462Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:50:44.084498Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:50:44.093741Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:50:44.093984Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:50:44.093999Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:50:44.094055Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:50:44.094096Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:50:44.094110Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
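The TX_PROXY trace above proposes ESchemeOpCreateTable for "/Root/table-1" with two Uint32 columns, key column "key", and UniformPartitionsCount: 1. Expressed as YQL — an approximate equivalent for readability, not what the test literally sends, since it ships the scheme transaction proto directly — that schema looks roughly like:

```python
# Approximate YQL equivalent of the CreateTable proposal seen in the trace
# (Name: "table-1", key/value Uint32, KeyColumnNames: "key",
#  UniformPartitionsCount: 1). Shown for orientation only.
CREATE_TABLE_1 = """
CREATE TABLE `/Root/table-1` (
    key   Uint32,
    value Uint32,
    PRIMARY KEY (key)
) WITH (UNIFORM_PARTITIONS = 1);
"""
print(CREATE_TABLE_1)
```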
2024-11-21T17:50:44.094178Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:50:44.094553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.094801Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:50:44.094813Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:50:44.108775Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:44.108999Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:44.109084Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:44.109159Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:44.114866Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:44.114992Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:44.115010Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:44.115112Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:44.115118Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:44.115122Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:44.115162Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:44.117340Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:44.117382Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:44.117397Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:50:44.117400Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:44.117403Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:44.117406Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:44.117495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:44.117499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:44.117603Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:44.117614Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:50:44.117622Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:44.117625Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:44.117630Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:50:44.117634Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:44.117638Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:44.117642Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:50:44.117646Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:44.117648Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:44.117652Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:44.117654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:44.117667Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:50:44.117670Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:44.117685Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:44.117724Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:50:44.117730Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:44.117741Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:44.117746Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:44.117749Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:50:44.117752Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:50:44.117759Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:44.117789Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:50:44.117792Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:50:44.117795Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:44.117797Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:44.117803Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:50:44.117805Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:44.117807Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:50:44.117809Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:50:44.117812Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:50:44.117970Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:50:44.117975Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:50:44.128276Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:44.128306Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:44.128312Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:44.128323Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 17:50:58.475356Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:58.475395Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:923:2741], Recipient [8:923:2741]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:50:58.475400Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:50:58.475417Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:58.475461Z node 8 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:50:58.475471Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:50:58.475494Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:50:58.475514Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T17:50:58.475519Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:50:58.475528Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:50:58.475533Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:50:58.475543Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v400/18446744073709551615 2024-11-21T17:50:58.475560Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2024-11-21T17:50:58.475566Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T17:50:58.475569Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit 
BuildAndWaitDependencies 2024-11-21T17:50:58.475572Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:50:58.475576Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:50:58.475590Z node 8 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715661 not found 2024-11-21T17:50:58.475596Z node 8 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715661 expected 1:0 found 0:0 2024-11-21T17:50:58.475612Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2024-11-21T17:50:58.475623Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T17:50:58.475626Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:50:58.475629Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:58.475632Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:58.475640Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2024-11-21T17:50:58.475643Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:58.475646Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:50:58.475650Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:50:58.475663Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T17:50:58.475666Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:50:58.475670Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2024-11-21T17:50:58.475682Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:58.475687Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:58.475693Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2024-11-21T17:50:58.475710Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.475807Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=ZmJmYjIxNzItMWFiZDYwOGItOWZiMzE0OGItODBiNTcxNTY=, ActorId: [8:801:2648], ActorState: ExecuteState, TraceId: 01jd7xjgd3ddehtc4t1xbz0ta0, Create QueryResponse for error on request, msg: 2024-11-21T17:50:58.475953Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xjgd3ddehtc4t1xbz0ta0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZmJmYjIxNzItMWFiZDYwOGItOWZiMzE0OGItODBiNTcxNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:50:58.475999Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [8:951:2648], Recipient [8:923:2741]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 951 RawX2: 34359741016 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2024-11-21T17:50:58.476003Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:58.476018Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:923:2741], Recipient [8:923:2741]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:50:58.476020Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:50:58.476026Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:58.476043Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T17:50:58.476051Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:50:58.476056Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:50:58.476058Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:50:58.476060Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:50:58.476062Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:50:58.476066Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2024-11-21T17:50:58.476072Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T17:50:58.476075Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:50:58.476077Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:50:58.476078Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:50:58.476081Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:50:58.476087Z node 8 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2024-11-21T17:50:58.476096Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2024-11-21T17:50:58.476106Z node 8 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T17:50:58.476112Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 
2024-11-21T17:50:58.476123Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:50:58.476125Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:58.476127Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:58.476131Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:50:58.476142Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2024-11-21T17:50:58.476145Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:58.476148Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:50:58.476151Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:50:58.476157Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T17:50:58.476159Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:50:58.476163Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T17:50:58.476171Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:58.476174Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:58.476179Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.804725Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [8:968:2774], Recipient [8:923:2741]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:58.804755Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:58.804766Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:967:2773], serverId# [8:968:2774], sessionId# [0:0:0] 2024-11-21T17:50:58.804798Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [8:557:2484], Recipient [8:923:2741]: NKikimr::TEvDataShard::TEvGetOpenTxs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is 
[1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:03.692990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.693010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.693015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.693019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.693023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.693026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.693043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.693126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.703454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.703474Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.705513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.705631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.705670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.708336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.708413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.708503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.708697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.709395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.709645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.709656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.709666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.709671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.709676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.709708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:03.710968Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.727446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.727522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.727575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.727614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.727622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.728246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.728272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.728307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.728316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.728320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.728324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.728675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.728683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.728686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.728954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.728961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.728966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.728972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T17:50:03.729458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.729781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.729814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.729932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.729949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.729955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.729994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.729998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.730019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.730028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.730341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.730347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.730369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.730371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.730414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.730417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.730425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.730428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.730431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2024-11-21T17:50:03.730434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.730436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.730439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.730446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.730451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.730455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... _SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T17:50:56.182620Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:56.182623Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:50:56.182629Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:50:56.182735Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.182741Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.182744Z node 169 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:56.182746Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2024-11-21T17:50:56.182748Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:50:56.182753Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:50:56.182989Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:50:56.182994Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:50:56.182997Z node 169 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183184Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T17:50:56.183196Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:56.183230Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 
2024-11-21T17:50:56.183235Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409548 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2024-11-21T17:50:56.183308Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183320Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 725849475177 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183324Z node 169 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183335Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183340Z node 169 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T17:50:56.183342Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:56.183347Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:56.183352Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:50:56.183356Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T17:50:56.183359Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:50:56.183361Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T17:50:56.183363Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T17:50:56.183368Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:50:56.183371Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T17:50:56.183373Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T17:50:56.183376Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:50:56.183489Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.183566Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183574Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T17:50:56.183757Z node 
169 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183760Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:56.183777Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:50:56.183788Z node 169 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183790Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [169:200:2203], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T17:50:56.183793Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [169:200:2203], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2024-11-21T17:50:56.183867Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.183872Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.183875Z node 169 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:56.183878Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:50:56.183880Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T17:50:56.183930Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.183935Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.183937Z node 169 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:50:56.183940Z node 169 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:50:56.183942Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:50:56.183947Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T17:50:56.183952Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: 
[169:120:2146] 2024-11-21T17:50:56.183982Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:50:56.183984Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:50:56.183990Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:50:56.184263Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.184454Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:50:56.184471Z node 169 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T17:50:56.184479Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T17:50:56.184523Z node 169 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1004 2024-11-21T17:50:56.184785Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:50:56.184791Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:50:56.184842Z node 169 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:50:56.184855Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:50:56.184859Z node 169 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [169:959:2898] TestWaitNotification: OK eventTxId 1004 >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> AsyncIndexChangeCollector::UpsertSingleRow >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> AsyncIndexChangeCollector::UpsertToSameKey >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit >> AsyncIndexChangeCollector::DeleteNothing >> TBlobStorageProxyTest::TestGetFail [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> 
TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] Test command err: 2024-11-21T17:50:43.794731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:43.795324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:43.795359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001556/r3tmp/tmpT4RDd4/pdisk_1.dat 2024-11-21T17:50:43.899329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:43.916016Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:43.958337Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:43.958667Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:43.958721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:43.958743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:43.969395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:44.073560Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:50:44.073590Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:50:44.073633Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:50:44.080189Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:50:44.080494Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:50:44.080509Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:50:44.080550Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:50:44.080593Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:50:44.080605Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:50:44.080671Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:50:44.081041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.081250Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:50:44.081259Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:50:44.094394Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:44.094567Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:44.094636Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:44.094681Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:44.100538Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:44.100734Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:44.100760Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:44.100966Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:44.100976Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:44.100983Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:44.101036Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:44.104267Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:44.104344Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:44.104372Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:50:44.104376Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:44.104381Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:44.104387Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:44.104537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:44.104543Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:44.104706Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:44.104729Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:50:44.104742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:44.104747Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:44.104754Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:50:44.104762Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:44.104769Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:44.104777Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:50:44.104782Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:44.104786Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:44.104792Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:44.104797Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:44.104817Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:50:44.104822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:44.104846Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:44.104897Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:50:44.104908Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:44.104926Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:44.104934Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:44.104938Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:50:44.104944Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:50:44.104948Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:44.104998Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:50:44.105002Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:50:44.105008Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:44.105018Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:44.105029Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:50:44.105032Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:44.105036Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:50:44.105039Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:50:44.105044Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:50:44.105207Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:44.105212Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:44.105215Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:44.105221Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:50:44.105229Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:50:44.105684Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:50:44.105698Z ... 21T17:50:58.382785Z node 8 :KQP_EXECUTER DEBUG: ActorId: [8:1939:3214] TxId: 281474976715669. Ctx: { TraceId: 01jd7xjg9s94yemsjrethfs3bj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZGYwYTgwNmItZmJiYTc2YTAtMzc3NDFjMDItZDlhMDExZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000317s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 2024-11-21T17:50:58.382813Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGYwYTgwNmItZmJiYTc2YTAtMzc3NDFjMDItZDlhMDExZjc=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd7xjg9s94yemsjrethfs3bj, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T17:50:58.382846Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGYwYTgwNmItZmJiYTc2YTAtMzc3NDFjMDItZDlhMDExZjc=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd7xjg9s94yemsjrethfs3bj, txInfo Status: Committed Kind: ReadOnly TotalDuration: 3.135 ServerDuration: 3.117 QueriesCount: 2 2024-11-21T17:50:58.382877Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGYwYTgwNmItZmJiYTc2YTAtMzc3NDFjMDItZDlhMDExZjc=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd7xjg9s94yemsjrethfs3bj, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T17:50:58.382935Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGYwYTgwNmItZmJiYTc2YTAtMzc3NDFjMDItZDlhMDExZjc=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd7xjg9s94yemsjrethfs3bj, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:50:58.382940Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGYwYTgwNmItZmJiYTc2YTAtMzc3NDFjMDItZDlhMDExZjc=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd7xjg9s94yemsjrethfs3bj, EndCleanup, isFinal: 0 2024-11-21T17:50:58.382952Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGYwYTgwNmItZmJiYTc2YTAtMzc3NDFjMDItZDlhMDExZjc=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd7xjg9s94yemsjrethfs3bj, Sent query response back to proxy, proxyRequestId: 12, proxyId: [8:162:2148] { items { uint32_value: 1 } items { uint32_value: 10 } } 
2024-11-21T17:50:58.459369Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [8:1994:3235], Recipient [8:1996:3236]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:58.460735Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [8:1994:3235], Recipient [8:1996:3236]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:58.460816Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [8:1994:3235], Recipient [8:1996:3236]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:58.462433Z node 8 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [8:1996:3236] 2024-11-21T17:50:58.462486Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:58.462725Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:58.462960Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:58.463076Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:58.463083Z node 8 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:58.463087Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:58.463124Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:58.463134Z node 8 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2024-11-21T17:50:58.463154Z node 8 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 0 siblings to be activated: wait to activation from: 2024-11-21T17:50:58.463158Z node 8 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:50:58.463171Z node 8 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [8:2028:3261] 2024-11-21T17:50:58.463175Z node 8 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:58.463177Z node 8 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2024-11-21T17:50:58.463181Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.463184Z node 8 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2024-11-21T17:50:58.463251Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [8:1996:3236], Recipient [8:1996:3236]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:58.463258Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:58.463315Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvRegisterTablet TabletId# 72075186224037888 ProcessingParams { Version: 1 PlanResolution: 100 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 }} 2024-11-21T17:50:58.463331Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72075186224037888 2024-11-21T17:50:58.463337Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Sender# [8:1996:3236] {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 
2024-11-21T17:50:58.463388Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2024-11-21T17:50:58.463414Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 276168711, Sender [8:2029:3262], Recipient [8:1996:3236]: NKikimr::NDataShard::TEvChangeExchange::TEvSplitAck 2024-11-21T17:50:58.463490Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:58.463494Z node 8 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2024-11-21T17:50:58.463501Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:58.463796Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [8:25:2072], Recipient [8:1996:3236]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 2024-11-21T17:50:58.463801Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T17:50:58.463804Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2024-11-21T17:50:58.463809Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.463980Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Execute, at tablet# 72075186224037888 2024-11-21T17:50:58.463987Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Complete, at tablet# 72075186224037888 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2024-11-21T17:50:58.464949Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 1000 NextAcquireStep: 1000 2024-11-21T17:50:58.464980Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [8:25:2072], Recipient [8:1996:3236]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 1000 NextReadStep# 1000 ReadStep# 1000 } 2024-11-21T17:50:58.464983Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T17:50:58.464988Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1000 next step 1000 2024-11-21T17:50:58.571348Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 1100 2024-11-21T17:50:58.571422Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 1100 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 8 bucket 0 2024-11-21T17:50:58.571512Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1100} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 0 ... blocked step 1100 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 1 ... 
blocked step 1100 at node 9 bucket 1 2024-11-21T17:50:59.217739Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 3000 in 1.000000s at 2.950000s 2024-11-21T17:50:59.218768Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 2000 2024-11-21T17:50:59.218833Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 2000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 8 bucket 0 2024-11-21T17:50:59.218966Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 0 ... blocked step 2000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 1 ... blocked step 2000 at node 9 bucket 1 2024-11-21T17:50:59.732299Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 1.000000s at 3.950000s 2024-11-21T17:50:59.733522Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 3000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2024-11-21T17:50:59.733660Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 3000 ... observed step 3000 at node 8 bucket 0 2024-11-21T17:50:59.733704Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 0 ... blocked step 3000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 1 ... blocked step 3000 at node 9 bucket 1 ... 
upsert finished before unblocking node 2 |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |82.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> CdcStreamChangeCollector::InsertSingleRow >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TExtSubDomainTest::DeclareAndLs >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:43.194669Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:43.194692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.194697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:43.194702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:43.194708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:43.194712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:43.194721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:43.194807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:43.204956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:43.204977Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:43.206878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:43.206944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:43.206966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:43.209128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:43.209209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:43.209294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.209514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:43.210376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.210650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.210659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.210669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:43.210673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.210677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:43.210710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: 
[1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:43.212005Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:43.227896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:43.227967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.228031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:43.228074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:43.228083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.228802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.228830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:43.228886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.228896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:43.228901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:43.228907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:43.229463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.229481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:43.229486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:43.230049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.230062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.230069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.230076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.230690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2024-11-21T17:50:43.231095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:43.231161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:43.231346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:43.231373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:43.231381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.231437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:43.231444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:43.231479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:43.231491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:43.231972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:43.231982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:43.232023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:43.232028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:43.232104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:43.232111Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:43.232123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:43.232128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.232134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:43.232139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:43.232144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:43.232148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:43.232158Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:43.232164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:43.232168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... hard: 72057594046678944 2024-11-21T17:51:01.226240Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:01.226283Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:51:01.226320Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:01.226325Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T17:51:01.226330Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T17:51:01.226429Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:51:01.226437Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:51:01.226654Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:01.226672Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:01.226677Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:51:01.226682Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:51:01.226688Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:51:01.226887Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:01.226900Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:01.226904Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:51:01.226908Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:51:01.226912Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:51:01.226924Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T17:51:01.227605Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 266 } } 2024-11-21T17:51:01.227622Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:51:01.227641Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 266 } } 2024-11-21T17:51:01.227654Z node 72 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 266 } } 2024-11-21T17:51:01.228158Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:51:01.228171Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:51:01.228190Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:51:01.228198Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:51:01.228206Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T17:51:01.228217Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:01.228221Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:51:01.228226Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:51:01.228251Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2024-11-21T17:51:01.228516Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1002 2024-11-21T17:51:01.228534Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:51:01.228917Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:51:01.228948Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:51:01.228967Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:51:01.228974Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:51:01.228991Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:51:01.228995Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:51:01.229001Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T17:51:01.229006Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:51:01.229011Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:51:01.229016Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:51:01.229040Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T17:51:01.229619Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T17:51:01.229629Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T17:51:01.229681Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:51:01.229699Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:51:01.229704Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:398:2373] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:51:01.229767Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:01.229814Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 56us result status StatusSuccess 2024-11-21T17:51:01.229923Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 
TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] Test command err: 2024-11-21T17:49:13.975322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:13.975868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:13.975895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001d04/r3tmp/tmpRusCsi/pdisk_1.dat 2024-11-21T17:49:14.084556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.101574Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:14.143996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:14.144022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:14.154622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:14.258693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:14.273268Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:49:14.273475Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:49:14.273554Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:49:14.273607Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:14.282693Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:49:14.282900Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:14.282929Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:49:14.283107Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:49:14.283116Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:49:14.283123Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:49:14.283175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:49:14.286826Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:49:14.286901Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:49:14.286924Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:49:14.286930Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:49:14.286935Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:49:14.286941Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.287084Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.287092Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.287232Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:49:14.287254Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:49:14.287266Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.287271Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.287278Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:49:14.287285Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.287292Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:49:14.287299Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.287306Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:49:14.287310Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:49:14.287314Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:49:14.287320Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:49:14.287339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:49:14.287343Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:49:14.287366Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:49:14.287428Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:49:14.287438Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:49:14.287458Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:49:14.287468Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:49:14.287472Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:49:14.287478Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:49:14.287481Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.287528Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:49:14.287532Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:49:14.287535Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:49:14.287539Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.287550Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:49:14.287553Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:49:14.287557Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:49:14.287560Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.287565Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:49:14.287824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:49:14.287832Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:49:14.298138Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:49:14.298171Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:49:14.298178Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:49:14.298217Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:49:14.298238Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:49:14.472130Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.472155Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:49:14.472164Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:49:14.472184Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:49:14.472190Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:49:14.472216Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:49:14.472262Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:49:14.472268Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:49:14.472274Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:49:14.472955Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:49:14.472976Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:49:14.473105Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.473110Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:49:14.473117Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:49:14.473125Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:49:14.473130Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:49:14.473139Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:49.391565Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:49.433510Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:49.433552Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:49.444269Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:49.548269Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:49.755513Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:706:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:49.755536Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:716:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:49.755544Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:49.756249Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:50:49.943800Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:720:2597], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:49.988413Z node 17 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xj7wv19qvqxpgsjy10nvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=17&id=N2JiYWNjMjgtNjhlMTA4MzItOWVhODcxZTEtNjM5ZjUyMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:50.292095Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:50.292137Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:50:50.292148Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001d04/r3tmp/tmphd3oVv/pdisk_1.dat 2024-11-21T17:50:50.370565Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:50.384070Z node 18 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:50.426097Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:50.426133Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:50.436693Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:50.540731Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:50.747729Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:703:2587], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:50.747755Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:714:2592], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:50.747765Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:50.748655Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:50:50.936794Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [18:717:2595], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:51.053463Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xj8vvfg47xawcw5x5qkpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YmRiYTkxMDEtYjZhYjYwMzMtNzM5ZmExYTItNDZlNjUxZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.149771Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xj962d1b9v5caj122agc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZTE5M2FjZi02YjY5YjBjYS05MWQ3MDhjNi1lMWEzODk3ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.284820Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xj99199xaz2h654d2ta9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YTJmYjgyYWUtZGZlZTE1Y2YtYTFhOTdhMTgtZDg0YWNiNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.388735Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xj9dcdq9gex3ztc1a1wq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NTVkZGRiYzktNWZkNjljY2UtMTg4Nzk2M2MtMTEzZDliZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.476483Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xj9gj929gpj1fntswy241, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YWMyNmQ1NGItNmFlZTE0MjYtZjA4MGI5MDItNWQwZmRhOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.561275Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xj9k56rzy0hbceftxt9te, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OWViNGZiYTMtYzdiMDVjM2MtY2Q3M2RlMi0xNmQyMmRkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.649242Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xj9nv4h9bdrq6875ttz1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ODE1YTg5N2YtYTYzNTZjYzYtY2MzZDM0OWItZTU1OWRlYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.756097Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xj9rjf3818vw79w4nns50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZDNjZmY0MWEtNDNjM2ZlNmItOTlkMTI3MDAtYzBmMzA1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.833384Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd7xj9vx9xe9rsh6t6tmnqj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YjBmZmZmZmEtNWJiNTg2YzQtYmQyMWFhZmYtNTYwMDZjMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:50:51.911979Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xj9ya2b0fm0w8w3679mya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MTMzZjE4ZDMtZTFkOWM4YjAtYjNlN2RjZDYtMmNlNDlhMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
waiting for stats after upsert 2024-11-21T17:50:54.988310Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:50:54.988333Z node 18 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1505 LastUpdateTime: 1505 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2868 Memory: 17425112 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1505 LastUpdateTime: 1505 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1234 Memory: 124596 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1505 LastUpdateTime: 1505 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1234 Memory: 124596 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2024-11-21T17:51:01.044705Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd7xjjx24hrvgn4f564t8brj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NTU0ODc0ZWYtMzRkMTY0OGYtNjkyMWM2YzYtNTFkNzM1MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> TExtSubDomainTest::DeclareAndLs [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> YdbTableBulkUpsert::NotNulls >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> YdbScripting::MultiResults ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2024-11-21T17:51:01.780577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791576519213120:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:01.780596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c46/r3tmp/tmpLAnlqn/pdisk_1.dat 2024-11-21T17:51:01.840350Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4361 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:01.852301Z node 1 :TX_PROXY DEBUG: actor# [1:7439791576519213347:2100] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:01.852338Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791576519213610:2245] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:01.852392Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791576519213370:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:01.852407Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791576519213370:2114], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:01.852483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:01.852855Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213069:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791576519213615:2246] 2024-11-21T17:51:01.852880Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791576519213069:2049] Subscribe: subscriber# [1:7439791576519213615:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:01.852896Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213072:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791576519213616:2246] 2024-11-21T17:51:01.852899Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791576519213072:2052] Subscribe: subscriber# [1:7439791576519213616:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:01.852905Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213075:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791576519213617:2246] 2024-11-21T17:51:01.852907Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791576519213075:2055] Subscribe: subscriber# [1:7439791576519213617:2246], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:01.852918Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213615:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791576519213069:2049] 2024-11-21T17:51:01.852922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213616:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791576519213072:2052] 2024-11-21T17:51:01.852926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213617:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791576519213075:2055] 2024-11-21T17:51:01.852932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791576519213612:2246] 2024-11-21T17:51:01.852939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791576519213613:2246] 2024-11-21T17:51:01.852955Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791576519213611:2246][/dc-1] Set up state: owner# [1:7439791576519213370:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:01.853012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791576519213614:2246] 2024-11-21T17:51:01.853023Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791576519213611:2246][/dc-1] Path was already updated: owner# [1:7439791576519213370:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:01.853031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213615:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213612:2246], cookie# 1 2024-11-21T17:51:01.853034Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213616:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213613:2246], cookie# 1 2024-11-21T17:51:01.853037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213617:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213614:2246], cookie# 1 2024-11-21T17:51:01.853043Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213069:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791576519213615:2246] 2024-11-21T17:51:01.853050Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213069:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213615:2246], cookie# 1 2024-11-21T17:51:01.853055Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213072:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791576519213616:2246] 2024-11-21T17:51:01.853057Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213072:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213616:2246], cookie# 1 2024-11-21T17:51:01.853077Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213075:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791576519213617:2246] 2024-11-21T17:51:01.853084Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213075:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213617:2246], cookie# 1 2024-11-21T17:51:01.856295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213615:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791576519213069:2049], cookie# 1 2024-11-21T17:51:01.856311Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213616:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791576519213072:2052], cookie# 1 2024-11-21T17:51:01.856313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213617:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791576519213075:2055], cookie# 1 2024-11-21T17:51:01.856320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791576519213612:2246], cookie# 1 2024-11-21T17:51:01.856326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:01.856330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791576519213613:2246], cookie# 1 2024-11-21T17:51:01.856337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:01.856342Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791576519213614:2246], cookie# 1 2024-11-21T17:51:01.856344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Unexpected sync response: sender# [1:7439791576519213614:2246], cookie# 1 2024-11-21T17:51:01.861203Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791576519213370:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:01.861288Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791576519213370:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Ta ... dleNotify: self# [1:7439791576519213370:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2024-11-21T17:51:01.932942Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791576519213370:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439791576519213670:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1732211461981 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:01.932959Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791576519213370:2114], cacheItem# { Subscriber: { Subscriber: [1:7439791576519213670:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1732211461981 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T17:51:01.933000Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791576519213677:2291], recipient# [1:7439791576519213669:2289], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:01.933016Z node 1 :TX_PROXY INFO: Actor# [1:7439791576519213669:2289] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2024-11-21T17:51:01.933664Z node 1 :TX_PROXY DEBUG: actor# [1:7439791576519213347:2100] Handle TEvNavigate describe path /dc-1 2024-11-21T17:51:01.933676Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791576519213679:2293] HANDLE EvNavigateScheme /dc-1 
2024-11-21T17:51:01.933689Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791576519213370:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:01.933704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7439791576519213370:2114], cookie# 4 2024-11-21T17:51:01.933717Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213615:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213612:2246], cookie# 4 2024-11-21T17:51:01.933746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213616:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213613:2246], cookie# 4 2024-11-21T17:51:01.933749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213617:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213614:2246], cookie# 4 2024-11-21T17:51:01.933753Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213072:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213616:2246], cookie# 4 2024-11-21T17:51:01.933758Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213075:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213617:2246], cookie# 4 2024-11-21T17:51:01.933761Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791576519213069:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791576519213615:2246], cookie# 4 2024-11-21T17:51:01.933762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213616:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7439791576519213072:2052], cookie# 4 2024-11-21T17:51:01.933765Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213617:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7439791576519213075:2055], cookie# 4 2024-11-21T17:51:01.933768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791576519213615:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7439791576519213069:2049], cookie# 4 2024-11-21T17:51:01.933771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7439791576519213613:2246], cookie# 4 2024-11-21T17:51:01.933774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:01.933777Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7439791576519213614:2246], cookie# 4 2024-11-21T17:51:01.933780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:01.933793Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7439791576519213612:2246], cookie# 4 2024-11-21T17:51:01.933799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791576519213611:2246][/dc-1] Unexpected sync response: sender# [1:7439791576519213612:2246], cookie# 4 2024-11-21T17:51:01.933802Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791576519213370:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2024-11-21T17:51:01.933811Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791576519213370:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439791576519213611:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211461974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:01.933834Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791576519213370:2114], cacheItem# { Subscriber: { Subscriber: [1:7439791576519213611:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732211461974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2024-11-21T17:51:01.933870Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791576519213680:2294], recipient# [1:7439791576519213679:2293], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:01.933879Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791576519213679:2293] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:51:01.933895Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791576519213679:2293] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2024-11-21T17:51:01.934006Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791576519213679:2293] Handle TEvDescribeSchemeResult Forward to# [1:7439791576519213678:2292] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211461974 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732211461974 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1732211461981 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057... (TRUNCATED) |82.4%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] Test command err: 2024-11-21T17:50:44.769956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:44.770474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:44.770509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001538/r3tmp/tmplY591I/pdisk_1.dat 2024-11-21T17:50:44.870501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.885990Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:44.927640Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:44.927880Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:44.927917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:44.927932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:44.938434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:45.041623Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:50:45.041647Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:50:45.041676Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:50:45.050651Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:50:45.050940Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:50:45.050956Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:50:45.051014Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:50:45.051068Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:50:45.051085Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:50:45.051183Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:50:45.051640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:45.051980Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:50:45.051993Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:50:45.066244Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:45.066426Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:45.066504Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:45.066554Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:45.073426Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:45.073636Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:45.073667Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:45.073836Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:45.073845Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:45.073851Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:45.073897Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:45.076682Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:45.076766Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:45.076797Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:50:45.076803Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:45.076808Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:45.076814Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:45.076977Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:45.076987Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:45.077152Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:45.077177Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:50:45.077193Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:45.077199Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:45.077206Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:50:45.077214Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:45.077223Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:45.077230Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:50:45.077236Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:45.077239Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:45.077245Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:45.077251Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:45.077272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:50:45.077277Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:45.077303Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:45.077356Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:50:45.077367Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:45.077386Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:45.077398Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:45.077402Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:50:45.077408Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:50:45.077411Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:45.077453Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:50:45.077456Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:50:45.077459Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:45.077468Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:45.077478Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:50:45.077480Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:45.077482Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:50:45.077484Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:50:45.077488Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:50:45.077636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:45.077640Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:45.077642Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:45.077648Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:50:45.077656Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:50:45.078017Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:50:45.078024Z ... 888, tableId# 2, last full compaction# 1970-01-01T00:00:04.040844Z 2024-11-21T17:51:01.846580Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T17:51:01.846587Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:51:01.846593Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T17:51:01.846603Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T17:51:01.846616Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T17:51:01.846626Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T17:51:01.846630Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:51:01.846635Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2024-11-21T17:51:01.846643Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T17:51:01.846654Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T17:51:01.846666Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 
72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:51:01.846670Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:51:01.846674Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T17:51:01.846680Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:51:01.846718Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [7:944:2745], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:01.846743Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:51:01.846747Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:51:01.846751Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2024-11-21T17:51:01.846757Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T17:51:01.846776Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [7:944:2745], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 308 } } ComputeActorStats { } 2024-11-21T17:51:01.846909Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:01.846943Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 154 } } ComputeActorStats { } 2024-11-21T17:51:01.847052Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:01.848114Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T17:51:01.848144Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T17:51:01.848150Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:01.848155Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 
txId 281474976715663 2024-11-21T17:51:01.848340Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T17:51:01.848362Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T17:51:01.848366Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:01.848370Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2024-11-21T17:51:01.878610Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T17:51:01.878641Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715667 ProcessProposeKqpTransaction 2024-11-21T17:51:01.878895Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xjkpr1th49x27pqv35bgb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTdkNTMyZTMtZDAyNGUzNWYtN2RkZmM2M2QtZjczMjAyY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2024-11-21T17:51:01.879627Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1068:2865], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:51:01.879661Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:51:01.879673Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2024-11-21T17:51:01.879680Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2024-11-21T17:51:01.879693Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T17:51:01.879713Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:01.879718Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:51:01.879724Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:01.879728Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:51:01.879742Z node 7 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T17:51:01.879748Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:01.879751Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:01.879755Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 
2024-11-21T17:51:01.879759Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:51:01.879774Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T17:51:01.879857Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1068:2865], 0} after executionsCount# 1 2024-11-21T17:51:01.879865Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1068:2865], 0} sends rowCount# 2, bytes# 72, quota rows left# 999, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:51:01.879884Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1068:2865], 0} finished in read 2024-11-21T17:51:01.879894Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:01.879897Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:51:01.879901Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:51:01.879904Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:51:01.879916Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:01.879919Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:51:01.879923Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T17:51:01.879928Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:51:01.879950Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:51:01.880245Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1068:2865], Recipient [7:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:51:01.880257Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> YdbTableBulkUpsert::NotNulls [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> YdbTableBulkUpsert::Errors >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> 
TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> YdbScripting::MultiResults [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> YdbScripting::Params ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ca2/r3tmp/tmpzlXHh4/pdisk_1.dat 2024-11-21T17:51:01.054881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:01.075423Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10182 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:51:01.096549Z node 1 :TX_PROXY DEBUG: actor# [1:7439791574998903212:2134] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:01.096578Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791574998903797:2394] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:01.096614Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791574998903432:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:01.096637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791574998903597:2253][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7439791574998903432:2148], cookie# 1 2024-11-21T17:51:01.096967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791574998903604:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791574998903601:2253], cookie# 1 2024-11-21T17:51:01.096975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791574998903605:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791574998903602:2253], cookie# 1 2024-11-21T17:51:01.096979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791574998903606:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791574998903603:2253], cookie# 1 2024-11-21T17:51:01.097271Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791570703935793:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791574998903604:2253], cookie# 1 2024-11-21T17:51:01.097277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791570703935796:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791574998903605:2253], cookie# 1 2024-11-21T17:51:01.097281Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791570703935799:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791574998903606:2253], cookie# 1 2024-11-21T17:51:01.097296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791574998903604:2253][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791570703935793:2050], cookie# 1 2024-11-21T17:51:01.097298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791574998903605:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791570703935796:2053], cookie# 1 2024-11-21T17:51:01.097301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791574998903606:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791570703935799:2056], cookie# 1 2024-11-21T17:51:01.097308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791574998903597:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791574998903601:2253], cookie# 1 2024-11-21T17:51:01.097314Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791574998903597:2253][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:01.097317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791574998903597:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791574998903602:2253], cookie# 1 2024-11-21T17:51:01.097320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791574998903597:2253][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:01.097325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791574998903597:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791574998903603:2253], cookie# 1 2024-11-21T17:51:01.097331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791574998903597:2253][/dc-1] Unexpected sync response: sender# [1:7439791574998903603:2253], cookie# 1 2024-11-21T17:51:01.097356Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791574998903432:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2024-11-21T17:51:01.098272Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791574998903432:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439791574998903597:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:01.098294Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791574998903432:2148], cacheItem# { Subscriber: { Subscriber: [1:7439791574998903597:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T17:51:01.098599Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791574998903800:2396], recipient# 
[1:7439791574998903797:2394], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:01.098610Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791574998903797:2394] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:51:01.105018Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791574998903797:2394] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2024-11-21T17:51:01.105498Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791574998903797:2394] Handle TEvDescribeSchemeResult Forward to# [1:7439791574998903796:2393] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 2024-11-21T17:51:01.105923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:01.105941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:51:01.107667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 
PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T17:51:01.108091Z node 1 :TX_PROXY DEBUG: actor# [1:7439791574998903212:2134] Handle TEvProposeTransaction 2024-11-21T17:51:01.108103Z node 1 :TX_PROXY DEBUG: actor# [1:7439791574998903212:2134] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2024-11-21T17:51:01.141823Z node 1 :TX_PROXY DEBUG: actor# [1:7439791574998903212:2134] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:51:01.142936Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:51:01.142948Z node 1 :TX_PROXY DEBUG: actor# [1:7439791574998903212:2134] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:51:01.142977Z node 1 :TX_PROXY DEBUG: actor# [1:7439791574998903212:2134] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7439791574998903854:2438] 2024-11-21T17:51:01.151304Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791574998903854:2438] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2024-11-21T17:51:01.151344Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791574998903854:2438] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:51:01.151379Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791574998903432:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [1844674407370 ... 
753] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:02.064964Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581186676470:2754][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439791576891707991:2050] 2024-11-21T17:51:02.064968Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581186676471:2754][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439791576891707994:2053] 2024-11-21T17:51:02.064971Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581186676472:2754][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439791576891707997:2056] 2024-11-21T17:51:02.064976Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581186676459:2754][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439791581186676467:2754] 2024-11-21T17:51:02.064980Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581186676459:2754][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439791581186676468:2754] 2024-11-21T17:51:02.064983Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439791581186676459:2754][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7439791576891708335:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:02.064986Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581186676459:2754][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7439791581186676469:2754] 2024-11-21T17:51:02.064990Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439791581186676459:2754][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7439791576891708335:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:02.064993Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439791576891708335:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T17:51:02.064999Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439791576891708335:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { 
Subscriber: { Subscriber: [3:7439791581186676459:2754] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:02.065006Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791576891708335:2147], cacheItem# { Subscriber: { Subscriber: [3:7439791581186676459:2754] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:02.065013Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581186676476:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7439791576891707991:2050] 2024-11-21T17:51:02.065016Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581186676477:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7439791576891707994:2053] 2024-11-21T17:51:02.065020Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581186676478:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7439791576891707997:2056] 2024-11-21T17:51:02.065024Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581186676460:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7439791581186676473:2755] 2024-11-21T17:51:02.065027Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581186676460:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7439791581186676474:2755] 2024-11-21T17:51:02.065031Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439791581186676460:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7439791576891708335:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:02.065034Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581186676460:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7439791581186676475:2755] 2024-11-21T17:51:02.065038Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439791581186676460:2755][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty 
state: owner# [3:7439791576891708335:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:02.065053Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791581186676479:2756], recipient# [3:7439791581186676456:2282], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:02.065065Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707991:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676464:2753] 2024-11-21T17:51:02.065068Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707991:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676470:2754] 2024-11-21T17:51:02.065070Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707991:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676476:2755] 2024-11-21T17:51:02.065072Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707994:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676465:2753] 2024-11-21T17:51:02.065075Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707994:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676471:2754] 2024-11-21T17:51:02.065076Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707994:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676477:2755] 2024-11-21T17:51:02.065079Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707997:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676472:2754] 2024-11-21T17:51:02.065082Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791576891707997:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7439791581186676478:2755] 2024-11-21T17:51:02.065086Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439791576891708335:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2024-11-21T17:51:02.065094Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439791576891708335:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7439791581186676460:2755] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:02.065101Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791576891708335:2147], cacheItem# { Subscriber: { Subscriber: [3:7439791581186676460:2755] DomainOwnerId: 72057594046644480 
Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:02.065120Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791581186676480:2757], recipient# [3:7439791581186676457:2283], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> SubDomainWithReboots::RootWithStoragePoolsAndTable >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2024-11-21T17:50:58.534284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:58.534836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:58.534861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001eb1/r3tmp/tmp2rGhiE/pdisk_1.dat 2024-11-21T17:50:58.633752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.650289Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:58.694668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.694713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.705440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:58.809485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.824608Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T17:50:58.824679Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:58.831888Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:58.831941Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:58.832042Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:58.832055Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:58.832059Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:58.832085Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:58.835548Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:58.835606Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:58.835625Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T17:50:58.835630Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:58.835634Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:58.835640Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.835972Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T17:50:58.836003Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:58.836952Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:58.836978Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T17:50:58.837077Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T17:50:58.837120Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:58.837126Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:58.837133Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:58.837138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:58.837174Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:58.837219Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:58.837234Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:58.837449Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:58.837470Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:58.837563Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:50:58.837570Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:50:58.837574Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:50:58.837602Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:58.837608Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:50:58.837620Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:58.837630Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T17:50:58.837634Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:50:58.837637Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:50:58.837641Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:50:58.837749Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:50:58.837757Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:50:58.837771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:50:58.837775Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:58.837779Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:50:58.837783Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:50:58.837829Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T17:50:58.837845Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:50:58.837870Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:50:58.837878Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:50:58.837976Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:50:58.837985Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:50:58.848305Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:58.848355Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:50:58.848472Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:50:58.848478Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:50:59.022647Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T17:50:59.022696Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T17:50:59.023337Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:50:59.023352Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:50:59.023399Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:50:59.023405Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:50:59.023412Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T17:50:59.023462Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:50:59.023485Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:50:59.023568Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:50:59.023572Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:59.023583Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:50:59.023592Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:50:59.023860Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:50:59.023922Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:50:59.024038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:59.024042Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:50:59.024045Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:50:59.024068Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:50:59.024081Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:50:59.024116Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:50:59.024120Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:50:59.024142Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:59.024149Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... in PlanQueue unit at 72075186224037889 2024-11-21T17:51:02.800425Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:02.800468Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:02.800546Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:02.800564Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:02.800676Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:02.800758Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:02.801083Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:02.801092Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:02.801111Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:51:02.801114Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:02.801156Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:02.801161Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:02.801166Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:02.801201Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:02.801220Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:02.801434Z node 
4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:02.801446Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2024-11-21T17:51:02.801501Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:02.801555Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:02.801741Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 1000 txid# 281474976715657} 2024-11-21T17:51:02.801751Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2024-11-21T17:51:02.801761Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:02.802077Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:02.802089Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:51:02.802095Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2024-11-21T17:51:02.802112Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:02.802121Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:02.802132Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:02.802251Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:02.802258Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:02.802265Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:02.802383Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:02.802389Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:02.802506Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:02.802511Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:02.802515Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:02.802525Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:02.802530Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:02.802538Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:02.802752Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:02.802766Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain 
[OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:02.803248Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T17:51:02.803260Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T17:51:02.803302Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:02.803333Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:02.803433Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:02.803439Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:02.804734Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:748:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:02.804755Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:758:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:02.804763Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:02.805580Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:02.806348Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:02.806364Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:02.992738Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:02.992783Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:02.993222Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:762:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:03.056398Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xjmmm556d2eq6cc6g2vck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWI5OWUwYmQtNmIzNjdlY2YtZDZkZjI3ZjctZDZkN2NmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:03.057356Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:916:2716], serverId# [4:917:2717], sessionId# [0:0:0] 2024-11-21T17:51:03.057482Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:51:03.057951Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xjmmm556d2eq6cc6g2vck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWI5OWUwYmQtNmIzNjdlY2YtZDZkZjI3ZjctZDZkN2NmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:03.058519Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xjmmm556d2eq6cc6g2vck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWI5OWUwYmQtNmIzNjdlY2YtZDZkZjI3ZjctZDZkN2NmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:03.058633Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:03.058916Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211463058886 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:03.069347Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:03.069391Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2024-11-21T17:51:03.069420Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T17:51:03.069429Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:03.069778Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2024-11-21T17:51:03.069793Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:03.070871Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:933:2726], serverId# [4:934:2727], sessionId# [0:0:0] 2024-11-21T17:51:03.072067Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:935:2728], serverId# [4:936:2729], sessionId# [0:0:0] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> CdcStreamChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> 
TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK >> CdcStreamChangeCollector::IndexAndStreamUpsert >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2024-11-21T17:51:01.698300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791575978547096:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:01.698569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c52/r3tmp/tmpBpuEvY/pdisk_1.dat 2024-11-21T17:51:01.763170Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:01.798501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:01.798527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:9947 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:01.799843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:01.800152Z node 1 :TX_PROXY DEBUG: actor# [1:7439791575978547314:2136] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:01.800173Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791575978547730:2419] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:01.800205Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791575978547336:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:01.800220Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791575978547336:2149], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:01.800301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:01.800744Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546987:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791575978547735:2420] 2024-11-21T17:51:01.800753Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546990:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791575978547736:2420] 2024-11-21T17:51:01.800766Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791575978546987:2050] Subscribe: subscriber# [1:7439791575978547735:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:01.800766Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791575978546990:2053] Subscribe: subscriber# [1:7439791575978547736:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:01.800781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546993:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791575978547737:2420] 2024-11-21T17:51:01.800784Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791575978546993:2056] Subscribe: subscriber# [1:7439791575978547737:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:01.800806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547735:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791575978546987:2050] 2024-11-21T17:51:01.800811Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547736:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791575978546990:2053] 2024-11-21T17:51:01.800826Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547737:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791575978546993:2056] 2024-11-21T17:51:01.800832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7439791575978547732:2420] 2024-11-21T17:51:01.800837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791575978547733:2420] 2024-11-21T17:51:01.800837Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546993:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791575978547737:2420] 2024-11-21T17:51:01.800846Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791575978547731:2420][/dc-1] Set up state: owner# [1:7439791575978547336:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:01.800869Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546987:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791575978547735:2420] 2024-11-21T17:51:01.800872Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546990:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791575978547736:2420] 2024-11-21T17:51:01.800881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791575978547734:2420] 2024-11-21T17:51:01.800886Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791575978547731:2420][/dc-1] Path was already updated: owner# [1:7439791575978547336:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:01.800891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547735:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791575978547732:2420], cookie# 1 2024-11-21T17:51:01.800894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547736:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791575978547733:2420], cookie# 1 2024-11-21T17:51:01.800897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547737:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791575978547734:2420], cookie# 1 2024-11-21T17:51:01.800903Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546993:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791575978547737:2420], cookie# 1 2024-11-21T17:51:01.800908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547737:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791575978546993:2056], cookie# 1 2024-11-21T17:51:01.800912Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791575978547734:2420], cookie# 1 2024-11-21T17:51:01.800916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Sync is in progress: 
cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:01.801022Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546987:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791575978547735:2420], cookie# 1 2024-11-21T17:51:01.801026Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791575978546990:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791575978547736:2420], cookie# 1 2024-11-21T17:51:01.801069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547735:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791575978546987:2050], cookie# 1 2024-11-21T17:51:01.801071Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791575978547736:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791575978546990:2053], cookie# 1 2024-11-21T17:51:01.801075Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791575978547732:2420], cookie# 1 2024-11-21T17:51:01.801078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:01.801082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791575978547733:2420], cookie# 1 2024-11-21T17:51:01.801084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791575978547731:2420][/dc-1] Unexpected sync response: sender# [1:7439791575978547733:2420], cookie# 1 2024-11-21T17:51:01.808969Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791575978547336:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:01.809037Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791575978547336:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 2024-11-21T17:51:03.077795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186224037888 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2024-11-21T17:51:03.077872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 7 2024-11-21T17:51:03.077944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186224037888 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2024-11-21T17:51:03.077986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T17:51:03.077941Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037892 not found 2024-11-21T17:51:03.078014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186224037888 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T17:51:03.078041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T17:51:03.078058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186224037888 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2024-11-21T17:51:03.078074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T17:51:03.078102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: INVALID_OWNER Origin: 72075186224037888 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 ForwardRequest { HiveTabletId: 72057594037968897 }, at schemeshard: 72057594046644480 2024-11-21T17:51:03.078112Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2024-11-21T17:51:03.078115Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Redirecting tablet deletion requests from 72075186224037888 to 72057594037968897 2024-11-21T17:51:03.078120Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: Resending tablet deletion request from 72057594046644480 to 72057594037968897 2024-11-21T17:51:03.078129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:03.078146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186224037888 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T17:51:03.078161Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T17:51:03.078165Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:03.078673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2024-11-21T17:51:03.078685Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2024-11-21T17:51:03.078741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2024-11-21T17:51:03.078754Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037893 2024-11-21T17:51:03.078765Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T17:51:03.078767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037890 2024-11-21T17:51:03.078772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2024-11-21T17:51:03.078773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037889 2024-11-21T17:51:03.078780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T17:51:03.078782Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037891 2024-11-21T17:51:03.079251Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037893 not found 2024-11-21T17:51:03.079263Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2024-11-21T17:51:03.079265Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2024-11-21T17:51:03.079268Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2024-11-21T17:51:03.079847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:03.079910Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T17:51:03.079955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:51:03.079958Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:51:03.079983Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T17:51:03.080002Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:51:03.080004Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:51:03.080017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:51:03.080161Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2024-11-21T17:51:03.081628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2024-11-21T17:51:03.081646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2024-11-21T17:51:03.081662Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:51:03.081702Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:51:03.083921Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581526130583:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439791582807145069:2101] 2024-11-21T17:51:03.083939Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439791581526130583:2050] Unsubscribe: subscriber# [4:7439791582807145069:2101], path# /dc-1/USER_0 2024-11-21T17:51:03.083946Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581526130586:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439791582807145070:2101] 2024-11-21T17:51:03.083949Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439791581526130586:2053] Unsubscribe: subscriber# [4:7439791582807145070:2101], path# /dc-1/USER_0 2024-11-21T17:51:03.083954Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581526130589:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439791582807145071:2101] 2024-11-21T17:51:03.083957Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439791581526130589:2056] Unsubscribe: subscriber# [4:7439791582807145071:2101], path# /dc-1/USER_0 2024-11-21T17:51:03.084016Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2024-11-21T17:51:03.084060Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:03.110852Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439791582807145054:2098], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.110897Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791587102113162:2454], recipient# [4:7439791587102113160:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.112195Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439791582807145054:2098], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.112247Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791587102113171:2455], recipient# [4:7439791587102113170:2330], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |82.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2024-11-21T17:51:00.730024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791573242485965:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:00.730275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001cbb/r3tmp/tmpLsAn49/pdisk_1.dat 2024-11-21T17:51:00.791341Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21137 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:00.812866Z node 1 :TX_PROXY DEBUG: actor# [1:7439791573242486179:2136] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:00.812892Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791573242486543:2383] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:00.812944Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791573242486286:2194], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:00.812955Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791573242486286:2194], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:00.813033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:00.813401Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485852:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791573242486548:2384] 2024-11-21T17:51:00.813404Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485855:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791573242486549:2384] 2024-11-21T17:51:00.813424Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791573242485855:2053] Subscribe: subscriber# [1:7439791573242486549:2384], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:00.813424Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791573242485852:2050] Subscribe: subscriber# [1:7439791573242486548:2384], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:00.813441Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485858:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791573242486550:2384] 2024-11-21T17:51:00.813445Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791573242485858:2056] Subscribe: subscriber# [1:7439791573242486550:2384], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:00.813446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486548:2384][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791573242485852:2050] 2024-11-21T17:51:00.813451Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486549:2384][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791573242485855:2053] 2024-11-21T17:51:00.813452Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485852:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791573242486548:2384] 2024-11-21T17:51:00.813455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486550:2384][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791573242485858:2056] 2024-11-21T17:51:00.813456Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485855:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7439791573242486549:2384] 2024-11-21T17:51:00.813459Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485858:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791573242486550:2384] 2024-11-21T17:51:00.813461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791573242486545:2384] 2024-11-21T17:51:00.813468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791573242486546:2384] 2024-11-21T17:51:00.813486Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791573242486544:2384][/dc-1] Set up state: owner# [1:7439791573242486286:2194], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:00.813525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791573242486547:2384] 2024-11-21T17:51:00.813529Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791573242486544:2384][/dc-1] Path was already updated: owner# [1:7439791573242486286:2194], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:00.813537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486548:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791573242486545:2384], cookie# 1 2024-11-21T17:51:00.813541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486549:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791573242486546:2384], cookie# 1 2024-11-21T17:51:00.813544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486550:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791573242486547:2384], cookie# 1 2024-11-21T17:51:00.813548Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485852:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791573242486548:2384], cookie# 1 2024-11-21T17:51:00.813553Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485855:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791573242486549:2384], cookie# 1 2024-11-21T17:51:00.813556Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791573242485858:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791573242486550:2384], cookie# 1 2024-11-21T17:51:00.813565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486548:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791573242485852:2050], cookie# 1 2024-11-21T17:51:00.813569Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7439791573242486549:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791573242485855:2053], cookie# 1 2024-11-21T17:51:00.813572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791573242486550:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791573242485858:2056], cookie# 1 2024-11-21T17:51:00.813576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791573242486545:2384], cookie# 1 2024-11-21T17:51:00.813581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:00.813584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791573242486546:2384], cookie# 1 2024-11-21T17:51:00.813587Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:00.813590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791573242486547:2384], cookie# 1 2024-11-21T17:51:00.813592Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791573242486544:2384][/dc-1] Unexpected sync response: sender# [1:7439791573242486547:2384], cookie# 1 2024-11-21T17:51:00.820874Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791573242486286:2194], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:00.820954Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791573242486286:2194], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... ndle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7439791581931736126:2597], cookie# 30 2024-11-21T17:51:03.137777Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581931735146:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7439791581931736125:2597], cookie# 30 2024-11-21T17:51:03.137784Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581931735152:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [3:7439791581931736127:2597], cookie# 30 2024-11-21T17:51:03.137794Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581931736126:2597][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7439791581931735149:2053], cookie# 30 2024-11-21T17:51:03.137798Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581931736125:2597][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7439791581931735146:2050], cookie# 30 2024-11-21T17:51:03.137802Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7439791581931736127:2597][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7439791581931735152:2056], cookie# 30 2024-11-21T17:51:03.137811Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581931736121:2597][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7439791581931736123:2597], cookie# 30 2024-11-21T17:51:03.137816Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581931736121:2597][/dc-1/USER_0] Sync is in progress: cookie# 30, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:03.137821Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581931736121:2597][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7439791581931736122:2597], cookie# 30 2024-11-21T17:51:03.137826Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581931736121:2597][/dc-1/USER_0] Sync is done: cookie# 30, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:03.137838Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581931736121:2597][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 18446744073709551615 Partial: 0 }: sender# [3:7439791581931736124:2597], cookie# 30 2024-11-21T17:51:03.137841Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791581931736121:2597][/dc-1/USER_0] Unexpected sync response: sender# [3:7439791581931736124:2597], cookie# 30 2024-11-21T17:51:03.137848Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439791581931735431:2111], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2024-11-21T17:51:03.137860Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439791581931735431:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7439791581931736121:2597] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 30 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:03.137869Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791581931735431:2111], cacheItem# { Subscriber: { Subscriber: [3:7439791581931736121:2597] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 30 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 30 IsSync: true Partial: 0 } 2024-11-21T17:51:03.137906Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791586226705002:3945], recipient# [3:7439791586226705001:3944], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.137917Z node 3 :TX_PROXY INFO: Actor# [3:7439791586226705001:3944] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2024-11-21T17:51:03.138586Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581931735152:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439791579351772026:2868] 2024-11-21T17:51:03.138589Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581931735149:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439791579351772025:2868] 2024-11-21T17:51:03.138604Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439791581931735149:2053] Unsubscribe: subscriber# [4:7439791579351772025:2868], path# /dc-1/USER_0 2024-11-21T17:51:03.138604Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439791581931735152:2056] Unsubscribe: subscriber# [4:7439791579351772026:2868], path# /dc-1/USER_0 2024-11-21T17:51:03.138622Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7439791581931735146:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [4:7439791579351772024:2868] 2024-11-21T17:51:03.138626Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7439791581931735146:2050] Unsubscribe: subscriber# [4:7439791579351772024:2868], path# /dc-1/USER_0 2024-11-21T17:51:03.138658Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2024-11-21T17:51:03.138714Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 
2024-11-21T17:51:03.179486Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439791579351772007:2862], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.179544Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791583646739899:3234], recipient# [4:7439791583646739892:2859], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.179719Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439791579351772007:2862], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.179759Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791583646739901:3235], recipient# [4:7439791583646739900:2864], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.222901Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439791579351772007:2862], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.222972Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791583646739903:3236], recipient# [4:7439791583646739902:2865], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.388324Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791581931735431:2111], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:03.388378Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791581931735431:2111], cacheItem# { Subscriber: { Subscriber: [3:7439791581931736428:2858] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:03.388423Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791586226705021:3951], recipient# [3:7439791586226705020:2301], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2024-11-21T17:50:44.779753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791504391135117:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:44.779770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:44.782743Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791503217193643:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:44.782870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:44.800186Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011b4/r3tmp/tmpWZ2CgH/pdisk_1.dat 2024-11-21T17:50:44.805366Z node 2 :PQ_READ_PROXY DEBUG: Direct 
read cache created 2024-11-21T17:50:44.824957Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15810, node 1 2024-11-21T17:50:44.837976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/0011b4/r3tmp/yandexLhk24L.tmp 2024-11-21T17:50:44.837991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/0011b4/r3tmp/yandexLhk24L.tmp 2024-11-21T17:50:44.838042Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/0011b4/r3tmp/yandexLhk24L.tmp 2024-11-21T17:50:44.838086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:44.842284Z INFO: TTestServer started on Port 20803 GrpcPort 15810 TClient is connected to server localhost:20803 PQClient connected to localhost:15810 === TenantModeEnabled() = 0 === Init PQ - start server on port 15810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:50:44.880223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:44.880273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:44.881903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:44.902923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:44.902945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:44.903870Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:50:44.904111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:44.909443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:50:44.909501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.909555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:50:44.909627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:44.909639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.910283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:50:44.910304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:50:44.910344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.910358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:50:44.910361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T17:50:44.910364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:50:44.910719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.910728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:44.910729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:50:44.910754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:44.910765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T17:50:44.910772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:50:44.911198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.911206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.911209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:50:44.911214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2024-11-21T17:50:44.911663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:44.912094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T17:50:44.912142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:44.912766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211444957, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:50:44.912797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439791504391135662 RawX2: 4294969644 } } Step: 1732211444957 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:50:44.912816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:50:44.912887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:50:44.912898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:50:44.912926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:50:44.912940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:50:44.913266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:50:44.913277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:50:44.913316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:50:44.913325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439791504391135695:2376], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T17:50:44.913331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.913339Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:50:44.913353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:50:44.913358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T17:50:44.913363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T17:50:44.913370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T17:50:44.913373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:50:44.913379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T17:50:44.913388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:50:44.913397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:50:44.913403Z node ... 98][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_13750535228264730834_v1" sender [5:7439791585400072456:2546] lock partition 0 for ReadingSession "shared/cli_5_1_13750535228264730834_v1" (Sender=[5:7439791585400072456:2546], Pipe=[5:7439791585400072459:2546], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2024-11-21T17:51:03.499173Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T17:51:03.499181Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000042s 2024-11-21T17:51:03.499449Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_13750535228264730834_v1" ClientId: "cli" PipeClient { RawX1: 7439791585400072459 RawX2: 4503621102209522 } Path: "/Root/PQ/rt3.dc1--topic1" } 2024-11-21T17:51:03.499483Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2024-11-21T17:51:03.499492Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T17:51:03.499549Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 got read request: guid# f0421d96-c7217c31-4d9f819c-f117cbda 2024-11-21T17:51:03.499825Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1 2024-11-21T17:51:03.499974Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_13750535228264730834_v1:1 with generation 1 2024-11-21T17:51:03.501716Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1732211463496 } Cookie: 18446744073709551615 } 2024-11-21T17:51:03.501741Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2024-11-21T17:51:03.501779Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 sending to client partition status 2024-11-21T17:51:03.502876Z :INFO: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2024-11-21T17:51:03.503092Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2024-11-21T17:51:03.503170Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T17:51:03.503199Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T17:51:03.503204Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2024-11-21T17:51:03.503216Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2024-11-21T17:51:03.503218Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1TEvPartitionReady. Aval parts: 1 2024-11-21T17:51:03.503229Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 performing read request: guid# 7e816c9b-1f623dbb-e3940600-fe0fd719, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T17:51:03.503282Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 7e816c9b-1f623dbb-e3940600-fe0fd719 2024-11-21T17:51:03.503836Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1732211463397 CreateTimestampMS: 1732211463395 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1732211463398 CreateTimestampMS: 1732211463395 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1732211463398 CreateTimestampMS: 1732211463395 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T17:51:03.503882Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T17:51:03.503894Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 7e816c9b-1f623dbb-e3940600-fe0fd719 has messages 1 2024-11-21T17:51:03.503924Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 read done: guid# 7e816c9b-1f623dbb-e3940600-fe0fd719, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2024-11-21T17:51:03.503937Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 response to read: guid# 7e816c9b-1f623dbb-e3940600-fe0fd719 2024-11-21T17:51:03.504022Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 Process answer. Aval parts: 0 2024-11-21T17:51:03.504162Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T17:51:03.504196Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T17:51:03.504306Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2024-11-21T17:51:03.504332Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] Returning serverBytesSize = 371 to budget 2024-11-21T17:51:03.504339Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2024-11-21T17:51:03.504435Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T17:51:03.504485Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T17:51:03.504497Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T17:51:03.504502Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2024-11-21T17:51:03.504517Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2024-11-21T17:51:03.504525Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] Returning serverBytesSize = 0 to budget 2024-11-21T17:51:03.504549Z :INFO: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] Closing read session. 
Close timeout: 0.000000s 2024-11-21T17:51:03.504555Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2024-11-21T17:51:03.504530Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2024-11-21T17:51:03.504563Z :INFO: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] Counters: { Errors: 0 CurrentSessionLifetimeMs: 8 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:03.504579Z :NOTICE: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:51:03.504585Z :DEBUG: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] [] Abort session to cluster 2024-11-21T17:51:03.504581Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 got read request: guid# d8d85c78-5c403ad8-f42177a5-2c0a7616 2024-11-21T17:51:03.504716Z :NOTICE: [] [] [c9aad080-5bbef4e2-65d0b509-e9f55b00] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:03.504975Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 grpc read done: success# 0, data# { } 2024-11-21T17:51:03.504987Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 grpc read failed 2024-11-21T17:51:03.504992Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 grpc closed 2024-11-21T17:51:03.505008Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13750535228264730834_v1 is DEAD 2024-11-21T17:51:03.505204Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7439791585400072459:2546] disconnected; active server actors: 1 2024-11-21T17:51:03.505216Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7439791585400072459:2546] client cli disconnected session shared/cli_5_1_13750535228264730834_v1 2024-11-21T17:51:03.505298Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_13750535228264730834_v1 |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> SubDomainWithReboots::DropSplittedTabletInsideWithStoragePools ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2024-11-21T17:50:59.704712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:59.705098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:59.705115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e96/r3tmp/tmpJEvgLt/pdisk_1.dat 2024-11-21T17:50:59.804635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:59.823304Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:59.824098Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2024-11-21T17:50:59.866134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:59.866171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:59.876755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:59.981318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:59.995943Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:59.996016Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.003810Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.003854Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.004009Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:00.004031Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:00.004039Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:00.004086Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.007701Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:00.007776Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.007799Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:51:00.007805Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:00.007810Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:51:00.007817Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.008072Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:00.008099Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:51:00.008113Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:51:00.008121Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.008127Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.008137Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:00.008143Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.008181Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:00.008254Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:00.008275Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:00.008566Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:00.018884Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:00.018928Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.193527Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:51:00.194416Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:00.194439Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.194594Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.194605Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.194617Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:00.194700Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:00.194748Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.194933Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.194952Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:00.195365Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:00.195515Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.195945Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:00.195958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2024-11-21T17:51:00.196086Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:00.196093Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:00.196104Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.196657Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.196673Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:00.196680Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:00.196702Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:00.196714Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:00.196726Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.197564Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:00.198013Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:00.198045Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:00.198052Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:00.199831Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:00.199886Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:00.199900Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T17:51:00.199905Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T17:51:00.220908Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:00.501819Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:00.501842Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.501888Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.501896Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.501907Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:00.501956Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2024-11-21T17:51:00.501992Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.502017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.502173Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.513236Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T17:51:00.513264Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T17:51:00.513277Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.513281Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.513305Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.513330Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], ex ... dinators count is 1 buckets per mediator 2 2024-11-21T17:51:03.366776Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:03.367036Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:03.367043Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:03.367158Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:03.367163Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:03.367169Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:03.367298Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:03.367304Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:03.367308Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:03.367321Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:03.367328Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:03.367336Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:03.367469Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:03.367684Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:03.367755Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:03.367760Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:03.369058Z node 3 
:TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:03.369095Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:03.369107Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T17:51:03.369111Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T17:51:03.390158Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:03.672532Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:03.672567Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:03.672619Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:03.672627Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:03.672634Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:03.672677Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T17:51:03.672701Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:03.672720Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:03.672844Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:03.683927Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T17:51:03.683953Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T17:51:03.683967Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:03.683988Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:03.683999Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:03.684023Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:03.684049Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T17:51:03.684064Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:03.684642Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T17:51:03.684659Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:03.685921Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:855:2684], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:03.685940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:866:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:03.685949Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:03.686721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:03.687403Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:03.910163Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:03.910778Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:869:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:03.966363Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xjng5fxccreyfjdwfzb6d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmM1MmQ2YzEtZGRjYjhlZDMtNWRmZTZhNTgtNTY5ZTRiMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:03.966549Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:953:2748], serverId# [3:954:2749], sessionId# [0:0:0] 2024-11-21T17:51:03.966607Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:03.966915Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211463966881 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:03.977420Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:03.977482Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:03.977490Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:03.988150Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xjnsaekqqnbdk7pwbr4es, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmRkZDE2NTctNGY1NzNkZGMtZWU4MDQ0NjMtZmI2MzNmNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:03.990493Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:03.990797Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732211463990762 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:04.002371Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:04.002438Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:04.002448Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.019618Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xjnt3c53rtpgnd3b0nvke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGJmZjRjMTQtZjMyZDdmMmUtY2QxNzg4NDItZjE2OGJmZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:04.019796Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:04.020117Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1732211464020084 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:04.030910Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:04.030986Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:04.030995Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.031681Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:992:2780], serverId# [3:993:2781], sessionId# [0:0:0] 2024-11-21T17:51:04.032719Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:994:2782], serverId# [3:995:2783], sessionId# [0:0:0] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> ForceDropWithReboots::ForceDelete >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2024-11-21T17:51:00.111303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:00.111879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:00.111917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e94/r3tmp/tmpIUlD27/pdisk_1.dat 2024-11-21T17:51:00.223522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.241614Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:00.284035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:00.284076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:00.294568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:00.398134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.412615Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T17:51:00.412667Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.417986Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.418032Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.418142Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:00.418159Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:00.418164Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:00.418195Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.420695Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:00.420740Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.420755Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T17:51:00.420759Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:00.420762Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:51:00.420766Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.421022Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T17:51:00.421049Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.421833Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:00.421853Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T17:51:00.421933Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T17:51:00.421965Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.421969Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.421976Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:00.421980Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.422010Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:00.422055Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:00.422070Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:00.422251Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.422269Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.422341Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:51:00.422346Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:51:00.422350Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:51:00.422372Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.422377Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:51:00.422385Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.422395Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T17:51:00.422399Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:51:00.422402Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:51:00.422405Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.422489Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:51:00.422495Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:51:00.422507Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.422509Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.422512Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:51:00.422515Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:00.422550Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T17:51:00.422563Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:00.422585Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:00.422592Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:51:00.422672Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:00.422681Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:00.433804Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:00.433852Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.434016Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:00.434027Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.610970Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T17:51:00.611034Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T17:51:00.611840Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:51:00.611854Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.611910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.611920Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.611930Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T17:51:00.611998Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:00.612032Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.612167Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:00.612172Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.612190Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.612202Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:00.612595Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:00.612697Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:51:00.612893Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.612899Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.612906Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:00.612941Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:00.612964Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.613020Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:51:00.613025Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.613058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.613066Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... ace = 0 at datashard 72075186224037890 2024-11-21T17:51:04.513266Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:04.513276Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:04.513281Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:04.513310Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:04.513323Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:04.513338Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.514480Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:51:04.514493Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:51:04.514498Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2024-11-21T17:51:04.514528Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:04.514538Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:04.514549Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:51:04.515521Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:04.515567Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T17:51:04.515575Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T17:51:04.515714Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 
0 next step 1000 2024-11-21T17:51:04.516680Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:04.516763Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:04.516777Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:04.516928Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2024-11-21T17:51:04.516937Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2024-11-21T17:51:04.518746Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:04.518770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:808:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:04.518779Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:04.519684Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:04.521219Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:04.521248Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:04.521262Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:51:04.736266Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:04.736307Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:04.736324Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:51:04.736791Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:812:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:04.807135Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xjpa63fnznb6apjgpqrc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWMxOTBhZWUtYTliYWYwOWMtOGVlZTYzMmItZDMxMWQ1YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:04.807871Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1006:2770], serverId# [4:1007:2771], sessionId# [0:0:0] 2024-11-21T17:51:04.807978Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:51:04.808387Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xjpa63fnznb6apjgpqrc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWMxOTBhZWUtYTliYWYwOWMtOGVlZTYzMmItZDMxMWQ1YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:04.808840Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xjpa63fnznb6apjgpqrc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWMxOTBhZWUtYTliYWYwOWMtOGVlZTYzMmItZDMxMWQ1YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:04.808937Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:04.809160Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211464809137 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:04.809184Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732211464809137 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:04.819589Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:04.819635Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2024-11-21T17:51:04.819659Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T17:51:04.819666Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:04.819932Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2024-11-21T17:51:04.819940Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:04.830156Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jd7xjpkmerbrba9h1rkm6ttn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTljMTQ5NGEtZTc2MDYwMDEtZTNmNjVhOWQtYTc5YjA3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:04.830303Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:04.830601Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1732211464830570 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:04.830642Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1732211464830570 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:04.830657Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1732211464830570 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:04.830668Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1732211464830570 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:04.841119Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:04.841222Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T17:51:04.841239Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:04.842357Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1059:2813], serverId# [4:1060:2814], sessionId# [0:0:0] 2024-11-21T17:51:04.843307Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1061:2815], serverId# [4:1062:2816], sessionId# [0:0:0] >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> KqpScheme::CreateTableWithDefaultSettings >> KqpConstraints::CreateTableSerialTypeForbidden >> 
KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat >> KqpScheme::SchemaVersionMissmatchWithRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2024-11-21T17:51:00.321001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:00.321471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:00.321494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e89/r3tmp/tmp4dtTpo/pdisk_1.dat 2024-11-21T17:51:00.424848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.442574Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:00.485172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:00.485204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:00.495710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:00.600207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.615641Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T17:51:00.615709Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.623418Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.623483Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.623632Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:00.623655Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:00.623662Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:00.623708Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.627508Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:00.627600Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.627626Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T17:51:00.627632Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:00.627636Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:51:00.627642Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.628161Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T17:51:00.628206Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.629575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:00.629610Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T17:51:00.629749Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T17:51:00.629808Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.629815Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.629825Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:00.629832Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.629880Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:00.629940Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:00.629960Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:00.630203Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.630231Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.630365Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:51:00.630374Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:51:00.630380Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:51:00.630419Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.630428Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:51:00.630444Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.630458Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T17:51:00.630463Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:51:00.630467Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:51:00.630471Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.630613Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:51:00.630624Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:51:00.630641Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.630645Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.630650Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:51:00.630654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:00.630708Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T17:51:00.630727Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:00.630758Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:00.630768Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:51:00.630887Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:00.630898Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:00.641258Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:00.641311Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.641517Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:00.641529Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.823317Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T17:51:00.823372Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T17:51:00.824014Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:51:00.824032Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.824096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.824103Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.824112Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T17:51:00.824176Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:00.824212Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.824432Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:00.824449Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.824480Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.824512Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:00.825017Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:00.825172Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:51:00.825456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.825465Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.825473Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:00.825526Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:00.825557Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.825646Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:51:00.825657Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.825724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.825743Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... n request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:04.973243Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:04.973654Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:04.973669Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.973694Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:51:04.973699Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:04.973747Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:04.973754Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:04.973762Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:04.973812Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:04.973838Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:04.974114Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:04.974133Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2024-11-21T17:51:04.974214Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:04.974293Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:04.974552Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 
72075186224037889 step# 1000 txid# 281474976715657} 2024-11-21T17:51:04.974567Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2024-11-21T17:51:04.974577Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:04.975021Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:04.975036Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:51:04.975042Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2024-11-21T17:51:04.975065Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:04.975074Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:04.975089Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:04.975261Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:04.975271Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:04.975279Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:04.975451Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:04.975460Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.975641Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:04.975651Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:04.975656Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:04.975668Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:04.975676Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:04.975685Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.975981Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:04.976002Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:04.976645Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T17:51:04.976660Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T17:51:04.976710Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:04.976744Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 
coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:04.976874Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:04.976883Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:04.978579Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:748:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:04.978604Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:758:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:04.978615Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:04.979660Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:04.980813Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:04.980838Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:05.174396Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.174449Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:05.174891Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:762:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:05.219524Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xjprj0d0hwh9yzj44r3ex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTcwMmNjN2YtZTc5MGQ2NC03NzU0NmQwYS01YzVlZGFlYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:05.219707Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:904:2708], serverId# [4:905:2709], sessionId# [0:0:0] 2024-11-21T17:51:05.219782Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:05.220184Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211465220145 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:05.230726Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:05.230805Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T17:51:05.230815Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:05.242475Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xjq0g3d9x77d2645tt4m8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDY0NTJmMmUtZjI4Yzg3YjgtYWZjOWQ1YzItNmQwMWRhZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:05.242648Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:05.242999Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732211465242966 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:05.243036Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1732211465242966 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:05.253466Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:05.253526Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T17:51:05.253539Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:05.254638Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:939:2739], serverId# [4:940:2740], sessionId# [0:0:0] 2024-11-21T17:51:05.255717Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:941:2741], serverId# [4:942:2742], sessionId# [0:0:0] >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::DataValidation >> KqpConstraints::SerialTypeNegative1 >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2024-11-21T17:51:00.194410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:00.194973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:00.195003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e8a/r3tmp/tmpEJQ1Po/pdisk_1.dat 2024-11-21T17:51:00.296571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.313124Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:00.355363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:00.355401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:00.365850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:00.469532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.484570Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T17:51:00.484640Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.492454Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.492518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.492663Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:00.492682Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:00.492689Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:00.492729Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.496447Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:00.496507Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.496528Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T17:51:00.496534Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:00.496539Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:51:00.496544Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.496909Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T17:51:00.496945Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.498135Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:00.498161Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T17:51:00.498255Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T17:51:00.498297Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.498303Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.498311Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:00.498316Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.498352Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:00.498403Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:00.498420Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:00.498620Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.498645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.498756Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:51:00.498763Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:51:00.498769Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:51:00.498800Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.498806Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:51:00.498816Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.498827Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T17:51:00.498832Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:51:00.498835Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:51:00.498839Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.498948Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:51:00.498956Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:51:00.498971Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.498975Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.498980Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:51:00.498984Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:00.499027Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T17:51:00.499043Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:00.499068Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:00.499077Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:51:00.499175Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:00.499184Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:00.509468Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:00.509509Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.509643Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:00.509667Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.683536Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T17:51:00.683611Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T17:51:00.684360Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:51:00.684377Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.684439Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.684446Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.684456Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T17:51:00.684516Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:00.684545Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.684656Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:00.684661Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.684677Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.684688Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:00.685033Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:00.685120Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:51:00.685282Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.685288Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:00.685293Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:00.685323Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:00.685341Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:00.685390Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:51:00.685395Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.685423Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.685431Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... awX2: 17179871629 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:04.727446Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.727526Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:04.727533Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:04.727541Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:04.727605Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:04.727634Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:04.727754Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:04.727764Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:04.727855Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:04.727919Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:04.728298Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:04.728308Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.728404Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:04.728408Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:04.728414Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:04.728618Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:04.728628Z node 4 :TX_DATASHARD DEBUG: Trying to activate 
change sender: at tablet: 72075186224037888 2024-11-21T17:51:04.728633Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:04.728650Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:04.728659Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:04.728670Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:04.728902Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:04.729187Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:04.729209Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:04.729215Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:04.730503Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:04.730540Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:04.730550Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T17:51:04.730554Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T17:51:04.751383Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:05.030350Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:05.030372Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.030403Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:05.030411Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:05.030420Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:05.030455Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T17:51:05.030479Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:05.030506Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:05.030632Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:05.041788Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T17:51:05.041834Z node 4 
:TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T17:51:05.041847Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.041853Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.041863Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.041884Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:05.041897Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T17:51:05.041908Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.042394Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T17:51:05.042406Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:05.043628Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:856:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.043646Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.043656Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.044485Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:05.045312Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.250427Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.250900Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:05.301830Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xjptk229a2w8dz4nah46v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Njg0MDAyNy04YThlYjU0NC1hNjMyMzU5NC04YTAwYTQ1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:05.301946Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:954:2749], serverId# [4:955:2750], sessionId# [0:0:0] 2024-11-21T17:51:05.301999Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:05.302245Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211465302220 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:05.312586Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:05.312632Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:05.312640Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.321754Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xjq31bcrjkn9gytyvh3hh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzBlNzg3ZjItY2ZhZDdjNWEtZmJlODRjNjItNWZmODhlZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:05.321888Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:05.322143Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732211465322115 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:05.332581Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:05.332638Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:05.332647Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.333210Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:978:2769], serverId# [4:979:2770], sessionId# [0:0:0] 2024-11-21T17:51:05.334066Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:980:2771], serverId# [4:981:2772], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:230:2060] recipient: [1:212:2140] 2024-11-21T17:49:43.507785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:43.507808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:43.507813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:43.507818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:43.507824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:43.507828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:43.507838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:43.507913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:43.519341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:43.519359Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:43.522221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:43.522301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:49:43.522326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:43.524653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:43.524824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:43.524900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:43.524937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:43.525338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:43.525553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:43.525559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:43.525584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:49:43.525589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:43.525593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:43.525604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.526570Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:342:2060] recipient: [1:17:2064] 2024-11-21T17:49:43.538537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:43.538620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.538699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:43.538746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:43.538753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.539602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:43.539623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:43.539669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.539683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:43.539687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:43.539691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:43.539961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.539967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:43.539970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:43.540351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.540371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.540376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:43.540382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:43.540962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:43.541407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:43.541455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:43.541613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:43.541633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:43.541638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:43.541684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:43.541691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:43.541720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:43.541728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:43.542046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:49:43.542051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:43.542082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:43.542085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:309:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:49:43.542137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:43.542141Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:49:43.542148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:43.542150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:43.542154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:43.542156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:43.542173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:43.542176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:43.542183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:43.542187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:43.542189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:49:43.542383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:43.542393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:49:43.542397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:49:43.542401Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:49:43.542405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:43.542415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
rocessing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:02.470708Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:02.470711Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:02.718190Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:02.718215Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:02.718230Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:02.718233Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:02.963189Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:02.963215Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:02.963230Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:02.963234Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:03.197659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:03.197686Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:03.197704Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:03.197708Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:03.467701Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:03.467736Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:03.467757Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:03.467761Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:03.728381Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:03.728417Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:03.728439Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:03.728444Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.001379Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.001413Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.001430Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.001434Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.275272Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.275311Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.275333Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.275339Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.547827Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.547858Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.547871Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.547876Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.854754Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.854782Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T17:51:04.854799Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.854803Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:04.895983Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1065:2830], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 
2024-11-21T17:51:04.896004Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:04.896030Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:04.896080Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 40us result status StatusPathDoesNotExist 2024-11-21T17:51:04.896113Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:51:04.896176Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1066:2831], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T17:51:04.896181Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:04.896188Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:04.896198Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 11us result status StatusPathDoesNotExist 2024-11-21T17:51:04.896213Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:51:04.896269Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1067:2832], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2024-11-21T17:51:04.896274Z node 7 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:04.896279Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:04.896293Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 13us result status StatusPathDoesNotExist 2024-11-21T17:51:04.896305Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> KqpScheme::DropKeyColumn |82.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2024-11-21T17:51:00.624809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:00.625349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:00.625381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e82/r3tmp/tmptwtWXv/pdisk_1.dat 2024-11-21T17:51:00.727894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.744994Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:00.787309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:00.787345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:00.797887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:00.901592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:00.919696Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T17:51:00.919768Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.925669Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.925720Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.925845Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:00.925864Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:00.925870Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:00.925906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.928816Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:00.928873Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.928892Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T17:51:00.928896Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:00.928898Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:51:00.928901Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:00.929198Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T17:51:00.929226Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:00.930079Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:00.930101Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T17:51:00.930177Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T17:51:00.930210Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:00.930216Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.930224Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:00.930229Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:00.930264Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:00.930307Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:00.930322Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:00.930479Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:00.930504Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:00.930589Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:51:00.930595Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:51:00.930601Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:51:00.930629Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:00.930635Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:51:00.930645Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:00.930654Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T17:51:00.930658Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:51:00.930663Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:51:00.930667Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:00.930752Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:51:00.930757Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:51:00.930770Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:00.930774Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:00.930778Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T17:51:00.930782Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:00.930827Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T17:51:00.930842Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:00.930868Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:00.930877Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T17:51:00.930967Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:00.930976Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:00.941327Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:00.941373Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:00.941528Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:00.941537Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:01.115800Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T17:51:01.115858Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T17:51:01.116604Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T17:51:01.116619Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:01.116673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:01.116679Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:01.116687Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T17:51:01.116738Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:01.116765Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:01.116869Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:01.116876Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:01.116892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:51:01.116902Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:01.117208Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:01.117294Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:51:01.117458Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:01.117465Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:01.117470Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:01.117501Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:01.117519Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:01.117569Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T17:51:01.117574Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:01.117603Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:01.117609Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... 05.251234Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2024-11-21T17:51:05.251293Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:05.251349Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:05.251534Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T17:51:05.251545Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2024-11-21T17:51:05.251589Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:05.251637Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:05.252140Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 1000 txid# 281474976715657} 2024-11-21T17:51:05.252152Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2024-11-21T17:51:05.252163Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:05.252581Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T17:51:05.252594Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:51:05.252600Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2024-11-21T17:51:05.252620Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:05.252631Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:05.252643Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2024-11-21T17:51:05.252693Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:05.252698Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:05.252704Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.252837Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:05.252845Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.252854Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 1000 txid# 281474976715657} 2024-11-21T17:51:05.252858Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2024-11-21T17:51:05.252864Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:51:05.253072Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2024-11-21T17:51:05.253078Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:51:05.253357Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.253373Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:05.253383Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:51:05.253402Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.253407Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:05.253412Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:05.253425Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:05.253432Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:05.253440Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.253631Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T17:51:05.253639Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2024-11-21T17:51:05.253643Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2024-11-21T17:51:05.253653Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:05.253659Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:05.253668Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T17:51:05.254325Z node 4 :TX_DATASHARD DEBUG: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:05.254361Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T17:51:05.254369Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T17:51:05.254487Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:05.254556Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:05.254593Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:05.254598Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:05.254752Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2024-11-21T17:51:05.254760Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2024-11-21T17:51:05.270937Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.270980Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:808:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.270996Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.272162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:05.273406Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.273431Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:05.273445Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:51:05.482280Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.482326Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T17:51:05.482342Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2024-11-21T17:51:05.482909Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:812:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:05.533142Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xjq1835vx57j225xbpc29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2RmZmJkYTItZTg3ZjY2M2EtNmE4MDZkZDMtZWI4ODEzY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:05.533322Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:994:2762], serverId# [4:995:2763], sessionId# [0:0:0] 2024-11-21T17:51:05.533394Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:05.533795Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211465533744 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:05.533832Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732211465533744 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T17:51:05.544355Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:05.544432Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T17:51:05.544444Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:51:05.545707Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1001:2768], serverId# [4:1002:2769], sessionId# [0:0:0] 2024-11-21T17:51:05.546761Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1003:2770], serverId# [4:1004:2771], sessionId# [0:0:0] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> KqpScheme::CreateTableWithDefaultSettings [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysComplex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2024-11-21T17:50:58.483594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:58.483998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:58.484020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ea2/r3tmp/tmp5DDrnw/pdisk_1.dat 2024-11-21T17:50:58.587171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.604337Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:58.605135Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2024-11-21T17:50:58.647062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.647099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.657758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:58.762223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.777002Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:58.777072Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:58.783559Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:58.783592Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:58.783718Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:58.783735Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:58.783740Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:58.783776Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:58.786909Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:58.786989Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:58.787016Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:50:58.787022Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:58.787026Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:58.787031Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.787345Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:58.787371Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:50:58.787385Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:50:58.787393Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:58.787399Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:58.787408Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:58.787413Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:58.787450Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:58.787508Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:58.787526Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:58.787838Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:50:58.798165Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:58.798215Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:50:58.972383Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:50:58.973023Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:50:58.973038Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.973124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:58.973133Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:50:58.973144Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:50:58.973204Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:50:58.973236Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:50:58.973396Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:58.973408Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:50:58.973669Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:50:58.973762Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:58.974061Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:50:58.974072Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2024-11-21T17:50:58.974170Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:50:58.974176Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:50:58.974184Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:58.974394Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:58.974404Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:58.974410Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:50:58.974424Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:50:58.974431Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:50:58.974439Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:58.975017Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:50:58.975377Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:50:58.975403Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:50:58.975410Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:50:58.976876Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:58.976916Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:50:58.976926Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T17:50:58.976929Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T17:50:58.997964Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:59.276366Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:50:59.276391Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:59.276441Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:59.276448Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:50:59.276457Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T17:50:59.276499Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2024-11-21T17:50:59.276531Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:50:59.276560Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:59.276721Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:59.287859Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T17:50:59.287886Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T17:50:59.287901Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:59.287907Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:59.287919Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:59.287962Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], ex ... 2024-11-21T17:51:05.197344Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:05.197427Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:05.197808Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:05.197817Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.197952Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:05.197958Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:05.197966Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.198169Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.198178Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:05.198183Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:05.198200Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:05.198209Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:05.198219Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.198446Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.198749Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:05.198775Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
2024-11-21T17:51:05.198781Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:05.200188Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:05.200222Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:05.200333Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T17:51:05.200339Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T17:51:05.221421Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:05.501454Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:05.501484Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.501522Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:05.501531Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:05.501540Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:05.501606Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T17:51:05.501637Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:05.501675Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:05.501839Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:05.513788Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T17:51:05.513814Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T17:51:05.513827Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.513833Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:05.513845Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.513871Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:05.513886Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T17:51:05.513913Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.514494Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T17:51:05.514510Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:05.515848Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:856:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.515868Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.515876Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.516744Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:05.517718Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.723813Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:05.724394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:05.800759Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xjq9b2g4785hmd9hqj6mn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzllZGVkZmEtODA2ZTE5Mi0zM2U0MmEzNy00ODBmOWVjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:05.802038Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:966:2757], serverId# [4:967:2758], sessionId# [0:0:0] 2024-11-21T17:51:05.802184Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:51:05.802849Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xjq9b2g4785hmd9hqj6mn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzllZGVkZmEtODA2ZTE5Mi0zM2U0MmEzNy00ODBmOWVjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:05.803556Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xjq9b2g4785hmd9hqj6mn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzllZGVkZmEtODA2ZTE5Mi0zM2U0MmEzNy00ODBmOWVjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:05.803690Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:05.804012Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211465803975 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:05.814529Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:05.814598Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T17:51:05.814639Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:05.814649Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.814986Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T17:51:05.814998Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.827118Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xjqjre9m2pah8jd6dpx67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZDg3MTExOC0xMWZmOTNkZC02OTVhNjllNy1jNDM1ODJmYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:05.827297Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:05.827623Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732211465827589 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:05.838107Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:05.838175Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:05.838184Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:05.838830Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1001:2782], serverId# [4:1002:2783], sessionId# [0:0:0] 2024-11-21T17:51:05.839797Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1003:2784], serverId# [4:1004:2785], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:03.833080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.833101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.833105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.833109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.833113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.833116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2024-11-21T17:50:03.833129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.833221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.843541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.843561Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.846181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.846281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.846304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.849085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.849172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.849262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.849486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.850338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.850686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.850698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.850710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.850717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.850722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.850761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:03.852505Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.873145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.873221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.873273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.873311Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.873318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.880567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.880599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.880648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.880661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.880666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.880671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.885065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.885088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.885094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.886587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.886607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.886614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.886621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.887302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.887919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.887977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.888159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.888189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.888197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.888272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.888282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.888312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.888322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.888704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.888714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.888741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.888745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.888799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.888804Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.888818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.888821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.888826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.888831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.888835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.888839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.888848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.888853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.888857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:51:03.896743Z node 164 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.896752Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.896756Z node 164 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:03.896760Z node 164 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:51:03.896764Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:51:03.896776Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:51:03.896854Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:03.896859Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:51:03.896871Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:03.897287Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:51:03.897298Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:51:03.897303Z node 164 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:51:03.897341Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T17:51:03.897368Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T17:51:03.897679Z node 164 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:03.897699Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 704374638697 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:03.897707Z node 164 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 
281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T17:51:03.897731Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T17:51:03.897744Z node 164 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T17:51:03.897748Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:51:03.897760Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:51:03.897768Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:03.897773Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T17:51:03.897780Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:51:03.897785Z node 164 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T17:51:03.897789Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T17:51:03.897798Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:03.897804Z node 164 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T17:51:03.897808Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:51:03.897811Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:51:03.897984Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.898004Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:51:03.898011Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:51:03.898046Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.898058Z node 164 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:03.898344Z node 164 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:03.898352Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:03.898392Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:51:03.898415Z node 164 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:03.898420Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [164:197:2200], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T17:51:03.898425Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [164:197:2200], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T17:51:03.898554Z node 164 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.898562Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.898570Z node 164 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:03.898575Z node 164 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:51:03.898579Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:51:03.898657Z node 164 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.898664Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.898668Z node 164 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:03.898671Z node 164 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:51:03.898675Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:03.898683Z node 164 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T17:51:03.898687Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [164:122:2148] 2024-11-21T17:51:03.898746Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:03.898751Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:51:03.898761Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:51:03.899100Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2024-11-21T17:51:03.899340Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:03.899360Z node 164 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:03.899368Z node 164 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T17:51:03.899377Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 TestWaitNotification wait txId: 1003 2024-11-21T17:51:03.899721Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:51:03.899731Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:51:03.899799Z node 164 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:51:03.899819Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:51:03.899826Z node 164 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [164:711:2673] TestWaitNotification: OK eventTxId 1003 >> KqpOlapTypes::Timestamp >> KqpConstraints::CreateTableSerialTypeForbidden [GOOD] >> KqpConstraints::AddSerialColumnForbidden >> KqpScheme::CreateTableWithPartitionAtKeysSimpleUncompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> KqpScheme::SchemaVersionMissmatchWithRead [GOOD] >> KqpScheme::SchemaVersionMissmatchWithWrite |82.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpConstraints::SerialTypeSmallSerial |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling >> YdbTableBulkUpsert::DataValidation [GOOD] >> KqpScheme::DropKeyColumn [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> KqpScheme::DropIndexDataColumn >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 >> KqpConstraints::SerialTypeNegative1 [GOOD] >> KqpConstraints::SerialTypeForNonKeyColumn >> KqpScheme::InvalidationAfterDropCreate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2024-11-21T17:50:44.817997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791505841339480:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:44.818226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:44.840376Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:44.842125Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:44.843006Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791504766438737:2256];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:44.843102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011d3/r3tmp/tmp1nJ8AN/pdisk_1.dat 2024-11-21T17:50:44.871133Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24048, node 1 2024-11-21T17:50:44.884127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/0011d3/r3tmp/yandexpJPAXA.tmp 2024-11-21T17:50:44.884141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/0011d3/r3tmp/yandexpJPAXA.tmp 2024-11-21T17:50:44.884221Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/0011d3/r3tmp/yandexpJPAXA.tmp 2024-11-21T17:50:44.884284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:44.888789Z INFO: TTestServer started on Port 31701 GrpcPort 24048 TClient is connected to server localhost:31701 PQClient connected to localhost:24048 === TenantModeEnabled() = 0 === Init PQ - start server on port 24048 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:44.918092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:44.918123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:44.919521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:44.946583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:44.946618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:44.947883Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:50:44.948182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:44.957298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:50:44.957383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.957456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:50:44.957507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:50:44.957521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.958478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:50:44.958501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:50:44.958553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.958566Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:50:44.958567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T17:50:44.958571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 2024-11-21T17:50:44.959060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:50:44.959073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T17:50:44.959076Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:50:44.959146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.959170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:50:44.959175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T17:50:44.959604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.959615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.959618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:50:44.959622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:50:44.960190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:44.960686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T17:50:44.960734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:50:44.961278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211445006, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:50:44.961323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439791505841340016 RawX2: 4294969640 } } Step: 1732211445006 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:50:44.961346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:50:44.961411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T17:50:44.961424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:50:44.961456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:50:44.961486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:50:44.961884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:50:44.961895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:50:44.961943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:50:44.961953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439791505841340061:2378], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T17:50:44.961961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:44.961966Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T17:50:44.961978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T17:50:44.961986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:50:44.961991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T17:50:44.961994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:50:44.961997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T17:50:44.961999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T17:50:44.962010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:50:44.962021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T17:50:44.962024Z node ... 
86224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_6672238805596438178_v1" ClientId: "cli" PipeClient { RawX1: 7439791594745396290 RawX2: 4503621102209550 } Path: "/Root/PQ/rt3.dc1--topic1" } 2024-11-21T17:51:05.774638Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T17:51:05.774749Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1 2024-11-21T17:51:05.774771Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_6672238805596438178_v1:1 with generation 1 2024-11-21T17:51:05.774943Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2024-11-21T17:51:05.775026Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 got read request: guid# 57b1a95b-60aab0ee-381c2ba6-dc81144c 2024-11-21T17:51:05.776308Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1732211465768 } Cookie: 18446744073709551615 } 2024-11-21T17:51:05.776326Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2024-11-21T17:51:05.776357Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 sending to client partition status 2024-11-21T17:51:05.776787Z :INFO: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2024-11-21T17:51:05.777622Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2024-11-21T17:51:05.777690Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T17:51:05.777704Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T17:51:05.777710Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2024-11-21T17:51:05.777724Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2024-11-21T17:51:05.777726Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1TEvPartitionReady. Aval parts: 1 2024-11-21T17:51:05.777736Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 performing read request: guid# ebe9adec-33c68fc5-ade942dd-5807fe9, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T17:51:05.777766Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid ebe9adec-33c68fc5-ade942dd-5807fe9 2024-11-21T17:51:05.778210Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1732211465666 CreateTimestampMS: 1732211465666 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1732211465667 CreateTimestampMS: 1732211465666 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1732211465667 CreateTimestampMS: 1732211465666 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T17:51:05.778249Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T17:51:05.778272Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid ebe9adec-33c68fc5-ade942dd-5807fe9 has messages 1 2024-11-21T17:51:05.778315Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 read done: guid# ebe9adec-33c68fc5-ade942dd-5807fe9, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2024-11-21T17:51:05.778332Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 response to read: guid# ebe9adec-33c68fc5-ade942dd-5807fe9 2024-11-21T17:51:05.778422Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 Process answer. Aval parts: 0 2024-11-21T17:51:05.779438Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T17:51:05.779472Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2024-11-21T17:51:05.779567Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2024-11-21T17:51:05.779585Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] Returning serverBytesSize = 371 to budget 2024-11-21T17:51:05.779590Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2024-11-21T17:51:05.779699Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T17:51:05.779744Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T17:51:05.779753Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T17:51:05.779758Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2024-11-21T17:51:05.779769Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2024-11-21T17:51:05.779774Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] Returning serverBytesSize = 0 to budget 2024-11-21T17:51:05.779804Z :INFO: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] Closing read session. 
Close timeout: 0.000000s 2024-11-21T17:51:05.779811Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2024-11-21T17:51:05.779819Z :INFO: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 12 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:05.779841Z :NOTICE: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:51:05.779848Z :DEBUG: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] [] Abort session to cluster 2024-11-21T17:51:05.779845Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2024-11-21T17:51:05.779893Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 got read request: guid# 9ff09beb-e0064f5f-fea101c9-17f83732 2024-11-21T17:51:05.779943Z :NOTICE: [] [] [6324958f-cd1f4e3a-e23422be-704db36b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:05.780087Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 grpc read done: success# 0, data# { } 2024-11-21T17:51:05.780097Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 grpc read failed 2024-11-21T17:51:05.780101Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 grpc closed 2024-11-21T17:51:05.780114Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_6672238805596438178_v1 is DEAD 2024-11-21T17:51:05.780351Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_6672238805596438178_v1 2024-11-21T17:51:05.780496Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7439791594745396290:2574] disconnected; active server actors: 1 2024-11-21T17:51:05.780509Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7439791594745396290:2574] client cli disconnected session shared/cli_5_1_6672238805596438178_v1 2024-11-21T17:51:06.043613Z node 5 :KQP_EXECUTER ERROR: ActorId: [5:7439791599040363624:2581] TxId: 281474976715699. Ctx: { TraceId: 01jd7xjqst19m6ss277gkz1j9n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZGZhNTJmMmEtZjIxZjAwMDUtNTdlMmM5MTgtZWVlMjU1MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 2024-11-21T17:51:06.044851Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7439791599040363628:2581], TxId: 281474976715699, task: 2. Ctx: { TraceId : 01jd7xjqst19m6ss277gkz1j9n. SessionId : ydb://session/3?node_id=5&id=ZGZhNTJmMmEtZjIxZjAwMDUtNTdlMmM5MTgtZWVlMjU1MzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [5:7439791599040363624:2581], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |82.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |82.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling >> KqpScheme::CreateTableWithReadReplicasUncompat >> KqpScheme::ChangefeedAwsRegion >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] >> KqpOlapTypes::Timestamp [GOOD] >> KqpOlapTypes::TimestampCmpErr >> KqpScheme::MoveTableWithSerialTypes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2024-11-21T17:51:00.258515Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b2a/r3tmp/tmps58NXG//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T17:51:00.260010Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 
GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:51:01.341572Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b2a/r3tmp/tmps58NXG//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T17:51:01.342644Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:51:02.471403Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b2a/r3tmp/tmps58NXG//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2024-11-21T17:51:02.474310Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 
BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:51:03.562661Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b2a/r3tmp/tmps58NXG//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 4 2024-11-21T17:51:03.563886Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 
BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:51:04.764079Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b2a/r3tmp/tmps58NXG//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 5 2024-11-21T17:51:04.764331Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 
0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:51:05.869020Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b2a/r3tmp/tmps58NXG//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 6 2024-11-21T17:51:05.871098Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 
BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |82.5%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::CreateTableWithPartitionAtKeysSimpleCompat [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysSigned >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::DataValidation [GOOD] Test command err: 2024-11-21T17:51:02.254615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791580897948836:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.254892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001207/r3tmp/tmpsdqHRf/pdisk_1.dat 2024-11-21T17:51:02.309995Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29448, node 1 2024-11-21T17:51:02.336685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:02.336696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:02.336699Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:02.336743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10334 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:51:02.355438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:02.355468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:51:02.357304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:02.385791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.387021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:02.387033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.388089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:02.388168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:02.388173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:51:02.388776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:02.388790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:02.389075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:02.389226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.390228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211462436, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:02.390242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:02.390316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:02.390920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:02.390983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:02.391000Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:02.391011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:02.391021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:02.391037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:02.391514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:02.391528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:02.391533Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:02.391546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:51:02.608068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestNotNullColumns, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.608262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:51:02.608435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:02.608449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.609330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestNotNullColumns 
2024-11-21T17:51:02.609404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:02.609470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:02.609495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:51:02.609660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:02.609673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:02.609678Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:02.609711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:02.609720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:02.609722Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:51:02.610214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:02.612157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:51:02.612197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:51:02.616806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:51:02.632602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:51:02.632613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:51:02.632640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:51:02.633145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:51:02.633977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211462681, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:02.633989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211462681 2024-11-21T17:51:02.634016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:51:02.635824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:02.635922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2024-11-21T17:51:02.635934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:51:02.636491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:02.636500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:02.636505Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:51:02.636546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:02.636548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:02.636549Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 720575 ... 17:51:06.074028Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:06.077679Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.077796Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:06.077803Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.078205Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:06.078247Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:06.078250Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:06.079018Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:06.079022Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:06.079563Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.080450Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211466125, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:06.080463Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:06.080534Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:06.080894Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:06.080935Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:06.080944Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:06.080955Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:06.080963Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:06.080974Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:51:06.081365Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:06.081388Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:06.081392Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:06.081405Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T17:51:06.083397Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:06.293420Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestInvalidData, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.293607Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:51:06.293803Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:06.293810Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.300737Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestInvalidData 2024-11-21T17:51:06.300840Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:06.300909Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:06.300932Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:51:06.301538Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.301545Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.301548Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:06.301598Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.301600Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.301602Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:51:06.309163Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:51:06.309207Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:51:06.311643Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:06.311841Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:51:06.336740Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:51:06.336754Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:51:06.336787Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:51:06.344816Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:51:06.346329Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211466391, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:06.346339Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211466391 2024-11-21T17:51:06.346374Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:51:06.357841Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:06.357978Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:06.357996Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:51:06.358643Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.358655Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.358659Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:51:06.358712Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.358715Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 
281474976715658 2024-11-21T17:51:06.358717Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:51:06.360656Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211466391 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 385 } } 2024-11-21T17:51:06.360998Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:51:06.361008Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.361014Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:51:06.372591Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:51:06.372627Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:06.372644Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:26129 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid DyNumber string representation >> KqpOlapScheme::DropColumn >> CdcStreamChangeCollector::NewImage [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysComplex [GOOD] >> KqpScheme::CreateTableWithFamiliesRegular >> KqpConstraints::AddSerialColumnForbidden [GOOD] >> KqpConstraints::CreateTableWithDefaultForbidden >> KqpScheme::CreateFamilyWithCompressionLevel >> KqpScheme::SchemaVersionMissmatchWithWrite [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexRead >> KqpConstraints::SerialTypeSmallSerial [GOOD] >> KqpConstraints::SerialTypeSerial2 >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2024-11-21T17:51:02.710446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791579663039794:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.710518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001213/r3tmp/tmpNsXgui/pdisk_1.dat 2024-11-21T17:51:02.775216Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7738, node 1 2024-11-21T17:51:02.793421Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:02.793448Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:02.793450Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:02.793486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:02.810289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:02.810312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:02.811889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:02.818024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.818985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:02.818997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.819635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:02.819689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:02.819698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T17:51:02.820045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:02.820138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:02.820156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:51:02.820548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.821430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211462870, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:02.821441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:02.821491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:02.821897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:02.821939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:02.821953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:02.821960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:02.821973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:51:02.821982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:51:02.822401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:51:02.822420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:51:02.822424Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:02.822437Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:51:02.980651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791579663040503:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:02.980674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:03.051371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.051493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:51:03.051703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.051713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.052367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestTable 2024-11-21T17:51:03.052414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:03.052459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:03.052476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:51:03.052525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:51:03.052678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:03.052688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:03.052692Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:03.052719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:03.052726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:03.052728Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:51:03.054185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:51:03.054208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:51:03.054617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:51:03.106720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:51:03.106731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:51:03.106751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T17:51:03.107250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:51:03.108080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211463157, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:03.108091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211463157 2024-11-21T17:51:03.108116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:51:03.108582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:03.108657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:03.108689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:51:03.108898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:03.108913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:03.108917Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, ... MESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:51:06.579979Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:51:06.580427Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.580445Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.580451Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:06.580532Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.580536Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.580538Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:51:06.580554Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.580556Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.580558Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T17:51:06.580570Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.580573Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.580574Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2024-11-21T17:51:06.580593Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:06.592825Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:51:06.592885Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:51:06.593013Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:51:06.593019Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 2 -> 3 2024-11-21T17:51:06.595758Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:51:06.595970Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:2 ProgressState at tabletId# 72057594046644480 2024-11-21T17:51:06.617737Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:51:06.617750Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:51:06.617785Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:51:06.623195Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:51:06.659026Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:2 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:51:06.659041Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:51:06.659087Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 3 -> 128 2024-11-21T17:51:06.664863Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:2 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:51:06.666140Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211466713, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:06.666159Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211466713 2024-11-21T17:51:06.666199Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:51:06.666223Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTableIndex TPropose 
operationId#281474976715658:1 HandleReply TEvOperationPlan, step: 1732211466713, at schemeshard: 72057594046644480 2024-11-21T17:51:06.666257Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T17:51:06.666286Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:2 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211466713 2024-11-21T17:51:06.666293Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 129 2024-11-21T17:51:06.667891Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:06.668055Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:06.668071Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T17:51:06.668090Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 1/3 2024-11-21T17:51:06.668120Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:51:06.668147Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:2 ProgressState at tablet: 72057594046644480 2024-11-21T17:51:06.668711Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.668719Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.668725Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:51:06.668768Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.668770Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.668770Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:51:06.668780Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.668784Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:06.668785Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2024-11-21T17:51:06.668794Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:06.668796Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 
2024-11-21T17:51:06.668796Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:51:06.669943Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1732211466713 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 482 } } 2024-11-21T17:51:06.670178Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211466713 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 248 } } 2024-11-21T17:51:06.670206Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:51:06.670212Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:51:06.670218Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 129 -> 240 2024-11-21T17:51:06.670249Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:51:06.670251Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.670252Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:51:06.671571Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T17:51:06.671592Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 2/3 2024-11-21T17:51:06.671622Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:51:06.671628Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/3 2024-11-21T17:51:06.671640Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:06.671668Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T17:51:06.671671Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T17:51:06.723830Z node 10 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable'unknown table |82.5%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScheme::DropNonExistingExternalDataSource |82.5%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2024-11-21T17:51:01.263676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:01.264311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:01.264342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e7d/r3tmp/tmp7sOwUs/pdisk_1.dat 2024-11-21T17:51:01.383488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:01.405086Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:01.405927Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2024-11-21T17:51:01.453869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:01.453911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:01.464494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:01.569580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:01.584210Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:51:01.584385Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:01.591382Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:01.591423Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:01.591590Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:01.591614Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:01.591623Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:01.591668Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:01.595378Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:01.595473Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:01.595504Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:51:01.595510Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:01.595515Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:51:01.595521Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:01.595851Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:01.595877Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:51:01.595891Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:51:01.595900Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:01.595907Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:01.595918Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:01.595923Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:01.595966Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:01.596029Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:01.596049Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:01.596425Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:01.606745Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:01.606797Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:01.784092Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:51:01.784991Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:01.785017Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:01.785163Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:01.785175Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:01.785188Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:01.785308Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T17:51:01.785349Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:01.785523Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:01.785542Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T17:51:01.785921Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:01.786052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:01.786428Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:01.786439Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2024-11-21T17:51:01.786570Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:01.786576Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:01.786586Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:01.786834Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:01.786844Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:01.786850Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:01.786869Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:01.786881Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:01.786892Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:01.787557Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:01.787972Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:01.788002Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T17:51:01.788010Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:01.789760Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:01.789808Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:01.789823Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T17:51:01.789829Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T17:51:01.811392Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:02.112984Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:02.113014Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:02.113048Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:02.113056Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:02.113064Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:02.113107Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2024-11-21T17:51:02.113136Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:02.113161Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:02.113305Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:02.124336Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T17:51:02.124361Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T17:51:02.124374Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:02.124379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:02.124392Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:02.124420Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], ex ... 2024-11-21T17:51:06.873867Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:06.873970Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:06.874440Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T17:51:06.874451Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:06.874590Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T17:51:06.874597Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T17:51:06.874607Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:06.874834Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:06.874844Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:06.874850Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T17:51:06.874871Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:06.874881Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T17:51:06.874892Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:06.875104Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:06.875459Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T17:51:06.875488Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
2024-11-21T17:51:06.875494Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:06.884799Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:06.884870Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T17:51:06.884886Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T17:51:06.884892Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T17:51:06.906743Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:07.193557Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:07.193589Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:07.193633Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:07.193643Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:07.193654Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T17:51:07.193719Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T17:51:07.193758Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:07.193804Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:07.193992Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:07.206577Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T17:51:07.206610Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T17:51:07.206629Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:07.206636Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:07.206651Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:07.206686Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:51:07.206705Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T17:51:07.206723Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:07.210536Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T17:51:07.210571Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T17:51:07.212613Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:856:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.212645Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.212655Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.213595Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:07.214728Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:07.463339Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:07.464531Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:07.542607Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xjrycbmsskatgrhranfyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmM0ZDRlYmMtZDEyYmZlOGItODY4MTZmOTYtODNiMTRlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:07.543850Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:966:2757], serverId# [4:967:2758], sessionId# [0:0:0] 2024-11-21T17:51:07.544031Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:51:07.544811Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xjrycbmsskatgrhranfyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmM0ZDRlYmMtZDEyYmZlOGItODY4MTZmOTYtODNiMTRlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:07.545519Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xjrycbmsskatgrhranfyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmM0ZDRlYmMtZDEyYmZlOGItODY4MTZmOTYtODNiMTRlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:07.545678Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:07.546112Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732211467546066 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:07.556648Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:07.556701Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T17:51:07.556735Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:07.556746Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:07.557110Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T17:51:07.557122Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:07.570428Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xjs96eq4c64bmy8z468hf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Mzc2OWE3NDYtZjllMzQ3NmMtNjUyYzEwYTgtZjUyZTdiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:07.570611Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:07.570990Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732211467570950 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T17:51:07.584321Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:07.584419Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T17:51:07.584438Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:07.585463Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1001:2782], serverId# [4:1002:2783], sessionId# [0:0:0] 2024-11-21T17:51:07.586675Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1003:2784], serverId# [4:1004:2785], sessionId# [0:0:0] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> KqpConstraints::SerialTypeForNonKeyColumn [GOOD] >> KqpConstraints::SerialTypeSerial >> KqpScheme::InvalidationAfterDropCreate [GOOD] >> KqpScheme::InvalidationAfterDropCreateCompatSchema >> KqpOlapTypes::TimestampCmpErr [GOOD] >> KqpScheme::AddChangefeed >> KqpScheme::DropIndexDataColumn [GOOD] >> KqpScheme::DropExternalDataSource >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit >> KqpScheme::CreateTableWithReadReplicasUncompat [GOOD] >> KqpScheme::CreateTableWithReadReplicasCompat >> KqpScheme::ChangefeedAwsRegion [GOOD] >> KqpScheme::ChangefeedAttributes >> KqpOlapScheme::DropColumn [GOOD] >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert >> KqpScheme::CreateTableWithFamiliesRegular [GOOD] >> KqpScheme::CreateTableWithDefaultFamily >> TSchemeShardSubDomainTest::CreateDropSolomon >> KqpConstraints::CreateTableWithDefaultForbidden [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultValue >> KqpScheme::CreateFamilyWithCompressionLevel [GOOD] >> KqpScheme::CreateExternalTableValidation >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> KqpScheme::CreateTableWithPartitionAtKeysSigned [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysUuid >> KqpScheme::MoveTableWithSerialTypes [GOOD] >> KqpScheme::PathWithNoRoot |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |82.5%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpConstraints::SerialTypeSerial2 [GOOD] >> KqpScheme::DropNonExistingExternalDataSource [GOOD] >> KqpScheme::DropResourcePool >> KqpConstraints::SerialTypeSerial4 |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> KqpScheme::SchemaVersionMissmatchWithIndexRead [GOOD] |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexWrite >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> KqpScheme::CreateTableWithReadReplicasCompat [GOOD] >> KqpScheme::DropExternalDataSource [GOOD] >> KqpScheme::CreateTableWithTtlOnIntColumn >> KqpScheme::DropExternalTable >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess >> KqpScheme::InvalidationAfterDropCreateCompatSchema [GOOD] >> KqpScheme::ChangefeedAttributes [GOOD] >> KqpConstraints::SerialTypeSerial [GOOD] >> KqpOlapScheme::DropColumnAfterAdd |82.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpConstraints::SerialTypeBigSerial >> KqpScheme::AsyncReplicationConnectionString >> KqpScheme::FamilyColumnTest >> KqpScheme::PathWithNoRoot [GOOD] >> KqpScheme::CreateExternalTableValidation [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultValue [GOOD] >> KqpScheme::CreateTableWithPartitionAtKeysUuid [GOOD] >> KqpConstraints::SerialTypeSerial4 [GOOD] >> TPQTest::TestReadRuleVersions >> KqpOlapScheme::DropColumnAfterAdd [GOOD] >> KqpScheme::CreateTableWithDefaultFamily [GOOD] >> KqpScheme::DropResourcePool [GOOD] >> KqpScheme::SchemaVersionMissmatchWithIndexWrite [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> KqpScheme::FamilyColumnTest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> KqpConstraints::AddNonColumnDoesnotReturnInternalError >> KqpConstraints::SerialTypeBigSerial [GOOD] >> KqpScheme::ModifyUnknownPermissions >> KqpScheme::CreateTableWithPgColumn >> KqpScheme::CreateTableWithDecimalColumn >> KqpScheme::AsyncReplicationConnectionString [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert Test command err: Size: 8002 Create chunk: 0.000018s Read by index: 0.000006s Iterate: 0.000006s Size: 8256 Create chunk: 0.000048s Read by index: 0.000011s Iterate: 0.000012s Size: 8532 Create chunk: 0.000015s Read by index: 0.000005s Iterate: 0.000003s Size: 7769 Create chunk: 0.000024s Read by index: 0.000008s Iterate: 0.000008s Size: 2853 Create chunk: 0.000013s Read by index: 0.000019s Iterate: 0.000006s Size: 2419 Create chunk: 0.000015s Read by index: 0.000017s Iterate: 0.000008s Size: 2929 Create chunk: 0.000011s Read by index: 0.000015s Iterate: 0.000006s Size: 2472 Create chunk: 0.000015s Read by index: 0.000020s Iterate: 0.000011s >> KqpScheme::DropExternalTable [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> KqpConstraints::SerialTypeSerial8 >> 
TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> KqpScheme::DropNonExistingResourcePool >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> KqpScheme::CreateExternalTableWithSettings >> THiveTest::TestCreate100Tablets >> KqpScheme::Int8Int16 >> KqpConstraints::IndexedTableAndNotNullColumn >> KqpScheme::CreateTableWithPgColumn [GOOD] >> KqpScheme::AsyncReplicationConnectionStringWithSsl >> KqpScheme::CreateTableWithDecimalColumn [GOOD] >> KqpOlapScheme::DropColumnErrors >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK >> KqpConstraints::SerialTypeSerial8 [GOOD] >> KqpScheme::DropDependentExternalDataSource >> KqpScheme::TouchIndexAfterMoveIndexRead >> TopicAutoscaling::ControlPlane_BackCompatibility >> KqpScheme::DropNonExistingResourcePool [GOOD] >> TopicAutoscaling::Simple_PQv1 >> TPartitionTests::GetPartitionWriteInfoError >> KqpScheme::Int8Int16 [GOOD] >> KqpScheme::ModifyUnknownPermissions [GOOD] >> KqpScheme::Int8Int16Olap >> KqpScheme::DropDependentExternalDataSource [GOOD] >> KqpScheme::AsyncReplicationConnectionStringWithSsl [GOOD] >> KqpConstraints::Utf8AndDefault >> KqpScheme::CreateExternalTableWithSettings [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexRead [GOOD] >> KqpScheme::DropResourcePoolClassifier >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TPQTest::TestReadRuleVersions [GOOD] >> KqpConstraints::IndexedTableAndNotNullColumn [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> KqpScheme::CreateTableWithTtlOnIntColumn [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] >> THiveTest::TestCreate100Tablets [GOOD] >> KqpOlapScheme::DropColumnErrors [GOOD] >> KqpScheme::ModifyPermissionsByIncorrectPaths >> KqpScheme::AsyncReplicationEndpointAndDatabase >> KqpScheme::Int8Int16Olap [GOOD] >> KqpConstraints::Utf8AndDefault [GOOD] >> KqpScheme::CreateExternalTableWithUpperCaseSettings >> KqpScheme::DropResourcePoolClassifier [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions >> TPQTest::TestPartitionedBlobFails >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn >> THiveTest::TestCreateSubHiveCreateTablet >> KqpOlapScheme::DropColumnAfterInsert >> KqpScheme::ModifyPermissionsByIncorrectPaths [GOOD] >> KqpScheme::CreateTableWithTtlOnDatetime64Column >> KqpScheme::AsyncReplicationEndpointAndDatabase [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TopicAutoscaling::CDC_Write >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] >> KqpOlapScheme::AddColumn >> KqpScheme::CreateExternalTableWithUpperCaseSettings [GOOD] >> KqpScheme::DropNonExistingResourcePoolClassifier >> KqpScheme::CreateTableWithTtlOnDatetime64Column [GOOD] >> KqpOlapScheme::AddColumn [GOOD] >> KqpScheme::DropNonExistingResourcePoolClassifier [GOOD] >> KqpScheme::CreateTableWithStoreExternalBlobs >> KqpScheme::CreateTableWithStoreExternalBlobs [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> BsControllerConfig::MergeBoxes [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit 
[GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> KqpScheme::AddChangefeed [GOOD] >> KqpScheme::AddChangefeedWhenDisabled >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> KqpScheme::OlapSharding_KeyOnly >> ForceDropWithReboots::ForceDelete [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> KqpOlapScheme::DropColumnAfterInsert [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> KqpScheme::OlapSharding_KeyOnly [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> KqpScheme::CreateResourcePool >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK >> Balancing::Balancing_ManyTopics_TopicApi >> TBlobStorageProxyTest::TestSingleFailure >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> TBlobStorageProxyTest::TestEmptyRange >> THiveTest::TestCheckSubHiveMigration >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> KqpScheme::CreateResourcePool [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] >> KqpScheme::AddChangefeedWhenDisabled [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:03.029457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.029476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.029480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.029483Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.029486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.029489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.029501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.029564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.037812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.037830Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.039669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.039773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.039798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.042481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.042549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.042617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.042771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.043398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.043614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.043622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.043630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.043634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.043638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.043664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:03.044835Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.057633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.057705Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.057752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.057783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.057790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.058308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.058325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.058356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.058362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.058365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.058368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.058663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.058671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.058674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.058950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.058956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.058960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.058964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.059381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.059729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.059775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 
5000001 2024-11-21T17:50:03.059911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.059928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.059932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.059968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.059972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.059989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.059996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.060332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.060339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.060368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.060371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.060427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.060431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.060439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.060441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.060445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.060448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.060451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.060454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.060461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.060465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.060467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:13.790609Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:51:13.790625Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:51:13.790786Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.790797Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.790803Z node 234 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:13.790808Z node 234 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:51:13.790813Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:13.790829Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:51:13.791001Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:51:13.791010Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:51:13.791016Z node 234 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:51:13.791041Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T17:51:13.791077Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T17:51:13.791173Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:13.791197Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 1005022349418 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:13.791206Z node 234 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T17:51:13.791232Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T17:51:13.791245Z node 234 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T17:51:13.791249Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:51:13.791263Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:51:13.791273Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:13.791279Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T17:51:13.791288Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:51:13.791293Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T17:51:13.791298Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T17:51:13.791307Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:13.791316Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T17:51:13.791321Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:51:13.791324Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:51:13.792139Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:51:13.792158Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:51:13.792179Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.792221Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:51:13.792250Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:51:13.792312Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:13.792710Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.792802Z node 234 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:13.792809Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:13.792859Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:51:13.792887Z node 234 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:13.792892Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [234:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T17:51:13.792898Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [234:201:2204], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T17:51:13.793070Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.793082Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.793087Z node 234 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:13.793092Z node 234 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:51:13.793097Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:51:13.793224Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.793234Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.793238Z node 234 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:13.793243Z node 234 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:51:13.793247Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:13.793258Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T17:51:13.793264Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [234:120:2146] 2024-11-21T17:51:13.793313Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:13.793318Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:51:13.793333Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:51:13.793813Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2024-11-21T17:51:13.794094Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:13.794116Z node 234 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:13.794127Z node 234 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T17:51:13.794140Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 TestWaitNotification wait txId: 1003 2024-11-21T17:51:13.794548Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:51:13.794559Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:51:13.794641Z node 234 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:51:13.794658Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:51:13.794662Z node 234 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [234:862:2799] TestWaitNotification: OK eventTxId 1003 >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2024-11-21T17:51:15.510175Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001bc2/r3tmp/tmp9Sy81J//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T17:51:15.511320Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropDependentExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 7868, MsgBus: 3162 2024-11-21T17:51:06.122883Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791597666162180:2152];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:06.123027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000faf/r3tmp/tmpKzL0ff/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7868, node 1 2024-11-21T17:51:06.180976Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:06.188379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.188383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.188385Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.188416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3162 2024-11-21T17:51:06.224423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:06.224447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:06.226546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:51:06.257775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.263335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:06.290150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:06.317751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.332630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.489351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791597666163613:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.489406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.528713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.536322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.547147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.562113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.575093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.595062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.608183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791597666164116:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.608245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.608673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791597666164121:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.609487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:06.622602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791597666164123:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
: Error: Type annotation, code: 1030
:3:47: Error: At function: KiAlterTable!
:3:47: Error: AlterTable : db.[/Root/KeyValue] Column: "Key" is a key column. Key column drop is not supported Trying to start YDB, gRPC: 2203, MsgBus: 8295 2024-11-21T17:51:07.004518Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791604420000631:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000faf/r3tmp/tmpN5RrQC/pdisk_1.dat 2024-11-21T17:51:07.008056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:07.014961Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2203, node 2 2024-11-21T17:51:07.025410Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.025421Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.025422Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.025455Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8295 TClient is connected to server localhost:8295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.104302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.104361Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.105373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:07.107133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.110816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.123504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:07.141830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.153171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.349200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791604420002029:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.349232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.352572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operati ... meOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.444470Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.454285Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.632784Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791610413533587:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.632812Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.638965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.646711Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.654663Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.716580Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.786327Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.797913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.937631Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791610413534122:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.937642Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791610413534127:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.937658Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.938347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:09.939766Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791610413534129:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:10.108522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.109921Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17156, MsgBus: 27105 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000faf/r3tmp/tmpBOKIdz/pdisk_1.dat 2024-11-21T17:51:10.409809Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:10.410147Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17156, node 5 2024-11-21T17:51:10.420174Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:10.420190Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:10.420193Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:10.420239Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27105 TClient is connected to server localhost:27105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:10.503381Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:10.503420Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:10.503772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.504458Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:10.508539Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:10.516643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:10.525448Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.556933Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:10.571691Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.678699Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791617605018116:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.678726Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.683205Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.692855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.706639Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.719318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.781899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.838791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.855127Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791617605018635:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.855153Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791617605018640:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.855159Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.855844Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.864539Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791617605018642:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:11.053731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.055326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2024-11-21T17:50:56.882474Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b1b/r3tmp/tmp0GsddU//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T17:50:56.882622Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b1b/r3tmp/tmp0GsddU//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T17:50:56.883168Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:50:56.891310Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:51:13.054896Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b1b/r3tmp/tmpDha3M7//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T17:51:13.055034Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/6fwh/001b1b/r3tmp/tmpDha3M7//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T17:51:13.055380Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 
LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T17:51:13.055418Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropNonExistingResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 30565, MsgBus: 25227 2024-11-21T17:51:08.048320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791608487757538:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:08.048335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f7c/r3tmp/tmp0NtUf8/pdisk_1.dat 2024-11-21T17:51:08.116662Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30565, node 1 2024-11-21T17:51:08.135240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:08.135258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:08.135260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:08.135304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:08.149753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:08.149789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:08.150390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25227 TClient is connected to server localhost:25227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:08.205533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.208987Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:08.211624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:08.270472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.288729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.304507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.408952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791608487758863:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.408983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.437263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.444666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.457224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.464552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.472107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.485746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.541394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791608487759383:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.541415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.541441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791608487759388:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.541995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:08.547438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791608487759390:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 22238, MsgBus: 5636 2024-11-21T17:51:08.946314Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791607242238064:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:08.951139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f7c/r3tmp/tmp9G9nOW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22238, node 2 2024-11-21T17:51:08.965244Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:08.973217Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:08.973230Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:08.973232Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:08.973278Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5636 TClient is connected to server localhost:5636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:09.048808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:09.048841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:09.049149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.049821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:09.051111Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:09.059033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.074219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:09.097010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:09.117877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.262207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791611537206758:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.262243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found ... n type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.976992Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.989242Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791616716279451:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.989280Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.989331Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791616716279456:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.990008Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.997623Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791616716279458:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:11.606821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:51:11.670146Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.728893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:51:11.805593Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.874310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.936812Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.186649Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791625306215223:2670], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:51:12.186674Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } Trying to start YDB, gRPC: 25208, MsgBus: 63836 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f7c/r3tmp/tmpfbsL9x/pdisk_1.dat 2024-11-21T17:51:12.526296Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:12.528393Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25208, node 5 2024-11-21T17:51:12.537276Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:12.537290Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:12.537291Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:12.537339Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63836 TClient is connected to server localhost:63836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:12.616469Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:12.616506Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:12.617561Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:12.619191Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:12.632814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.647965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:12.666215Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:12.684875Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:12.809165Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791622954126125:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.809187Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.814328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.820161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.831871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.839086Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.846238Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.860945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.869315Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791622954126630:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.869336Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791622954126635:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.869340Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.870023Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:12.873187Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791622954126637:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:13.527785Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:51:13.575395Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:13.616146Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:51:13.683729Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:51:13.746588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:51:13.797346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> KqpScheme::AddChangefeedNegative >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> SubDomainWithReboots::RootWithStoragePoolsAndTable [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> TPQTest::TestPartitionedBlobFails [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumn [GOOD] Test command err: Trying to start YDB, gRPC: 1864, MsgBus: 5472 2024-11-21T17:51:06.913984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791600280391797:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:06.914336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fa3/r3tmp/tmp2XYjQ1/pdisk_1.dat 2024-11-21T17:51:06.971136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1864, node 1 2024-11-21T17:51:06.990011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.990023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.990025Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.990062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5472 2024-11-21T17:51:07.013851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.013876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.015038Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.044093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.060703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.123679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.142800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.152755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.224771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791604575360648:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.224807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.272411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.289690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.298004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.309777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.326601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.338340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.357361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791604575361162:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.357404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.357495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791604575361167:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.358301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:07.364917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791604575361169:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:07.578699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64385, MsgBus: 27657 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fa3/r3tmp/tmpKHXO3m/pdisk_1.dat 2024-11-21T17:51:07.842588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:07.844413Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64385, node 2 2024-11-21T17:51:07.856248Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.856261Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.856263Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.856308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27657 TClient is connected to server localhost:27657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.931998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.932028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.932863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.933240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:07.934776Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:07.949363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.973370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
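The repeated "Resource pool default not found or you don't have access permissions" warnings above are emitted while the workload service probes the default pool before TPoolCreatorActor has finished creating it (the ESchemeOpCreateResourcePool suboperation in the same trace). As a purely illustrative, hedged sketch of what an explicitly defined pool looks like in YQL — the statement exists in recent YDB releases, but the pool name and setting names here are assumptions, not taken from this log:

    -- Hedged sketch: pool name and setting names are illustrative assumptions.
    CREATE RESOURCE POOL sample_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );

Once such a pool exists, the NOT_FOUND probe and the scheduled "doublechecking" retry seen above would not be expected for it.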
2024-11-21T17:51:08.000419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.029155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.185328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791608026817294:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.185358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.192867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose ... ent=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:12.435574Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:12.435591Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:12.435613Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:51:12.435635Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:51:12.435657Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:51:12.435676Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:51:12.435701Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:51:12.435723Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:51:12.435746Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439791625812626981:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:51:12.436221Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:51:12.436251Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:51:12.436268Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:51:12.436277Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2024-11-21T17:51:12.436294Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:51:12.436303Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:51:12.436312Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:51:12.436323Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:51:12.436340Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:51:12.436349Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:51:12.436355Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:51:12.436364Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:51:12.436425Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:51:12.436435Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:51:12.436457Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:51:12.436466Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:51:12.436479Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:51:12.436488Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:51:12.436510Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:51:12.436520Z node 6 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:51:12.436536Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:51:12.436543Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=320;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=320;columns=3; 2024-11-21T17:51:12.498397Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791625812627125:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.498416Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791625812627130:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.498418Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.499127Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:12.500727Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791625812627132:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:12.620819Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472551, txId: 18446744073709551615] shutting down 2024-11-21T17:51:12.625174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.682511Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472684, txId: 18446744073709551615] shutting down 2024-11-21T17:51:12.725436Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472712, txId: 18446744073709551615] shutting down 2024-11-21T17:51:12.765747Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472754, txId: 18446744073709551615] shutting down FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=352;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=352;columns=4; 2024-11-21T17:51:12.820536Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472796, txId: 18446744073709551615] shutting down 2024-11-21T17:51:12.863813Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472852, txId: 18446744073709551615] shutting down 2024-11-21T17:51:12.907352Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472894, txId: 18446744073709551615] shutting down 2024-11-21T17:51:12.944087Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211472943, txId: 18446744073709551615] shutting down 2024-11-21T17:51:12.970426Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211473000, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::Int8Int16Olap [GOOD] Test command err: Trying to start YDB, gRPC: 17407, MsgBus: 10606 2024-11-21T17:51:07.258383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791602020545427:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:07.258462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f98/r3tmp/tmpnKL9iA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17407, node 1 2024-11-21T17:51:07.339239Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:07.350383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2024-11-21T17:51:07.350398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.350401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.350450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:07.367303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.367347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.368974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10606 TClient is connected to server localhost:10606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.420932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.424159Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:07.441161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.504325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.525255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.539113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.650560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791602020546838:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.650599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.703737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.712433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.729102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.736436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.751156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.767548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.784052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791602020547352:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.784075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.784154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791602020547357:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.786222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:07.794139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:51:07.794218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791602020547359:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:08.027338Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2024-11-21T17:51:08.034109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23842, MsgBus: 9131 2024-11-21T17:51:08.365272Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791608684161653:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:08.365371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f98/r3tmp/tmpTYbKIW/pdisk_1.dat 2024-11-21T17:51:08.379804Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23842, node 2 2024-11-21T17:51:08.391012Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:08.391028Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:08.391031Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:08.391075Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9131 TClient is connected to server localhost:9131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:08.464676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:08.464715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:08.466822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:08.473989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
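For context on what the KqpOlapScheme::AddColumn and KqpScheme::Int8Int16Olap cases in this log exercise, a minimal YQL sketch follows. It assumes the documented syntax for column-oriented tables (STORE = COLUMN with PARTITION BY HASH); the table and column names are hypothetical and do not appear in the traces above:

    -- Hedged sketch: identifiers are illustrative; syntax assumed from the YDB YQL docs for column tables.
    CREATE TABLE olap_sample (
        id Int32 NOT NULL,
        val8 Int8,
        val16 Int16,
        PRIMARY KEY (id)
    )
    PARTITION BY HASH(id)
    WITH (STORE = COLUMN);

    -- The AddColumn scenario corresponds to a schema change of this shape.
    ALTER TABLE olap_sample ADD COLUMN added_col Int64;

The ESchemeOpCreateColumnTable and ESchemeOpAlterColumnTable suboperations recorded by the schemeshard in these traces are the server-side counterpart of statements of this kind.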
2024-11-21T17:51:08.478237Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:08.487395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.509557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.543488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.560532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 720575940466444 ... 057594046644480 waiting... 2024-11-21T17:51:11.416204Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791617961109204:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.416244Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.423280Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.479150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.489067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.503250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.516588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.523389Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.531460Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791617961109710:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.531499Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.531528Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791617961109715:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.532092Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:11.536313Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791617961109717:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:11.778813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.791422Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:11.791486Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:11.791535Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:11.791556Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:11.791578Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:11.791599Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:51:11.791616Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:51:11.791637Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:51:11.791658Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:51:11.791676Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:51:11.791696Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:51:11.791714Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[5:7439791617961110078:2460];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:51:11.792755Z node 5 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:51:11.792771Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:51:11.792789Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:51:11.792794Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:51:11.792907Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:51:11.792914Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:51:11.792924Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:51:11.792931Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:51:11.792944Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:51:11.792948Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:51:11.792955Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:51:11.792960Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:51:11.793031Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:51:11.793038Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:51:11.793065Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:51:11.793069Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:51:11.793081Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:51:11.793085Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:51:11.793106Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:51:11.793110Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:51:11.793122Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:51:11.793125Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=368;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=368;columns=4; 2024-11-21T17:51:11.902158Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211471893, txId: 18446744073709551615] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AsyncReplicationEndpointAndDatabase [GOOD] Test command err: Trying to start YDB, gRPC: 3219, MsgBus: 12080 2024-11-21T17:51:07.388352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791601420734059:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:07.388369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f93/r3tmp/tmpxnfzmf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3219, node 1 2024-11-21T17:51:07.459030Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:07.479899Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.479913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.479915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.479949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:07.488907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.488938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.489925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12080 TClient is connected to server localhost:12080 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:51:07.541104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.543462Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:07.568758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.597159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.621580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.634209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.710385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791601420735586:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.710418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.741829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.751060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.767771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.778585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.843449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.860922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.889729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791601420736103:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.889759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.889872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791601420736108:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.890740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:07.894083Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:51:07.894192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791601420736110:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:08.128941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13161, MsgBus: 5745 2024-11-21T17:51:08.560957Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791607427938061:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:08.561154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f93/r3tmp/tmpUR2ijM/pdisk_1.dat 2024-11-21T17:51:08.577933Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13161, node 2 2024-11-21T17:51:08.583512Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:08.583525Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:08.583527Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:08.583568Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5745 TClient is connected to server localhost:5745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:08.667415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:08.667447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:08.667895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.669278Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:08.670572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:08.673928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:08.688855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.706959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.719038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.887284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791607427939600:2374], Da ... t proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.509275Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.696909Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791617517791803:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.696934Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.702020Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.759952Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.767059Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.774200Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.780891Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.788497Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.865768Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791617517792318:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.865799Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791617517792323:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.865818Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.866523Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.868214Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791617517792325:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:11.024742Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.033281Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25616, MsgBus: 27709 2024-11-21T17:51:11.284928Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791619188978955:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:11.284950Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f93/r3tmp/tmpR40ZxD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25616, node 5 2024-11-21T17:51:11.300323Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:11.302756Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:11.302773Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:11.302775Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:11.302808Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27709 TClient is connected to server localhost:27709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:11.385145Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:11.385183Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:11.386196Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:11.387452Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:11.388282Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:11.393171Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.404301Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.425301Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.441998Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.626929Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791619188980508:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.626954Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.633355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.639988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.653392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.663632Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.678042Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.703634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.717325Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791619188981012:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.717362Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.717381Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791619188981017:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.718254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:11.720143Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791619188981019:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:11.971924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.983715Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::OlapSharding_KeyOnly [GOOD] Test command err: Trying to start YDB, gRPC: 27056, MsgBus: 24128 2024-11-21T17:51:07.514379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791602763304064:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:07.514432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f8d/r3tmp/tmpnDDLuV/pdisk_1.dat 2024-11-21T17:51:07.576397Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27056, node 1 2024-11-21T17:51:07.610806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.610823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.610825Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.610867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:07.612219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.612271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.613362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24128 TClient is connected to server localhost:24128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.712909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:07.716547Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:07.725586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.753607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:07.778279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.800695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.902633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791602763305458:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.902661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.944970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.008906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.020424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.031771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.048555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.061599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.075645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791607058273270:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.075668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.075738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791607058273275:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.076650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:08.089297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791607058273277:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:51:08.312058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21246, MsgBus: 63141 2024-11-21T17:51:08.847518Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791605003712811:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:08.847570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f8d/r3tmp/tmpMn28IG/pdisk_1.dat 2024-11-21T17:51:08.866311Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21246, node 2 2024-11-21T17:51:08.876340Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:08.876350Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:08.876351Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:08.876379Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63141 TClient is connected to server localhost:63141 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:51:08.949014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:08.949047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:08.950162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:08.950438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.954780Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:08.965491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:08.989453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.011847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:09.024686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.177638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791609298681647:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access p ... 2024-11-21T17:51:10.149902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.152204Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439791616175247894:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:10.363694Z node 3 :KQP_YQL ERROR: TraceId: 01jd7xjw0s74647s7qqa48v93a, SessionId: ydb://session/3?node_id=3&id=YWM0OWM1M2MtMzJiNmNlOGItZTM1YzkzYjktZDVjZGI1MTQ= 2024-11-21 17:51:10.363 ERROR ydb-core-kqp-ut-scheme(pid=741208, tid=0x00007FA02E5C7640) [common provider] yql_provider_gateway.cpp:28: Unknown permission name: Trying to start YDB, gRPC: 14068, MsgBus: 4133 2024-11-21T17:51:10.694216Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791614573839224:2109];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:10.694339Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f8d/r3tmp/tmpc7pBiC/pdisk_1.dat 2024-11-21T17:51:10.709411Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14068, node 4 2024-11-21T17:51:10.716787Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:10.716802Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:10.716804Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:10.716848Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4133 TClient is connected to server localhost:4133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:10.794303Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:10.794333Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:10.795339Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:10.797614Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.805283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:10.813796Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.832204Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.888424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.002583Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791618868807999:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.002603Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.006551Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.012582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.018756Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.026461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.040272Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.047510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.114442Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791618868808504:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.114474Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.114538Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791618868808509:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.115384Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:11.117299Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791618868808511:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:11.300982Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24355, MsgBus: 7256 2024-11-21T17:51:11.599928Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791620065239645:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:11.599962Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f8d/r3tmp/tmpy1cVnQ/pdisk_1.dat 2024-11-21T17:51:11.626850Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24355, node 5 2024-11-21T17:51:11.636673Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:11.636690Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:11.636692Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:11.636747Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7256 TClient is connected to server localhost:7256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:11.703531Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:11.703563Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:51:11.703979Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.704734Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:11.705674Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:11.970780Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791620065240244:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.970807Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::IndexedTableAndNotNullColumnAddNotNullColumn [GOOD] Test command err: Trying to start YDB, gRPC: 24050, MsgBus: 6953 2024-11-21T17:51:06.078409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791599305744851:2104];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:06.078606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fb4/r3tmp/tmpbZ67O1/pdisk_1.dat 2024-11-21T17:51:06.132375Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24050, node 1 2024-11-21T17:51:06.175401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.175415Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.175417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.175448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:06.178702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:06.178729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:06.179660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6953 TClient is connected to server localhost:6953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:06.252709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.256998Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:06.259098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:51:06.340768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.375337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.387458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.461158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599305746341:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.461215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.507875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.567228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.632841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.662772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.677725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.691462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.713585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599305746872:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.713611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.713714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599305746877:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.714481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:06.716951Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T17:51:06.717032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791599305746879:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 3947, MsgBus: 13642 2024-11-21T17:51:07.229427Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791602053406693:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:07.229522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fb4/r3tmp/tmp8M0LB9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3947, node 2 2024-11-21T17:51:07.272979Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.272994Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.272997Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.273047Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:07.277081Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13642 TClient is connected to server localhost:13642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.328657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.328739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.332770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:07.341239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.344783Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:07.352910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:51:07.362138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.383270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.396118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.633392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791602053408112:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.633428Z node 2 :KQP_WORKLOAD_SERVICE W ... TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3970, MsgBus: 9669 2024-11-21T17:51:11.494941Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439791620513857033:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:11.495111Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fb4/r3tmp/tmprJek6p/pdisk_1.dat 2024-11-21T17:51:11.507627Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3970, node 6 2024-11-21T17:51:11.514178Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:11.514191Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:11.514194Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:11.514245Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9669 TClient is connected to server localhost:9669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:11.597864Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:11.597899Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:11.598356Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.599572Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:11.600814Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:11.614708Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:51:11.645072Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.671618Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.691545Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.932638Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791620513858586:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.932665Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.941129Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.950464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.958010Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.971862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.978347Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.993675Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.013325Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791624808826384:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.013347Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.013452Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791624808826389:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:12.014057Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:12.020593Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791624808826391:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:12.191537Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.319418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710757:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.336768Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.367938Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.385862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710765:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.423384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710767:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.442885Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710770:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.477342Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710772:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.490787Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710775:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.521692Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710777:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.541048Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710780:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.571786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710782:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.589038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710785:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.622494Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710787:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.638615Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710790:0, at schemeshard: 72057594046644480 
2024-11-21T17:51:12.679976Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710792:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.700776Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710795:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.757572Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710797:0, at schemeshard: 72057594046644480 2024-11-21T17:51:12.808486Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710800:0, at schemeshard: 72057594046644480 |82.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10713:2165] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10713:2165] Leader for TabletID 72057594037932033 is [1:10916:2167] sender: [1:10917:2156] recipient: [1:10713:2165] 2024-11-21T17:50:18.174902Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:18.175703Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:18.176079Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:18.176169Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T17:50:18.176367Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:18.176374Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T17:50:18.176426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T17:50:18.177306Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T17:50:18.177332Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T17:50:18.177355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T17:50:18.177371Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:18.177382Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T17:50:18.177391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for 
TabletID 72057594037932033 is [1:10916:2167] sender: [1:10942:2156] recipient: [1:110:2157] 2024-11-21T17:50:18.188455Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T17:50:18.188529Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:18.198911Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T17:50:18.198970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:18.198990Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T17:50:18.199000Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:18.199023Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T17:50:18.199030Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:18.199034Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T17:50:18.199043Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:18.209424Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T17:50:18.209506Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T17:50:18.209702Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T17:50:18.209709Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T17:50:18.209749Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T17:50:18.212054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { 
Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: 
"::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12097 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12098 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12099 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12100 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: ROT } } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: SSD } } } } Command { QueryBaseConfig { ... 
OTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2024-11-21T17:51:08.970420Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 293:1001 Path# /dev/disk1 2024-11-21T17:51:08.970425Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 295:1001 Path# /dev/disk1 2024-11-21T17:51:08.970430Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1000 Path# /dev/disk2 2024-11-21T17:51:08.970435Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 293:1002 Path# /dev/disk2 2024-11-21T17:51:08.970440Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1001 Path# /dev/disk1 2024-11-21T17:51:08.970445Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1000 Path# /dev/disk3 2024-11-21T17:51:08.970450Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1000 Path# /dev/disk3 2024-11-21T17:51:08.970455Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2024-11-21T17:51:08.970463Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2024-11-21T17:51:08.970468Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1002 Path# /dev/disk1 2024-11-21T17:51:08.970472Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1000 Path# /dev/disk3 2024-11-21T17:51:08.970477Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2024-11-21T17:51:08.970482Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1001 Path# /dev/disk1 2024-11-21T17:51:08.970486Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1000 Path# /dev/disk3 2024-11-21T17:51:08.970491Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1002 Path# /dev/disk1 2024-11-21T17:51:08.970496Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2024-11-21T17:51:08.970501Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1002 Path# /dev/disk2 2024-11-21T17:51:08.970505Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1002 Path# /dev/disk1 2024-11-21T17:51:08.970510Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1000 Path# /dev/disk3 2024-11-21T17:51:08.970515Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2024-11-21T17:51:08.970520Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2024-11-21T17:51:08.970527Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1002 Path# /dev/disk1 2024-11-21T17:51:08.970532Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1000 Path# /dev/disk3 2024-11-21T17:51:08.970536Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2024-11-21T17:51:08.970540Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1001 Path# /dev/disk1 2024-11-21T17:51:08.970545Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1000 Path# /dev/disk3 2024-11-21T17:51:08.970549Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2024-11-21T17:51:08.970554Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2024-11-21T17:51:08.970559Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1002 Path# /dev/disk1 2024-11-21T17:51:08.970564Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1000 Path# /dev/disk3 2024-11-21T17:51:08.970569Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2024-11-21T17:51:08.970574Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2024-11-21T17:51:08.970578Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1002 Path# /dev/disk1 2024-11-21T17:51:08.970583Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1000 Path# /dev/disk3 2024-11-21T17:51:08.970588Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2024-11-21T17:51:08.970593Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2024-11-21T17:51:08.970598Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1002 Path# /dev/disk1 2024-11-21T17:51:08.970603Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1000 Path# /dev/disk3 2024-11-21T17:51:08.970608Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1000 Path# /dev/disk3 2024-11-21T17:51:08.970612Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2024-11-21T17:51:08.970619Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1002 Path# /dev/disk1 2024-11-21T17:51:08.970623Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1000 Path# /dev/disk3 2024-11-21T17:51:08.970628Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2024-11-21T17:51:08.970633Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2024-11-21T17:51:08.970638Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1002 Path# /dev/disk1 2024-11-21T17:51:08.970642Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1001 Path# /dev/disk2 2024-11-21T17:51:08.970646Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1000 Path# /dev/disk3 2024-11-21T17:51:08.970651Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2024-11-21T17:51:08.970666Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2024-11-21T17:51:08.970672Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1002 Path# /dev/disk1 2024-11-21T17:51:08.970676Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1000 Path# /dev/disk3 2024-11-21T17:51:08.970681Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2024-11-21T17:51:08.970686Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1002 Path# /dev/disk1 2024-11-21T17:51:08.970690Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1000 Path# /dev/disk3 2024-11-21T17:51:08.970695Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2024-11-21T17:51:08.970699Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1000 Path# /dev/disk1 2024-11-21T17:51:08.970704Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1000 Path# /dev/disk3 2024-11-21T17:51:08.970709Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2024-11-21T17:51:08.970713Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1000 Path# /dev/disk3 2024-11-21T17:51:08.970718Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1000 Path# /dev/disk1 2024-11-21T17:51:08.970723Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1000 Path# /dev/disk3 2024-11-21T17:51:08.970727Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1001 Path# /dev/disk2 2024-11-21T17:51:08.970731Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1000 Path# /dev/disk1 2024-11-21T17:51:08.970736Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1001 Path# /dev/disk3 2024-11-21T17:51:08.970740Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1002 Path# /dev/disk1 2024-11-21T17:51:08.970745Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1001 Path# /dev/disk3 2024-11-21T17:51:08.970749Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1001 Path# /dev/disk3 2024-11-21T17:51:08.970754Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1002 Path# /dev/disk1 2024-11-21T17:51:08.970759Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1002 Path# /dev/disk2 2024-11-21T17:51:08.970764Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2024-11-21T17:51:08.970768Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1002 Path# /dev/disk1 2024-11-21T17:51:08.970773Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1002 Path# /dev/disk2 2024-11-21T17:51:08.970785Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1001 Path# /dev/disk1 2024-11-21T17:51:08.970790Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1002 Path# /dev/disk2 2024-11-21T17:51:08.970795Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1002 Path# /dev/disk1 2024-11-21T17:51:08.970802Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1002 Path# /dev/disk2 2024-11-21T17:51:08.970807Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1002 Path# /dev/disk2 2024-11-21T17:51:09.026785Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2024-11-21T17:51:09.043905Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> TPartitionTests::FailedTxsDontBlock >> KqpScheme::AddChangefeedNegative [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> Balancing::Balancing_ManyTopics_PQv1 >> TPQTest::TestReadSessions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropColumnAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 65533, MsgBus: 25265 2024-11-21T17:51:07.695573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791601892474664:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f87/r3tmp/tmpH5Q0bT/pdisk_1.dat 2024-11-21T17:51:07.739650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:07.783803Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65533, node 1 2024-11-21T17:51:07.791284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.791309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.793202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:07.812468Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.812478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.812481Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.812522Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25265 TClient is connected to server localhost:25265 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.888507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.900391Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:51:08.068752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791606187442408:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.068777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.098059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.106504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:08.106572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:08.106615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:08.106633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:08.106651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:08.106673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:51:08.106689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:51:08.106707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:51:08.106727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:51:08.106745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:51:08.106760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:51:08.106780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791606187442483:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:51:08.107726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:51:08.107740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:51:08.107752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:51:08.107760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:51:08.107777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:51:08.107781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:51:08.107790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:51:08.107796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:51:08.107805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:51:08.107809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:51:08.107814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:51:08.107818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:51:08.107876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:51:08.107882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:51:08.107900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:51:08.107903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:51:08.107920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:51:08.107924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:51:08.107941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:51:08.107945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:51:08.107959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 202 ... 44480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:10.824961Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:10.824991Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:10.826095Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:10.827353Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Uint64 NOT NULL, int_column Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:51:11.024855Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791621911061303:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.024889Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.116936Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.125806Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:11.125843Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:11.125914Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:11.126029Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:11.126057Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:11.126076Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:51:11.126097Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:51:11.126117Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:51:11.126138Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:51:11.126159Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:51:11.126179Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:51:11.126199Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439791621911061350:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:51:11.126773Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:51:11.126783Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:51:11.126796Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:51:11.126801Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:51:11.126820Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:51:11.126824Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:51:11.126836Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:51:11.126841Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:51:11.126851Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:51:11.126855Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:51:11.126861Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:51:11.126866Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:51:11.126920Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:51:11.126927Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:51:11.126947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:51:11.126951Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:51:11.126962Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:51:11.126966Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:51:11.126984Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:51:11.126988Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:51:11.127000Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:51:11.127003Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:51:11.128585Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:51:11.174722Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1392;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1392;columns=2; 2024-11-21T17:51:11.187284Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791621911061440:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.187313Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.189591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.192598Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T17:51:15.725695Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439791617616093563:2195];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:15.725727Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 128 Create chunk: 0.000006s Read by index: 0.000005s Iterate: 0.000004s Size: 252 Create chunk: 0.000012s Read by index: 0.000005s Iterate: 0.000005s Size: 1887 Create chunk: 0.000012s Read by index: 0.000016s Iterate: 0.000006s Size: 1658 Create chunk: 0.000016s Read by index: 0.000016s Iterate: 0.000008s Size: 1889 Create chunk: 0.000012s Read by index: 0.000013s Iterate: 0.000005s Size: 1660 Create chunk: 0.000016s Read by index: 0.000016s Iterate: 0.000008s Size: 2407 Create chunk: 0.000026s Read by index: 0.000019s Iterate: 0.000013s Size: 2061 Create chunk: 0.000027s Read by index: 0.000042s Iterate: 0.000015s ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithStoreExternalBlobs [GOOD] Test command err: Trying to start YDB, gRPC: 19778, MsgBus: 26281 2024-11-21T17:51:07.282734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791604380103655:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:07.282754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f9d/r3tmp/tmpaHM8Q4/pdisk_1.dat 2024-11-21T17:51:07.342017Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19778, node 1 2024-11-21T17:51:07.364079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.364095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.364097Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.364135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26281 2024-11-21T17:51:07.382951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.382985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.384297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26281 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:07.465556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.468532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:07.482660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.552694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.580429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.592122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.684948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791604380105202:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.684975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.740832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.756085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.773343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.781689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.793014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.799323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.820123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791604380105707:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.820160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.820355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791604380105721:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.821247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:07.826889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791604380105724:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:51:08.116157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.140350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.144423Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found Trying to start YDB, gRPC: 15245, MsgBus: 16604 2024-11-21T17:51:08.381959Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791605332934387:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:08.383051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f9d/r3tmp/tmp3H4IyQ/pdisk_1.dat 2024-11-21T17:51:08.400954Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15245, node 2 2024-11-21T17:51:08.411316Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:08.411332Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:08.411334Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:08.411372Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16604 TClient is connected to server localhost:16604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:08.481339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:08.481364Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:08.482093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T17:51:08.485356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.486774Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:08.488941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.503839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.525327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.589310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCr ... -21T17:51:12.551301Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791626081918168:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:12.753066Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18824, MsgBus: 12819 2024-11-21T17:51:13.134092Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439791627883269316:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:13.134155Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f9d/r3tmp/tmpG3jpKI/pdisk_1.dat 2024-11-21T17:51:13.142427Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18824, node 7 2024-11-21T17:51:13.151966Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:13.151987Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:13.151988Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:13.152020Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12819 TClient is connected to server localhost:12819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:13.234498Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:13.234526Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:13.235580Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:13.236847Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... --!syntax_v1 CREATE TABLE `/Root/TableWithTtlSettings` ( Key Uint64, Datetime64Column Datetime64, PRIMARY KEY (Key) ) WITH ( TTL = Interval("P1D") ON Datetime64Column ) 2024-11-21T17:51:13.407047Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791627883269916:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:13.407079Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:13.409668Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7587, MsgBus: 17924 2024-11-21T17:51:13.728534Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439791627460606823:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:13.728584Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f9d/r3tmp/tmp1aK3Ht/pdisk_1.dat 2024-11-21T17:51:13.740291Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7587, node 8 2024-11-21T17:51:13.755308Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:13.755323Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:13.755325Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:13.755376Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17924 TClient is connected to server localhost:17924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:13.828989Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:13.829029Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:13.830029Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:13.831834Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:13.841248Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:13.852516Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:13.873686Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:13.884044Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:14.003712Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791631755575658:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:14.003729Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:14.006573Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:14.011973Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:14.022081Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:14.029016Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:14.035927Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:14.043048Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:14.051438Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791631755576162:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:14.051463Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:14.051531Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791631755576167:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:14.052264Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:14.056377Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439791631755576169:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:14.228998Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |82.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 4341, MsgBus: 14071 2024-11-21T17:51:07.878085Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791602747149655:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:07.878139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f82/r3tmp/tmpdZhjHw/pdisk_1.dat 2024-11-21T17:51:07.961619Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4341, node 1 2024-11-21T17:51:07.977894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:07.977915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:07.978105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:07.978107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:07.978108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:07.978139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:07.978750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14071 TClient is connected to server localhost:14071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:08.076192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:08.080600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:08.090329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.116603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.149906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.161911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.251713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791607042118335:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.251745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.293586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.303173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.359389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.367016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.384376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.395449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.412381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791607042118852:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.412412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.412781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791607042118857:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:08.413695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:08.422670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791607042118859:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 20159, MsgBus: 2836 2024-11-21T17:51:08.786237Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791607928715692:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f82/r3tmp/tmpcGrTjM/pdisk_1.dat 2024-11-21T17:51:08.796391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:08.827783Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20159, node 2 2024-11-21T17:51:08.836967Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:08.836980Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:08.836982Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:08.837022Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2836 TClient is connected to server localhost:2836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:08.892391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:08.892417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:08.893713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:08.894791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.897573Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:08.899481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:51:08.912270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:08.983613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:08.999920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.155149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791612223684396:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.155176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.532915Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.563951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.726619Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791616355500731:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.726648Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.733507Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.743370Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.755258Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.768042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.781214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.795765Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.811844Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791616355501235:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.811870Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.811891Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791616355501240:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.812624Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.815463Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791616355501242:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:11.004862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.006661Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1237, MsgBus: 29595 2024-11-21T17:51:11.295278Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791619880101215:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:11.295448Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f82/r3tmp/tmps92gjh/pdisk_1.dat 2024-11-21T17:51:11.310542Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1237, node 5 2024-11-21T17:51:11.319830Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:11.319844Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:11.319845Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:11.319875Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29595 TClient is connected to server localhost:29595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:11.399224Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:11.399258Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:11.399664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.400283Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2024-11-21T17:51:11.409446Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.429161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.493024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.504912Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:11.649028Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791619880102766:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.649140Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.651994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.675247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.705287Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.724773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.738262Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.750864Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:11.772350Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791619880103280:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.772386Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.772609Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791619880103285:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:11.773579Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:11.776919Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T17:51:11.777019Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791619880103287:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:12.111493Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDelete [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:51:05.377281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:05.377308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:05.377312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:05.377316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:05.377328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:05.377331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:05.377338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:05.377448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:05.388743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:05.388781Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:51:05.392177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:05.392365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:05.392404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:05.398834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:05.398941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2024-11-21T17:51:05.399064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:05.399364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:05.400187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:05.400460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:05.400471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:05.400483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:05.400490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:05.400495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:05.400534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:51:05.404171Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:51:05.421401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:05.421512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.421590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:05.421659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:05.421669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.422664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:05.422717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:05.422787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.422799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:05.422804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:05.422810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:05.423400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.423413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:05.423418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:05.423733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.423743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.423749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:05.423756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:05.424407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:05.424771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:05.424818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:05.425001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:05.425028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:05.425042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:05.425102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:05.425108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:05.425134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:05.425146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:05.425500Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:05.425509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:05.425548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:05.425552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:05.425627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.425633Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:05.425643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:05.425647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:05.425652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:05.425657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:05.425661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:05.425665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:05.425675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:05.425680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:05.425684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
erId: 72057594046678944, LocalPathId: 3] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 1002 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:51:21.335535Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:51:21.335548Z node 63 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 130 2024-11-21T17:51:21.335565Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:21.335569Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:51:21.335660Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:51:21.335826Z node 63 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:21.335830Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:51:21.335842Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:51:21.335855Z node 63 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:21.335857Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T17:51:21.335860Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T17:51:21.335886Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:51:21.335892Z node 63 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1002:0 ProgressState 2024-11-21T17:51:21.335898Z node 63 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:51:21.335900Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:51:21.335903Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:51:21.335905Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:51:21.335908Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:51:21.335910Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:51:21.335925Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:51:21.335928Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2024-11-21T17:51:21.335930Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T17:51:21.335932Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, 
LocalPathId: 3], 18446744073709551615 2024-11-21T17:51:21.335995Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:21.336001Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:21.336004Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:51:21.336006Z node 63 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T17:51:21.336008Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:51:21.336065Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:21.336070Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:51:21.336072Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:51:21.336075Z node 63 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:51:21.336077Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:51:21.336081Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T17:51:21.336084Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [63:424:2391] 2024-11-21T17:51:21.336305Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:51:21.336314Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:51:21.336502Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:51:21.336665Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:21.336699Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:51:21.336753Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:51:21.336776Z 
node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 Forgetting tablet 72075186233409547 2024-11-21T17:51:21.337002Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:21.337029Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:51:21.337082Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:21.337085Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:51:21.337096Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:51:21.337121Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:21.337124Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:51:21.337130Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:21.337328Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:51:21.337354Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:51:21.337358Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [63:431:2398] 2024-11-21T17:51:21.337410Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:51:21.337414Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:51:21.337645Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:51:21.337650Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:51:21.337669Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:21.337675Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:51:21.337714Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:51:21.337719Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T17:51:21.337758Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:21.337777Z node 63 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 24us result status StatusPathDoesNotExist 2024-11-21T17:51:21.337796Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:02.708269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:02.708293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:02.708296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:02.708299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:02.708304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:02.708306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:02.708319Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:02.708390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:02.716349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:02.716371Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:02.718229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:02.718305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:02.718340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:02.720715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:02.720781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:02.720882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:02.721069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:02.721718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:02.722023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:02.722035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:02.722046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:02.722050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:02.722054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:02.722095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:02.723223Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:02.739394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:02.739466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.739525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:02.739565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:02.739573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.740350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:02.740376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:02.740422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.740434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:02.740438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:02.740442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:02.740840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.740851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:02.740855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:02.741220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.741229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.741235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:02.741241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:02.741693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:02.742121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:02.742172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:02.742346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:02.742370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 
4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:02.742377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:02.742430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:02.742436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:02.742461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:02.742472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:02.742846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:02.742857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:02.742895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:02.742902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:02.742979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.742986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:02.742997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:02.743001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:02.743006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:02.743011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:02.743015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:02.743018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:02.743030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:02.743035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:02.743039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
t schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2024-11-21T17:51:19.899903Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.899912Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.899914Z node 260 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:51:19.899917Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2024-11-21T17:51:19.899920Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T17:51:19.899984Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.899989Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.899991Z node 260 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:51:19.899993Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2024-11-21T17:51:19.899995Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:51:19.900000Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:51:19.900157Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T17:51:19.900173Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:51:19.900293Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:51:19.900298Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:51:19.900304Z node 260 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2024-11-21T17:51:19.900356Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at 
schemeshard: 72057594046678944 2024-11-21T17:51:19.900368Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 1116691499113 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:19.900373Z node 260 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2024-11-21T17:51:19.900386Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:51:19.900391Z node 260 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T17:51:19.900393Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:51:19.900399Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:51:19.900404Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:51:19.900407Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T17:51:19.900411Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:51:19.900413Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T17:51:19.900415Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T17:51:19.900420Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:51:19.900423Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T17:51:19.900425Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T17:51:19.900427Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:51:19.900510Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.900692Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.900844Z node 260 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:19.900848Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:19.900865Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:51:19.900878Z node 260 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:19.900883Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [260:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T17:51:19.900885Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [260:203:2206], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T17:51:19.900974Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.900981Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.900984Z node 260 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:51:19.900986Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:51:19.900989Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T17:51:19.901031Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.901035Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.901037Z node 260 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:51:19.901039Z node 260 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:51:19.901041Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:51:19.901047Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T17:51:19.901050Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [260:120:2146] 2024-11-21T17:51:19.901083Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:19.901086Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:51:19.901091Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:51:19.901346Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 
2024-11-21T17:51:19.901499Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:51:19.901509Z node 260 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T17:51:19.901515Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T17:51:19.901552Z node 260 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1004 2024-11-21T17:51:19.901743Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:51:19.901748Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:51:19.901789Z node 260 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:51:19.901799Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:51:19.901802Z node 260 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [260:974:2913] TestWaitNotification: OK eventTxId 1004 |82.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionTests::TooManyImmediateTxs |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> TPartitionTests::TooManyImmediateTxs [GOOD] |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithPgColumn [GOOD] Test command err: Trying to start YDB, gRPC: 65342, MsgBus: 5869 2024-11-21T17:51:05.727608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791592942323658:2244];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:05.727654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00107b/r3tmp/tmpIgWnOw/pdisk_1.dat 2024-11-21T17:51:05.776474Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65342, node 1 2024-11-21T17:51:05.795972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:05.795988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T17:51:05.795990Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:05.796027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5869 2024-11-21T17:51:05.827544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:05.827566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:05.828669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:05.862167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.864859Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.873087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:05.890536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.907434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.917593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.084166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791597237292297:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.084200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.117283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.126693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.138634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.156461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.170249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.184336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.201145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791597237292804:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.201176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.201294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791597237292809:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.202338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:06.214381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791597237292811:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:06.455984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28771, MsgBus: 22620 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00107b/r3tmp/tmpBvvjAt/pdisk_1.dat 2024-11-21T17:51:06.647030Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:06.647554Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28771, node 2 2024-11-21T17:51:06.664420Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.664434Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.664436Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.664468Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22620 2024-11-21T17:51:06.740451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:06.740479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:22620 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:06.744569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:06.760963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.762680Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:06.773450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.789474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:06.812204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.841455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.984308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791600184217794:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.984332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource p ... opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.228563Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.241798Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.464075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791609722164094:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.464129Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.464170Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791609722164099:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.464918Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:09.467263Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791609722164101:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:09.633381Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31324, MsgBus: 18740 2024-11-21T17:51:10.100054Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791614870053740:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:10.100074Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00107b/r3tmp/tmp2Ou4aw/pdisk_1.dat 2024-11-21T17:51:10.110227Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31324, node 5 2024-11-21T17:51:10.120571Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:10.120587Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:10.120590Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:10.120633Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18740 TClient is connected to server localhost:18740 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:10.200521Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:10.200556Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:10.201717Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:10.202470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.207545Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:10.213726Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:10.228045Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.289499Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.300888Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.396625Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791614870055292:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.396654Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.403381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.457414Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.465848Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.522733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.529737Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.545589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.613401Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791614870055812:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.613424Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.613528Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791614870055817:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.614221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.620742Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791614870055819:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:10.850310Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.861767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.872186Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.885124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.899057Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.911713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.925978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.940528Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.954357Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.965071Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.976149Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> KqpScanSpilling::SpillingPragmaParseError >> KqpScanSpilling::HandleErrorsCorrectly >> KqpUserConstraint::KqpReadNull-UploadNull >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] >> 
TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> KqpScanSpilling::SelfJoin >> TPartitionTests::WriteSubDomainOutOfSpace >> TPQTestInternal::TestToHex [GOOD] |82.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> TSchemeShardServerLess::StorageBilling [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> TPQUtilsTest::TLastCounter [GOOD] |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> KqpScanSpilling::SpillingPragmaParseError [GOOD] >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK >> TPartitionTests::TestNonConflictingActsBatchOk >> TPartitionTests::GetUsedStorage >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> KqpScanSpilling::SelfJoin [GOOD] >> TPartitionTests::GetUsedStorage [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> TQuotaTracker::TestSmallMessages [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TPartitionTests::TestTxBatchInFederation |82.7%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |82.7%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |82.7%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:08.787091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:08.787119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:08.787124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:08.787131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:08.787149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:08.787153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:08.787163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:08.787262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:08.798028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:08.798050Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:08.803005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:08.803845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:08.803883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:08.806045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:08.806333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:08.806436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:08.806543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:08.809222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:08.809610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:08.809626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:08.809669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:08.809677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:08.809684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:08.809703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2024-11-21T17:51:08.811255Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:08.827620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:08.827705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:08.827774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:08.827854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:08.827864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:08.828975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:08.829020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:08.829071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:08.829082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:08.829087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:08.829092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:08.829630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:08.829644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:08.829650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:08.830082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:08.830094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:08.830100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:08.830107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:08.830700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:08.831085Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:08.831135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:08.831328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:08.831354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:08.831363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:08.831417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:08.831424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:08.831456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:08.831470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:08.831889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:08.831899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:08.831943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:08.831948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:08.832058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:08.832066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:08.832077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:08.832081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:08.832087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:08.832092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:08.832097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:08.832101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:08.832113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 2 2024-11-21T17:51:08.832119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:08.832123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:08.832441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:08.832458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:08.832464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:08.832469Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:08.832474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:08.832489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... LAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:09.307696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:09.307724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:51:09.307745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:09.307750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1 2024-11-21T17:51:09.307754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T17:51:09.307764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:51:09.307770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2024-11-21T17:51:09.307779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:51:09.307783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:51:09.307789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T17:51:09.307793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:51:09.307798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:51:09.307801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:51:09.307837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:51:09.307843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T17:51:09.307847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:51:09.307850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:51:09.308051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:09.308067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:09.308072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:51:09.308076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:51:09.308080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:09.308156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:09.308163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:09.308167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:51:09.308170Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:51:09.308173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:51:09.308181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T17:51:09.309675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:51:09.309688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:51:09.309855Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:51:09.310203Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:51:09.310322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:09.310401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:51:09.310469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 Forgetting tablet 72075186233409547 2024-11-21T17:51:09.310771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:09.310819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:51:09.311001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:09.311009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:51:09.311034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:09.311065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:09.311070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:51:09.311081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:09.311256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:51:09.311345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:51:09.311353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:51:09.311711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:51:09.311721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:51:09.311761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:09.311770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:51:09.311846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:51:09.311852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:51:09.311932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:51:09.311950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:51:09.311954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2100:3705] 
TestWaitNotification: OK eventTxId 104 2024-11-21T17:51:09.313094Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:09.313137Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 59us result status StatusPathDoesNotExist 2024-11-21T17:51:09.313184Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:51:09.313264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:09.313277Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 13us result status StatusPathDoesNotExist 2024-11-21T17:51:09.313291Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6fwh/001ced/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 22336, MsgBus: 27393 2024-11-21T17:51:26.313347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791683361461727:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:26.313633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/6fwh/001ced/r3tmp/tmp2kiDo9/pdisk_1.dat 2024-11-21T17:51:26.416824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.416854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:26.417101Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:26.419997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22336, node 1 2024-11-21T17:51:26.428586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:26.428597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:26.428602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:26.428630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27393 TClient is connected to server localhost:27393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:26.494852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.516773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.530032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.544636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.553616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.698758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791683361463274:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.698783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.720585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.727111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.733934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.788275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.843014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.852965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.861556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791683361463793:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.861574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.861587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791683361463798:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.862085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:26.866200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791683361463800:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:27.033307Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791687656431387:2458], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2024-11-21T17:51:27.033437Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2EyMjljZWYtMTZlMzg3MDEtYTUwNjFmNzItZDM4ODQxMDc=, ActorId: [1:7439791687656431380:2454], ActorState: ExecuteState, TraceId: 01jd7xkc9n53qcydswwr59b8mq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |82.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6fwh/001cf4/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 24686, MsgBus: 21594 2024-11-21T17:51:26.429428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791686055083097:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:26.429558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001cf4/r3tmp/tmpAxtDZK/pdisk_1.dat 2024-11-21T17:51:26.475687Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24686, node 1 2024-11-21T17:51:26.485791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:26.485805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:26.485807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:26.485859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21594 TClient is connected to server localhost:21594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
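The SpillingPragmaParseError output above fails compilation because 'GraceJoin1' is not a member of NYql::NDq::EEnabledSpillingNodes; per the error text, the accepted values are 'None', 'GraceJoin', 'Aggregation', and 'All'. A minimal sketch of a query that would pass this validation is shown below, assuming the setting is exposed as a ydb.EnableSpillingNodes pragma as the "EnableSpillingNodes" setting name in the error suggests — the pragma spelling and the sample query are assumptions for illustration, not taken from this log:

    -- assumed pragma name, inferred from the "EnableSpillingNodes" setting named in the error above
    PRAGMA ydb.EnableSpillingNodes = "GraceJoin";   -- valid values: None, GraceJoin, Aggregation, All
    SELECT COUNT(*) FROM `/Root/KeyValue`;          -- any query; the pragma only selects which nodes may spill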
2024-11-21T17:51:26.527333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.531002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.531020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:26.532130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:26.539884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.554840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.573053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.582478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.689977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686055084646:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.690010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.723674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.729081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.740806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.794803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.849525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.860019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.867766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686055085163:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.867782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686055085168:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.867789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.868216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:26.873227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791686055085170:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:51:27.112949Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2024-11-21T17:51:27.112997Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2024-11-21T17:51:27.113012Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=2;input_channels_count=2; 2024-11-21T17:51:27.113015Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052947:2513], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7439791690350052947:2513], task: 1 2024-11-21T17:51:27.113022Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052947:2513], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2024-11-21T17:51:27.113027Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052947:2513], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. EVLOGKQP START 2024-11-21T17:51:27.113028Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2024-11-21T17:51:27.113172Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:45 :META:Table { TableId { OwnerId: 72057594046644480 TableId: 6 } TablePath: "/Root/KeyValue" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Columns { Id: 1 Name: "Key" Type: 4 } Columns { Id: 2 Name: "Value" Type: 4097 } KeyColumnTypes: 4 Reads { ShardId: 72075186224037911 KeyRanges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } } ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC EnableShardsSequentialScan: true KeyColumnTypeInfos { } ReadType: ROWS 2024-11-21T17:51:27.113188Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052948:2514], TxId: 281474976710682, task: 2. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [1:7439791690350052948:2514], task: 2 2024-11-21T17:51:27.113192Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052948:2514], TxId: 281474976710682, task: 2. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
Set periodic stats 0.100000s 2024-11-21T17:51:27.113222Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7439791690350052956:3595] at service [1:7597699455116079460:27756] 2024-11-21T17:51:27.113233Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7439791690350052957:3596] at service [1:7597699455116079460:27756] 2024-11-21T17:51:27.113238Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7439791690350052950:2515], task: 3 2024-11-21T17:51:27.113247Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2024-11-21T17:51:27.113308Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052948:2514], ... /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116451Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646927 2024-11-21T17:51:27.116456Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116459Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116460Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T17:51:27.116463Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2024-11-21T17:51:27.116464Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [10] 2024-11-21T17:51:27.116465Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, waiting for chunk delivery in output channelId: 4, seqNo: [11] 2024-11-21T17:51:27.116467Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. 
Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2024-11-21T17:51:27.116468Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Finish input channelId: 4, from: [1:7439791690350052950:2515] 2024-11-21T17:51:27.116469Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116471Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646927 2024-11-21T17:51:27.116472Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116472Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T17:51:27.116474Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2024-11-21T17:51:27.116476Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [10] 2024-11-21T17:51:27.116476Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished 2024-11-21T17:51:27.116477Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052950:2515], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:27.116484Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. pass away 2024-11-21T17:51:27.116492Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710682;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:27.116498Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116505Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116515Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116531Z node 1 :KQP_COMPUTE DEBUG: [CloseFile] from: [1:7439791690350052960:3598], error: (empty maybe) 2024-11-21T17:51:27.116583Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116602Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116607Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116661Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116669Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116673Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. 
CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116674Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T17:51:27.116678Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2024-11-21T17:51:27.116680Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, waiting for chunk delivery in output channelId: 5, seqNo: [11] 2024-11-21T17:51:27.116726Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T17:51:27.116733Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T17:51:27.116735Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2024-11-21T17:51:27.116735Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished 2024-11-21T17:51:27.116736Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791690350052951:2516], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jd7xkcbh0nnn0xf3f0xgx3hp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQ4ZTlmNmItNmUwYjE1MGQtNTNlODA5MGUtNmZlNTUxMTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:27.116741Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. pass away 2024-11-21T17:51:27.116745Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710682;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:27.116857Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211487160, txId: 281474976710681] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2024-11-21T17:51:26.647675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:26.648051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:26.648068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00176b/r3tmp/tmpe6lFf0/pdisk_1.dat 2024-11-21T17:51:26.745187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.764473Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:26.807177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.807217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:26.817760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:26.921633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:27.180516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:27.180542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:832:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:27.180549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:27.181221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:27.347945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:836:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:27.422125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xkceca00kgd9k4dqm91ry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI5NTc0OTYtMmY0ODIwNWUtZjA4MmFiYzQtOWFkYmRkMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |82.7%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::RootWithStoragePoolsAndTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:51:03.882413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:03.882440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:03.882446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:03.882451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:03.882469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:03.882474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:03.882484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:03.882567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:03.892925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:03.892950Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:51:03.895458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:03.895569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:03.895603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:03.898141Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:03.898215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:03.898355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:03.898554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:03.899254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:03.899549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:03.899561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:03.899585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:03.899592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:03.899598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:03.899647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:51:03.901048Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:51:03.915516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:03.915610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:03.915689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:03.915756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:03.915764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:03.916673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:03.916717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:03.916783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:51:03.916794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:03.916798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:03.916803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:03.920566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:03.920587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:03.920594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:03.924644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:03.924676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:03.924685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:03.924694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:03.925462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:03.926086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:03.926151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:03.926374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:03.926402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:03.926423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:03.926482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:03.926490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:03.926527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:03.926540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:03.927140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:03.927153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:03.927203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:03.927208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:03.927315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:03.927323Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:03.927337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:03.927342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:03.927348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:03.927353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:03.927360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:03.927364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:03.927376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:03.927382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:03.927386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:51:22.306283Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:51:22.306286Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:51:22.306288Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:51:22.306290Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:51:22.306297Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:51:22.306726Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T17:51:22.306743Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:51:22.306759Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T17:51:22.306768Z node 72 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 193 } } 2024-11-21T17:51:22.306929Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 309237647651 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:51:22.306934Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:51:22.306943Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 309237647651 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:51:22.306947Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:51:22.306951Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 351 RawX2: 309237647651 } Origin: 72075186233409546 State: 2 TxId: 
1003 Step: 0 Generation: 2 2024-11-21T17:51:22.306973Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:22.306976Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:51:22.306979Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T17:51:22.306982Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:51:22.307753Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:51:22.307782Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:51:22.307965Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:51:22.307981Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:51:22.308038Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:51:22.308047Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:51:22.308056Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:51:22.308059Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:51:22.308062Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:51:22.308066Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:51:22.308069Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:51:22.308073Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:51:22.308091Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:51:22.308453Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:51:22.308458Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:51:22.308498Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:51:22.308509Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:51:22.308512Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [72:426:2401] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:51:22.308556Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:22.308584Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 37us result status StatusSuccess 2024-11-21T17:51:22.308656Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:22.308695Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:22.308711Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table_0" took 17us result status StatusSuccess 2024-11-21T17:51:22.308760Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 
TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:02.774132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:02.774151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:02.774154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:02.774158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:02.774162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:02.774164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:02.774195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:02.774276Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:02.783492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:02.783516Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:02.785434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:02.785513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:02.785535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:02.787726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:02.787797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:02.787894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:02.788051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:02.788750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:02.789029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:02.789039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:02.789050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:02.789057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:02.789062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:02.789097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:02.790338Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:02.803960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:02.804043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.804105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:02.804141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:02.804149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.804868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:02.804897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:02.804948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.804961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:02.804966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:02.804971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:02.805351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.805360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:02.805362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:02.805755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.805771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.805778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:02.805784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:02.806374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:02.806862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:02.806922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:02.807105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:02.807124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:02.807129Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:02.807179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:02.807185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:02.807211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:02.807219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:02.807566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:02.807574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:02.807611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:02.807618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:02.807699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:02.807706Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:02.807718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:02.807722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:02.807727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:02.807732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:02.807737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:02.807740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:02.807751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:02.807757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:02.807760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
6678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.048868Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.048871Z node 204 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:27.048875Z node 204 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:51:27.048879Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:51:27.048889Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:51:27.048955Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:27.048960Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:51:27.048967Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:27.049092Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2024-11-21T17:51:27.049108Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:51:27.049184Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2024-11-21T17:51:27.049188Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2024-11-21T17:51:27.049192Z node 204 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000008 2024-11-21T17:51:27.049271Z node 204 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:27.049286Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 876173330536 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:27.049294Z node 204 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000008, at schemeshard: 72057594046678944 2024-11-21T17:51:27.049308Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2024-11-21T17:51:27.049315Z node 204 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T17:51:27.049318Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:51:27.049326Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:51:27.049332Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:27.049336Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2024-11-21T17:51:27.049341Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2024-11-21T17:51:27.049345Z node 204 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T17:51:27.049348Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2024-11-21T17:51:27.049354Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:27.049358Z node 204 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2024-11-21T17:51:27.049361Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T17:51:27.049365Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T17:51:27.049635Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.049688Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:51:27.049694Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:51:27.049722Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:51:27.049727Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:51:27.049738Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.049932Z node 204 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:27.050145Z node 204 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:27.050151Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:27.050172Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:51:27.050189Z node 204 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:27.050196Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [204:203:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2024-11-21T17:51:27.050200Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [204:203:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710761 2024-11-21T17:51:27.050304Z node 204 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.050312Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.050316Z node 204 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:27.050320Z node 204 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:51:27.050323Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:51:27.050387Z node 204 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.050393Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.050396Z node 204 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2024-11-21T17:51:27.050400Z node 204 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:51:27.050403Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:27.050410Z node 204 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2024-11-21T17:51:27.050414Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [204:122:2148] 2024-11-21T17:51:27.050452Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:27.050455Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:51:27.050462Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:51:27.050750Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2024-11-21T17:51:27.050935Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2024-11-21T17:51:27.050948Z node 204 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T17:51:27.050955Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2024-11-21T17:51:27.050988Z node 204 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T17:51:27.051229Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:51:27.051235Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:51:27.051283Z node 204 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:51:27.051294Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:51:27.051298Z node 204 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [204:858:2793] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2024-11-21T17:50:57.811496Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791561508601757:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:57.811769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:57.836461Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e1c/r3tmp/tmpVk46lO/pdisk_1.dat 2024-11-21T17:50:57.863683Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26875, node 1 2024-11-21T17:50:57.873059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e1c/r3tmp/yandexh6b9z5.tmp 2024-11-21T17:50:57.873073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e1c/r3tmp/yandexh6b9z5.tmp 2024-11-21T17:50:57.873132Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e1c/r3tmp/yandexh6b9z5.tmp 2024-11-21T17:50:57.873177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:57.877218Z INFO: TTestServer started on Port 3326 GrpcPort 26875 TClient is connected to server localhost:3326 PQClient connected to localhost:26875 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:57.912571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:57.912598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:57.913651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:57.942204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:57.944443Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:57.959622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:50:58.144139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791565803569792:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.144166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.144271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791565803569819:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.144952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.145952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791565803569847:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.145976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.146586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791565803569821:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:50:58.170799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.178737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.195992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791565803570179:2599] 2024-11-21T17:51:02.811713Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791561508601757:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.811744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:03.391914Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.395050Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.395465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791587278406956:2764], Recipient [1:7439791561508602191:2204]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.395481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.395483Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.395490Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791587278406952:2761], Recipient [1:7439791561508602191:2204]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2024-11-21T17:51:03.395491Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.400872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.400966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.401027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.401075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.401087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.401095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:03.401102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:03.401127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.401263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:03.401282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T17:51:03.401292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:03.401481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusAccepted TxId: 281474976715674 SchemeshardId: 72057594046644480 PathId: 13, at schemeshard: 72057594046644480 2024-11-21T17:51:03.401510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/test-topic 2024-11-21T17:51:03.401522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644 ... 30279:2464]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T17:51:23.407766Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T17:51:23.407849Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][origin] Send TEvPeriodicTopicStats PathId: 14 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2024-11-21T17:51:23.407899Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][origin] ProcessPendingStats. 
PendingUpdates size 0 2024-11-21T17:51:23.407942Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [4:7439791665984430069:2437], Recipient [4:7439791640214625173:2149]: NKikimrPQ.TEvPeriodicTopicStats PathId: 14 Generation: 1 Round: 1 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2024-11-21T17:51:23.407951Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T17:51:23.407955Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 14] DataSize 0 UsedReserveSize 0 2024-11-21T17:51:23.407960Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099999s, queue# 1 2024-11-21T17:51:23.407969Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [4:7439791665984430069:2437], Recipient [4:7439791640214625173:2149]: NKikimrSchemeOp.TDescribePath PathId: 14 SchemeshardId: 72057594046644480 2024-11-21T17:51:23.407975Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:23.409960Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [4:7439791665984430072:2438], Partition 0, Sender [4:7439791665984430137:2443], Recipient [4:7439791665984430133:2441], Cookie: 0 2024-11-21T17:51:23.409977Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [4:7439791665984430137:2443], Recipient [4:7439791665984430133:2441]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:23.409979Z node 4 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:23.413541Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439791665984430072:2438], Partition 0, Sender [0:0:0], Recipient [4:7439791665984430133:2441], Cookie: 0 2024-11-21T17:51:23.413551Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439791665984430133:2441]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.413560Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.413566Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:23.413577Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:23.413583Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:23.413586Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T17:51:23.418528Z node 4 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T17:51:23.418561Z node 4 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/dir/origin" 2024-11-21T17:51:23.418599Z node 4 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/dir/origin 2024-11-21T17:51:23.421753Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [4:7439791665984430199:2456], Partition 2, Sender [4:7439791665984430289:2469], Recipient [4:7439791665984430279:2464], Cookie: 0 2024-11-21T17:51:23.421765Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [4:7439791665984430289:2469], Recipient [4:7439791665984430279:2464]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:23.421767Z node 4 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:23.421771Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [4:7439791665984430207:2457], Partition 1, Sender [4:7439791665984430288:2468], Recipient [4:7439791665984430282:2466], Cookie: 0 2024-11-21T17:51:23.421773Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [4:7439791665984430288:2468], Recipient [4:7439791665984430282:2466]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:23.421774Z node 4 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:23.424815Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439791665984430207:2457], Partition 1, Sender [0:0:0], Recipient [4:7439791665984430282:2466], Cookie: 0 2024-11-21T17:51:23.424822Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439791665984430199:2456], Partition 2, Sender [0:0:0], Recipient [4:7439791665984430279:2464], Cookie: 0 2024-11-21T17:51:23.424829Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439791665984430279:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.424831Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.424834Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439791665984430282:2466]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.424837Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:23.424837Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.424843Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:23.424847Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:23.424849Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:23.424851Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T17:51:23.424852Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:23.424854Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:23.424857Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:23.508310Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [4:7439791640214625173:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:23.508328Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:23.508330Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T17:51:23.508332Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T17:51:23.508350Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2024-11-21T17:51:23.508773Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [4:7439791640214625173:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:23.508779Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:23.508780Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T17:51:23.513922Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439791665984430072:2438], Partition 0, Sender [0:0:0], Recipient [4:7439791665984430133:2441], Cookie: 0 2024-11-21T17:51:23.513943Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439791665984430133:2441]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.513947Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.513956Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:23.513975Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:23.513993Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:23.513997Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T17:51:23.525176Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439791665984430207:2457], Partition 1, Sender [0:0:0], Recipient [4:7439791665984430282:2466], Cookie: 0 2024-11-21T17:51:23.525182Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439791665984430199:2456], Partition 2, Sender [0:0:0], Recipient [4:7439791665984430279:2464], Cookie: 0 2024-11-21T17:51:23.525191Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439791665984430279:2464]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.525191Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439791665984430282:2466]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.525193Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.525194Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:23.525200Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:23.525201Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:23.525211Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:23.525213Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:23.525214Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:23.525216Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:23.525216Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:23.525220Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:50:54.995902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:54.997461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.997474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:54.997479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:54.997483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:54.997485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:54.997492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:54.999575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:55.027333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:55.027350Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:55.036180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:55.036742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:55.036775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:55.046882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:55.059230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:55.059334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.087969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:55.102254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.155971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:55.155980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.155985Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:55.155995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.162950Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:50:55.184893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:55.203589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:55.219150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:55.219171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.219969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:55.220018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:55.220032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:55.220037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:55.220398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:55.220412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:55.221343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.221357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.221370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.221996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 
} ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:55.222353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:55.222394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:55.222557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:55.222596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.222650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:55.222657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:55.222680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.222692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:55.223052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:55.223058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:55.223093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:55.223098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:50:55.231209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:55.231224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:55.231235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:55.231250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:55.231259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:55.231263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:55.231265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T17:50:55.231278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:55.231282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:55.231284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:50:55.231572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:50:55.231583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:50:55.231586Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:50:55.231589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:55.231601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... EMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72075186233409549 2024-11-21T17:51:29.467565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:51:29.467569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2024-11-21T17:51:29.467578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet72075186233409549 2024-11-21T17:51:29.467606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2024-11-21T17:51:29.467623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2024-11-21T17:51:29.467631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T17:51:29.467804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T17:51:29.467833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2024-11-21T17:51:29.468091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 2] 2024-11-21T17:51:29.468124Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:658:2573], at schemeshard: 72075186233409549, txId: 107, path id: 1 2024-11-21T17:51:29.468133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:658:2573], at schemeshard: 72075186233409549, txId: 107, path id: 2 2024-11-21T17:51:29.468144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2024-11-21T17:51:29.468164Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T17:51:29.468371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T17:51:29.468381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T17:51:29.468384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2024-11-21T17:51:29.468388Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2024-11-21T17:51:29.468391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2024-11-21T17:51:29.468472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T17:51:29.468479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T17:51:29.468481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2024-11-21T17:51:29.468483Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:51:29.468485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2024-11-21T17:51:29.468490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T17:51:29.468830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 
107:0 ProgressState, at schemeshard: 72075186233409549 2024-11-21T17:51:29.468917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T17:51:29.468935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T17:51:29.468937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:51:29.468940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T17:51:29.468950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:804:2683] message: TxId: 107 2024-11-21T17:51:29.468953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:51:29.468956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T17:51:29.468959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T17:51:29.468973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2024-11-21T17:51:29.469235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2024-11-21T17:51:29.469276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2024-11-21T17:51:29.469446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:51:29.469451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2190:4040] TestWaitNotification: OK eventTxId 107 2024-11-21T17:51:29.481047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 766 RawX2: 4294969954 } TabletId: 72075186233409552 State: 4 2024-11-21T17:51:29.487553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2024-11-21T17:51:29.488016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2024-11-21T17:51:29.488123Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2024-11-21T17:51:29.488726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2024-11-21T17:51:29.488780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2024-11-21T17:51:29.489111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2024-11-21T17:51:29.489118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2024-11-21T17:51:29.489129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 
2024-11-21T17:51:29.489470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2024-11-21T17:51:29.489477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2024-11-21T17:51:29.489644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2024-11-21T17:51:29.603711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2024-11-21T17:51:29.603749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2024-11-21T17:51:29.603765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2024-11-21T17:51:29.603779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2024-11-21T17:51:29.603785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2024-11-21T17:51:29.603793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2024-11-21T17:51:29.603802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T17:51:29.603808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T17:51:29.603816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T17:51:29.655772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:29.655885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.026000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2024-11-21T17:51:29.656654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2024-11-21T17:51:29.656701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TouchIndexAfterMoveIndexRead [GOOD] Test command err: Trying to start YDB, gRPC: 24128, MsgBus: 18865 2024-11-21T17:51:05.734460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791595702003878:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:05.776470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fc0/r3tmp/tmp7SNgJc/pdisk_1.dat 2024-11-21T17:51:05.796140Z node 1 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24128, node 1 2024-11-21T17:51:05.828396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:05.828408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:05.828410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:05.828445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18865 TClient is connected to server localhost:18865 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:05.878335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:05.878354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:05.879265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:05.890412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.892385Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:05.900173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.967609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.994286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.011733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.072753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599996972564:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.072781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.138422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.146460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.156740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.170648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.182646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.199345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.222513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599996973079:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.222534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.222595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599996973084:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.223285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:06.230683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791599996973086:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:51:06.492426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13990, MsgBus: 8372 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fc0/r3tmp/tmpq9aTDi/pdisk_1.dat 2024-11-21T17:51:06.842086Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:06.843608Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 13990, node 2 2024-11-21T17:51:06.852467Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.852482Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.852484Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.852528Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8372 TClient is connected to server localhost:8372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:06.916719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.919813Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:06.922570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:06.922604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:06.923679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:06.936100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.954737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:06.977265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.043294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.161439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791602726345754:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.161484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource po ... loadService] [TPoolFetcherActor] ActorId: [4:7439791610361142278:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.361816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.364616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.371271Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.384107Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.399862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.419041Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.431209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.447866Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791610361142772:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.447899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.447955Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791610361142777:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.448844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:09.457838Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791610361142779:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:09.744295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:51:09.769194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.832669Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 31170, MsgBus: 9126 2024-11-21T17:51:10.081775Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791614904478268:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:10.081985Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fc0/r3tmp/tmpT4VsBY/pdisk_1.dat 2024-11-21T17:51:10.092735Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31170, node 5 2024-11-21T17:51:10.104081Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:10.104098Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:10.104100Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:10.104142Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9126 TClient is connected to server localhost:9126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:10.182474Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:10.182505Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:10.183561Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:51:10.184901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.185729Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.193435Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.209064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.228469Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:10.244503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.413277Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791614904479827:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.413314Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.417568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.424005Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.431141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.437835Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.445203Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.452342Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.460374Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791614904480318:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.460400Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.460438Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791614904480323:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.461032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.465435Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791614904480325:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:10.677668Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:51:10.693228Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.736581Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AddChangefeedNegative [GOOD] Test command err: Trying to start YDB, gRPC: 3580, MsgBus: 27723 2024-11-21T17:51:06.543319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791599084055047:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fa9/r3tmp/tmpMxawkh/pdisk_1.dat 2024-11-21T17:51:06.578438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:06.617895Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:06.640079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:06.640112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3580, node 1 2024-11-21T17:51:06.641519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:06.660726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.660751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.660752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.660788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27723 TClient is connected to server localhost:27723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:06.740894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.748674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, timestamp Timestamp NOT NULL, ui64_type Uint64 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:51:06.893563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599084055499:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.893588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.926060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.935015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:06.935101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:06.935152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:06.935179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:06.935203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:06.935242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:51:06.935265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:51:06.935293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:51:06.935318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:51:06.935341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:51:06.935364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:51:06.935388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439791599084055576:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:51:06.936743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:51:06.936762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:51:06.936775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:51:06.936780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:51:06.936798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:51:06.936807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:51:06.936820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:51:06.936836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:51:06.936845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:51:06.936850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:51:06.936856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:51:06.936860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:51:06.936926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:51:06.936932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:51:06.936948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:51:06.936953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:51:06.936963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:51:06.936968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:51:06.936985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:51:06.936989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:51:06.937001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Re ... 51:19.372312Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:19.380883Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:19.396932Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:19.405857Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:19.492733Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439791653264915376:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:19.492753Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:19.497434Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:19.503739Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:19.510095Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:19.516623Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:19.523546Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:19.530964Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:19.539394Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439791653264915866:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:19.539424Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:19.539425Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439791653264915871:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:19.539826Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:19.543940Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439791653264915873:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:19.666717Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14514, MsgBus: 2987 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fa9/r3tmp/tmpAOPBhF/pdisk_1.dat 2024-11-21T17:51:19.941703Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:19.941778Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 14514, node 17 2024-11-21T17:51:19.949794Z node 17 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:19.949805Z node 17 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:19.949806Z node 17 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:19.949838Z node 17 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2987 TClient is connected to server localhost:2987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:20.035078Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:20.035110Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:20.036098Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:20.036371Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:20.043663Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:20.050850Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:20.064692Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:20.077991Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:20.193923Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7439791660618704066:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:20.193947Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:20.199749Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.205784Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.217543Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.223677Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.231066Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.238449Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.246621Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7439791660618704571:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:20.246641Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:20.246645Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:7439791660618704576:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:20.247184Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:20.251268Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:7439791660618704578:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:20.432002Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.447354Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2024-11-21T17:51:02.598008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791579556645446:2070];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.598080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001211/r3tmp/tmph0FU6K/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2101, node 1 2024-11-21T17:51:02.696696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:02.696722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:02.697188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:02.697205Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:02.698352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:02.738196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:02.738213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:02.738215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:02.738280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:02.768220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.768971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:02.768986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.769872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:02.769963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:02.769969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:51:02.773026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:02.773044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:02.773436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:02.774615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:02.775840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211462821, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:02.775857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:02.775934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:02.776476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:02.776537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:02.776555Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:02.776574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:02.776588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:02.776607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:02.777354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:02.777374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:02.777379Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:02.777411Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:51:02.858778Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:47450) has now valid token of root@builtin 2024-11-21T17:51:02.871535Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T17:51:03.443260Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791585591020377:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:03.443299Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001211/r3tmp/tmp31Mps2/pdisk_1.dat 2024-11-21T17:51:03.465171Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13410, node 4 2024-11-21T17:51:03.485116Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:03.485136Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:03.485138Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:03.485181Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:03.543693Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:03.543738Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:03.545376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:03.547731Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.547862Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.547869Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.548392Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:03.548441Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:03.548445Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:03.548973Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:03.548982Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:03.549413Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.550301Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211463598, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:03.550312Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:03.550378Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 waiting... 2024-11-21T17:51:03.550876Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:03.550930Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:03.550942Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:03.550956Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:03.550965Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:03.550978Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication ... 
=timeout;self_id=[25:7439791616012110073:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:15.187347Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E1121 17:51:20.315787387 756915 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.320599881 746761 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.326307147 746761 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.330121105 746761 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.335427074 746761 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.339417384 756919 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.344902167 746761 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.348766143 756919 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.355103426 756965 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.359793405 756965 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.366088747 756965 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:20.370709312 746761 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
2024-11-21T17:51:20.880777Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7439791660006824470:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:20.881009Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001211/r3tmp/tmpMAzkHT/pdisk_1.dat 2024-11-21T17:51:20.892096Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15646, node 28 2024-11-21T17:51:20.903873Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:20.903884Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:20.903887Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:20.903911Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:20.981295Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:20.981327Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:20.982906Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:20.984253Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.984363Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:20.984372Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.984747Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:20.984801Z node 28 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:20.984809Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:51:20.985114Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:20.985123Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:20.985310Z node 28 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:20.985434Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:20.986142Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211481035, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:20.986154Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:20.986209Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:20.986516Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:20.986557Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:20.986569Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:20.986586Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:20.986598Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:20.986611Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:20.986748Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:20.986758Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:20.986761Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:20.986770Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:51:25.880945Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7439791660006824470:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:25.880981Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E1121 17:51:31.000905662 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.005867143 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E1121 17:51:31.012177071 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.016657052 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.023063776 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.027819106 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.033405020 762275 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.037457799 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.042770156 757578 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.047204570 757573 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.053655332 757572 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E1121 17:51:31.058091578 757573 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
>> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> TReplicationTests::CreateSequential >> TReplicationTests::Create >> KqpScanSpilling::SelfJoinQueryService >> KqpUserConstraint::KqpReadNull+UploadNull ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithDecimalColumn [GOOD] Test command err: Trying to start YDB, gRPC: 64024, MsgBus: 12075 2024-11-21T17:51:05.628359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791592634456892:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:05.628528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fc6/r3tmp/tmpg1yW9V/pdisk_1.dat 2024-11-21T17:51:05.676373Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64024, node 1 2024-11-21T17:51:05.688524Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:05.688545Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:05.688547Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:05.688582Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12075 TClient is connected to server localhost:12075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:05.728873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:05.728905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:05.734365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:05.755181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:05.758022Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:05.819865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.850132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.886244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.897777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.943080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791592634458420:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.943149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:05.977244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:05.985610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:05.999351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.007246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.023698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.036124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.053806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791596929426219:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.053835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.053917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791596929426224:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.054699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:06.070226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791596929426226:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:06.248413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16077, MsgBus: 22166 2024-11-21T17:51:06.553862Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791596672388459:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:06.554078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fc6/r3tmp/tmpGv16KG/pdisk_1.dat 2024-11-21T17:51:06.564578Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16077, node 2 2024-11-21T17:51:06.572554Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.572579Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.572581Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.572619Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22166 TClient is connected to server localhost:22166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:06.654107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:06.654142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:06.655264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:06.656484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.660754Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:06.669712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:06.729465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.775794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.789574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.960784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791596672390009:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access p ... ype: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.065922Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791613004821318:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.065963Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.068270Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.078547Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.088048Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.103915Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.116625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.130023Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.148171Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791613004821824:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.148206Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.148337Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791613004821831:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:09.149161Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:09.159705Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791613004821833:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:09.580305Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:09.600811Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15844, MsgBus: 30841 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fc6/r3tmp/tmpahvMGR/pdisk_1.dat 2024-11-21T17:51:09.772685Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:09.772984Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15844, node 5 2024-11-21T17:51:09.783401Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:09.783413Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:09.783415Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:09.783447Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30841 TClient is connected to server localhost:30841 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:09.863235Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:09.863262Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:09.864328Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:09.865534Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.869745Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:09.879081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.894943Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:09.907530Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:10.028619Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791616553443029:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.028638Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.033922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.039464Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.045964Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.053219Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.059773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.067076Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.076280Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791616553443533:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.076307Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.076351Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439791616553443538:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:10.076976Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:10.080066Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439791616553443540:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:10.261548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.273480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.284194Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.297346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:10.311221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ControlPlane_CDC [GOOD] Test command err: 2024-11-21T17:50:58.848826Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791562029402502:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:58.848841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001df7/r3tmp/tmpVtKfTD/pdisk_1.dat 2024-11-21T17:50:58.891261Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:58.906864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10929, node 1 2024-11-21T17:50:58.924655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001df7/r3tmp/yandexSI99j3.tmp 2024-11-21T17:50:58.924670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001df7/r3tmp/yandexSI99j3.tmp 2024-11-21T17:50:58.924727Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001df7/r3tmp/yandexSI99j3.tmp 2024-11-21T17:50:58.924788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:58.929804Z INFO: TTestServer started on Port 19409 GrpcPort 10929 TClient is connected to server localhost:19409 PQClient connected to localhost:10929 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:50:58.950423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.950451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.951482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:58.986279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.992895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:50:59.121403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791566324370560:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:59.121425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791566324370552:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:59.121441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:59.122093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:59.122169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791566324370595:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:59.122191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:59.123563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791566324370566:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:50:59.151237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:59.158114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:59.174497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791566324370890:2584] 2024-11-21T17:51:03.849107Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791562029402502:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:03.849139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:04.433896Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:04.449051Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:04.452617Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791587799207699:2759], Recipient [1:7439791562029402895:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:04.452636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:04.452639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:04.452660Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791587799207695:2756], Recipient [1:7439791562029402895:2187]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2024-11-21T17:51:04.452662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:04.467465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "autoscalit-topic" TotalGroupCount: 5 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
PartitionStrategy { MinPartitionCount: 5 MaxPartitionCount: 10 ScaleThresholdSeconds: 500 ScaleUpPartitionWriteSpeedThresholdPercent: 80 ScaleDownPartitionWriteSpeedThresholdPercent: 20 PartitionStrategyType: CAN_SPLIT } } } } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:04.467680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/autoscalit-topic, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:04.467755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: autoscalit-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:04.467771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:04.467777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:04.467784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:04.467787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:04.467789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:04.467794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 5 2024-11-21T17:51:04.467798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 6 2024-11-21T17:51:04.467809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 7 2024-11-21T17:51:04.467840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:04.468061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:04.468072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T17:51:04.468080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 8 2024-11-21T17:51:04.468338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusAccepted Tx ... 
34/Root/origin/feed/streamImpl\322\001\000\332\001\000\342\001\000\352\001\005/Root\372\001\010\010\000\030\0010\0018\000\220\002\001\232\002\014\010\003\020k\030\003 \007(\0050\001\242\002\020\010\000\030\0010\0018\205\200\204\200\200\200\204\200\001j\000r\022\t\213v\330\330\032s?g\021W\010\000\000\005\000\000\000\202\001\034\n\032\010\000\022\026\n\022\00072075186224037892\020\000" } CmdWrite { Key: "_txinfo" Value: "\020\236\351\346\376\2642\030\233\247\200\200\200\200@(\240\215\0060\236\351\346\376\26428\233\247\200\200\200\200@" } CmdWrite { Key: "_config" Value: "\022\024\010\377\377\377\377\007\030\200\243\0058\200\200@@\200\200@h\001\030\000 2024-11-21T17:51:27.853391Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:51:27.853394Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:27.853396Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715675, path id: [OwnerId: 72057594046644480, LocalPathId: 15] 2024-11-21T17:51:27.853419Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:27.853425Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:7439791668583757609:2222], at schemeshard: 72057594046644480, txId: 281474976715675, path id: 15 2024-11-21T17:51:27.853428Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T17:51:27.853430Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715675:0 ProgressState 2024-11-21T17:51:27.853434Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:27.853439Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715675:0 progress is 1/1 2024-11-21T17:51:27.853441Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715675 ready parts: 1/1 2024-11-21T17:51:27.853440Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7439791690058595310:2446], Recipient [5:7439791690058595146:2446]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T17:51:27.853443Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715675, ready parts: 1/1, is published: false 2024-11-21T17:51:27.853445Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715675 ready parts: 1/1 2024-11-21T17:51:27.853447Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715675:0 2024-11-21T17:51:27.853448Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715675:0 2024-11-21T17:51:27.853467Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 5 2024-11-21T17:51:27.853473Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715675, publications: 1, subscribers: 1 2024-11-21T17:51:27.853474Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715675, [OwnerId: 72057594046644480, LocalPathId: 15], 3 2024-11-21T17:51:27.853520Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [5:7439791668583757609:2222], Recipient [5:7439791668583757451:2135]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 15] Version: 3 } 2024-11-21T17:51:27.853526Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:51:27.853532Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715675 2024-11-21T17:51:27.853542Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715675 2024-11-21T17:51:27.853547Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715675 2024-11-21T17:51:27.853549Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715675, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 3 2024-11-21T17:51:27.853550Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2024-11-21T17:51:27.853560Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715675, subscribers: 1 2024-11-21T17:51:27.853563Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7439791690058595146:2446], Recipient [5:7439791690058595146:2446]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T17:51:27.853565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [5:7439791690058595274:2458] 2024-11-21T17:51:27.853567Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:27.853578Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7439791690058595146:2446], Recipient [5:7439791690058595146:2446]: NKikimrClient.TResponse Status: 1 Cookie: 5 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T17:51:27.853584Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T17:51:27.853585Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:51:27.853587Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715675, State EXECUTED 2024-11-21T17:51:27.853588Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T17:51:27.853588Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:51:27.853589Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715675, NewState WAIT_RS_ACKS 2024-11-21T17:51:27.853590Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2024-11-21T17:51:27.853591Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T17:51:27.853592Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976715675] PredicateAcks: 0/0 2024-11-21T17:51:27.853594Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976715675 to the list for deletion 2024-11-21T17:51:27.853596Z node 5 :PERSQUEUE DEBUG: [PQ: 
72075186224037893] TxId 281474976715675, NewState DELETING 2024-11-21T17:51:27.853598Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976715675 2024-11-21T17:51:27.853601Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T17:51:27.853602Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715675 2024-11-21T17:51:27.853603Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:51:27.853605Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7439791690058595313:2463], Recipient [5:7439791690058595146:2446]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T17:51:27.853609Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [5:7439791690058595274:2458] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715675 at schemeshard: 72057594046644480 2024-11-21T17:51:27.853629Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [5:7439791690058595146:2446], Recipient [5:7439791690058595146:2446]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715675" IncludeFrom: true To: "tx_00000281474976715675" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\236\351\346\376\2642\030\233\247\200\200\200\200@(\240\215\0060\236\351\346\376\26428\233\247\200\200\200\200@" } 2024-11-21T17:51:27.853660Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7439791690058595316:2446], Recipient [5:7439791690058595146:2446]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T17:51:27.853707Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7439791690058595146:2446], Recipient [5:7439791690058595146:2446]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T17:51:27.853712Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T17:51:27.853713Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:51:27.853714Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715675, State DELETING 2024-11-21T17:51:27.853715Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] empty tx queue 2024-11-21T17:51:27.853716Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715675 2024-11-21T17:51:27.853724Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7439791690058595146:2446], Recipient [5:7439791690058595146:2446]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T17:51:27.853746Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7439791690058595318:2464], Recipient [5:7439791690058595146:2446]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T17:51:27.853771Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7439791690058595283:2902], Recipient [5:7439791668583757451:2135]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:27.853778Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:27.853779Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T17:51:27.854372Z node 5 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T17:51:27.854391Z node 5 :PQ_READ_PROXY 
DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/origin/feed" 2024-11-21T17:51:27.854417Z node 5 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/origin/feed 2024-11-21T17:51:27.946401Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791690058595146:2446], Partition 0, Sender [0:0:0], Recipient [5:7439791690058595211:2450], Cookie: 0 2024-11-21T17:51:27.946427Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791690058595211:2450]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:27.946431Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:27.946443Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:27.946467Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:27.946469Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:27.946474Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2024-11-21T17:51:09.581225Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:09.581245Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:51:09.586448Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001}. Step TInitConfigStep 2024-11-21T17:51:09.586508Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001}. Step TInitInternalFieldsStep 2024-11-21T17:51:09.586557Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [1:176:2191] 2024-11-21T17:51:09.586714Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001}. Completed. 
2024-11-21T17:51:09.586722Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [1:176:2191] 2024-11-21T17:51:09.586731Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:09.586792Z node 1 :PERSQUEUE INFO: new Cookie owner1|8a56094e-cc8f59ff-415ac648-8420131e_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2024-11-21T17:51:09.586815Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {2, {0, 10}, 100001} Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:09.586886Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2024-11-21T17:51:09.586921Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2024-11-21T17:51:09.586970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:09.617608Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T17:51:09.617674Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2024-11-21T17:51:09.617719Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 2024-11-21T17:51:09.886855Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2024-11-21T17:51:09.886926Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2024-11-21T17:51:09.886991Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:09.927888Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T17:51:09.927971Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2024-11-21T17:51:09.928003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 2024-11-21T17:51:10.117007Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2024-11-21T17:51:10.117050Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2024-11-21T17:51:10.117098Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:10.157857Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T17:51:10.157892Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2024-11-21T17:51:10.157907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 6, partNo: 0, Offset: 102 is stored on disk Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 2024-11-21T17:51:10.374976Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 7 partNo 0 2024-11-21T17:51:10.375023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob sourceId 'SourceId' seqNo 7 partNo 0 result is X0000100001_00000000000000000100_00000_0000000003_00000 size 312 2024-11-21T17:51:10.375034Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} old key X0000100001_00000000000000000100_00000_0000000003_00000 new key D0000100001_00000000000000000100_00000_0000000003_00000 size 312 WTime 3231 2024-11-21T17:51:10.375056Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 7 partNo 0 FormedBlobsCount 1 NewHead: Offset 110 PartNo 0 PackedSize 118 count 1 nextOffset 111 batches 1 2024-11-21T17:51:10.375098Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 110,1 HeadOffset 100 endOffset 103 curOffset 111 D0000100001_00000000000000000110_00000_0000000001_00000| size 104 WTime 3231 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:10.406694Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T17:51:10.406731Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2024-11-21T17:51:10.406749Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 7, partNo: 0, Offset: 110 is stored on disk Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured ... 4ac5db-de2cdfae-229a6d90-70c27e43_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T17:51:23.995229Z node 4 :PERSQUEUE INFO: new Cookie src2|94059aa3-9ece7602-fc0d717e-5801b516_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src2 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 3 and act no: 4 Create immediate tx with id = 5 and act no: 6 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 6 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Send disk status response with cookie: 0 Got batch complete: 2 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 5 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 5 Got batch complete: 10 Send disk status response with cookie: 0 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 3 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 10 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2024-11-21T17:51:28.051024Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:28.051053Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:28.054234Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] Test command err: 2024-11-21T17:51:26.033461Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:26.033480Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:51:26.035929Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.036095Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:176:2191] Captured TEvents::TSystem::Wakeup to 
PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Create distr tx with id = 0 and act no: 1 Got batch complete: 1001 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000000" Value: "\010\001\020\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\350\007\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\350\003\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\320\017\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\352\007\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\352\003\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2024-11-21T17:51:26.280185Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:26.280206Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.283458Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.283772Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [2:177:2192] 2024-11-21T17:51:26.283832Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2024-11-21T17:51:26.283852Z node 2 :PERSQUEUE INFO: new Cookie owner1|eace9edc-d16c38b6-5f40cbec-bae82128_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:26.522616Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:26.522641Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:51:26.525945Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.526021Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. 
Step TInitInternalFieldsStep 2024-11-21T17:51:26.526065Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:177:2192] 2024-11-21T17:51:26.526252Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.526276Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitMetaStep 2024-11-21T17:51:26.526291Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitInfoRangeStep 2024-11-21T17:51:26.526365Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDataRangeStep 2024-11-21T17:51:26.526398Z node 3 :PERSQUEUE DEBUG: Got data topic Root/PQ/rt3.dc1--account--topic partition 1 offset 0 count 10 size 0 so 0 eo 10 d0000000001_00000000000000000000_00000_0000000010_00000 2024-11-21T17:51:26.526408Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDataStep 2024-11-21T17:51:26.526412Z node 3 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Completed. 2024-11-21T17:51:26.526418Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:177:2192] 2024-11-21T17:51:26.526426Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 10 Head Offset 10 PartNo 0 PackedSize 0 count 0 nextOffset 10 batches 0 SYNC INIT DATA KEY: d0000000001_00000000000000000000_00000_0000000010_00000 size 0 2024-11-21T17:51:26.526477Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:51:26.526482Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 send read request for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:51:26.526506Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2024-11-21T17:51:26.526526Z node 3 :PERSQUEUE INFO: new Cookie owner1|41466749-2190cdec-62e3871-6e24cae1_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:26.526578Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 offset 3 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 3 2024-11-21T17:51:26.526591Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 added 1 blobs, size 0 count 7 last offset 4 2024-11-21T17:51:26.526600Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Reading cookie 0. Send blob request. 
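The d0000000001_... strings in the entries above are the partition blob keys this test reads back and writes (the first letter flips from the temporary to the persisted form on rename, e.g. x.../d... here and X.../D... for the {2, {0, 10}, 100001} partition earlier in the log). As a reading aid only — a minimal sketch inferred from the key strings visible in this log, not YDB's own key code; the meaning of the trailing five-digit group (taken here as an internal-parts counter) is an assumption — the fixed field widths can be decoded like this:

// Decodes a PQ blob key of the shape seen in this log, e.g.
//   d0000000001_00000000000000000100_00000_0000000001_00000|
// Field widths are inferred from the log; trailing-group meaning is assumed.
#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <string>

struct TBlobKey {
    char Type;              // 'd'/'D' persisted data, 'x'/'X' pre-rename form (as observed above)
    uint64_t Partition;     // 10 decimal digits
    uint64_t Offset;        // 20 decimal digits
    uint32_t PartNo;        // 5 decimal digits
    uint32_t Count;         // 10 decimal digits
    uint32_t InternalParts; // 5 decimal digits (assumption)
    bool HeadSuffix;        // trailing '|' as printed in the log
};

TBlobKey ParseBlobKey(const std::string& key) {
    // Layout: T PPPPPPPPPP _ OOOOOOOOOOOOOOOOOOOO _ NNNNN _ CCCCCCCCCC _ IIIII [|]
    const size_t minLen = 1 + 10 + 1 + 20 + 1 + 5 + 1 + 10 + 1 + 5;
    if (key.size() < minLen) {
        throw std::invalid_argument("key too short: " + key);
    }
    TBlobKey k;
    k.Type = key[0];
    k.Partition = std::stoull(key.substr(1, 10));
    k.Offset = std::stoull(key.substr(12, 20));
    k.PartNo = static_cast<uint32_t>(std::stoul(key.substr(33, 5)));
    k.Count = static_cast<uint32_t>(std::stoul(key.substr(39, 10)));
    k.InternalParts = static_cast<uint32_t>(std::stoul(key.substr(50, 5)));
    k.HeadSuffix = key.back() == '|';
    return k;
}

int main() {
    // Key taken verbatim from the SYNC INIT DATA entry above.
    const auto k = ParseBlobKey("d0000000001_00000000000000000000_00000_0000000010_00000");
    std::cout << "type=" << k.Type << " partition=" << k.Partition
              << " offset=" << k.Offset << " partNo=" << k.PartNo
              << " count=" << k.Count << " internalParts=" << k.InternalParts
              << " head=" << std::boolalpha << k.HeadSuffix << "\n";
    return 0;
}

For that key it prints partition=1 offset=0 partNo=0 count=10, matching the "offset 0 count 10 ... so 0 eo 10" reported by the TInitDataRangeStep entry above.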
Send disk status response with cookie: 0 2024-11-21T17:51:26.526639Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:51:26.526659Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2024-11-21T17:51:26.526705Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2024-11-21T17:51:26.526717Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 1 2024-11-21T17:51:26.526732Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2024-11-21T17:51:26.526755Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2024-11-21T17:51:26.526801Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 10 endOffset 10 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:26.557360Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T17:51:26.557397Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2024-11-21T17:51:26.557424Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:26.842290Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2024-11-21T17:51:26.862611Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. 
Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2024-11-21T17:51:26.862667Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2024-11-21T17:51:26.862714Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2024-11-21T17:51:26.862759Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2024-11-21T17:51 ... tured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Create distr tx with id = 8 and act no: 9 Create distr tx with id = 10 and act no: 11 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 17 Wait batch completion Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait kv request Wait tx committed for tx 0 Wait immediate tx complete 3 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2024-11-21T17:51:26.322992Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:26.323017Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.377250Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitConfigStep 2024-11-21T17:51:26.377344Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:51:26.377461Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:177:2192] 2024-11-21T17:51:26.377647Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.377680Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitMetaStep 2024-11-21T17:51:26.377696Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. 
Step TInitInfoRangeStep 2024-11-21T17:51:26.377840Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDataRangeStep 2024-11-21T17:51:26.377882Z node 1 :PERSQUEUE DEBUG: Got data topic Root/PQ/rt3.dc1--account--topic partition 1 offset 0 count 10 size 0 so 0 eo 10 d0000000001_00000000000000000000_00000_0000000010_00000 2024-11-21T17:51:26.377892Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Step TInitDataStep 2024-11-21T17:51:26.377896Z node 1 :PERSQUEUE DEBUG: Initializing topic 'Root/PQ/rt3.dc1--account--topic' partition 1. Completed. 2024-11-21T17:51:26.377902Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:177:2192] 2024-11-21T17:51:26.377910Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 10 Head Offset 10 PartNo 0 PackedSize 0 count 0 nextOffset 10 batches 0 SYNC INIT DATA KEY: d0000000001_00000000000000000000_00000_0000000010_00000 size 0 2024-11-21T17:51:26.377967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:51:26.377973Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 send read request for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:51:26.378018Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2024-11-21T17:51:26.378044Z node 1 :PERSQUEUE INFO: new Cookie owner1|61030e2-f783796a-53bed272-1ba5914e_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:26.378103Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 offset 3 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 3 2024-11-21T17:51:26.378115Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] read cookie 0 added 1 blobs, size 0 count 7 last offset 4 2024-11-21T17:51:26.378124Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Reading cookie 0. Send blob request. Send disk status response with cookie: 0 2024-11-21T17:51:26.378162Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:51:26.378195Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2024-11-21T17:51:26.378251Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2024-11-21T17:51:26.378261Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2024-11-21T17:51:26.378274Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2024-11-21T17:51:26.378304Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2024-11-21T17:51:26.378344Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 10 endOffset 10 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:26.408970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T17:51:26.409006Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2024-11-21T17:51:26.409040Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:26.704562Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2024-11-21T17:51:26.724851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2024-11-21T17:51:26.724890Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 2 2024-11-21T17:51:26.724922Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2024-11-21T17:51:26.724951Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2024-11-21T17:51:26.724973Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1329 2024-11-21T17:51:26.724998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2024-11-21T17:51:26.725029Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1329 2024-11-21T17:51:26.745332Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 2024-11-21T17:51:26.745361Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2024-11-21T17:51:26.745375Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk processed_blobs=41800 quoted_time=9.980000s Iteration 0 Iteration 1 Iteration 2 Iteration 3 Iteration 4 Iteration 5 Iteration 6 Iteration 7 Iteration 8 Iteration 9 Iteration 10 Iteration 11 Iteration 12 Iteration 13 Iteration 14 Iteration 15 Iteration 16 Iteration 17 Iteration 18 Iteration 19 Iteration 20 Iteration 21 Iteration 22 Iteration 23 Iteration 24 Iteration 25 Iteration 26 Iteration 27 Iteration 28 Iteration 29 Iteration 30 Iteration 31 Iteration 32 Iteration 33 Iteration 34 Iteration 35 Iteration 36 Iteration 37 Iteration 38 Iteration 39 Iteration 40 Iteration 41 Iteration 42 Iteration 43 Iteration 44 Iteration 45 Iteration 46 Iteration 47 Iteration 48 Iteration 49 Iteration 50 Iteration 51 Iteration 52 Iteration 53 Iteration 54 Iteration 55 Iteration 56 Iteration 57 Iteration 58 Iteration 59 Iteration 60 Iteration 61 Iteration 62 Iteration 63 Iteration 64 Iteration 65 Iteration 66 Iteration 67 Iteration 68 Iteration 69 Iteration 70 Iteration 71 Iteration 72 Iteration 73 Iteration 74 Iteration 75 Iteration 76 Iteration 77 Iteration 78 Iteration 79 Iteration 80 Iteration 81 Iteration 82 Iteration 83 Iteration 84 Iteration 85 Iteration 86 Iteration 87 Iteration 88 Iteration 89 Iteration 90 Iteration 91 Iteration 92 Iteration 93 Iteration 94 Iteration 95 Iteration 96 Iteration 97 Iteration 98 Iteration 99 Iteration 100 Iteration 101 Iteration 102 Iteration 103 Iteration 104 Iteration 105 Iteration 106 Iteration 107 Iteration 108 Iteration 109 Iteration 110 Iteration 111 
Iteration 112 Iteration 113 Iteration 114 Iteration 115 Iteration 116 Iteration 117 Iteration 118 Iteration 119 Iteration 120 Iteration 121 Iteration 122 Iteration 123 Iteration 124 Iteration 125 Iteration 126 Iteration 127 Iteration 128 Iteration 129 Iteration 130 Iteration 131 Iteration 132 Iteration 133 Iteration 134 Iteration 135 Iteration 136 Iteration 137 Iteration 138 Iteration 139 Iteration 140 Iteration 141 Iteration 142 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 Iteration 313 Iteration 314 Iteration 315 Iteration 316 Iteration 317 Iteration 318 Iteration 319 Iteration 320 Iteration 321 Iteration 322 Iteration 323 Iteration 324 Iteration 325 Iteration 326 Iteration 327 Iteration 328 Iteration 329 Iteration 330 Iteration 331 Iteration 332 Iteration 333 Iteration 334 Iteration 335 Iteration 336 Iteration 337 Iteration 338 Iteration 339 Iteration 340 Iteration 341 Iteration 342 Iteration 343 Iteration 344 Iteration 345 Iteration 346 Iteration 347 Iteration 348 Iteration 349 Iteration 350 Iteration 351 Iteration 352 Iteration 353 Iteration 354 Iteration 355 Iteration 356 Iteration 357 Iteration 358 Iteration 359 Iteration 360 Iteration 361 Iteration 362 Iteration 363 Iteration 364 Iteration 
365 Iteration 366 Iteration 367 Iteration 368 Iteration 369 Iteration 370 Iteration 371 Iteration 372 Iteration 373 Iteration 374 Iteration 375 Iteration 376 Iteration 377 Iteration 378 Iteration 379 Iteration 380 Iteration 381 Iteration 382 Iteration 383 Iteration 384 Iteration 385 Iteration 386 Iteration 387 Iteration 388 Iteration 389 Iteration 390 Iteration 391 Iteration 392 Iteration 393 Iteration 394 Iteration 395 Iteration 396 Iteration 397 Iteration 398 Iteration 399 Iteration 400 Iteration 401 Iteration 402 Iteration 403 Iteration 404 Iteration 405 Iteration 406 Iteration 407 Iteration 408 Iteration 409 Iteration 410 Iteration 411 Iteration 412 Iteration 413 Iteration 414 Iteration 415 Iteration 416 Iteration 417 Iteration 418 Iteration 419 Iteration 420 Iteration 421 Iteration 422 Iteration 423 Iteration 424 Iteration 425 Iteration 426 Iteration 427 Iteration 428 Iteration 429 Iteration 430 Iteration 431 Iteration 432 Iteration 433 Iteration 434 Iteration 435 Iteration 436 Iteration 437 Iteration 438 Iteration 439 Iteration 440 Iteration 441 Iteration 442 Iteration 443 Iteration 444 Iteration 445 Iteration 446 Iteration 447 Iteration 448 Iteration 449 Iteration 450 Iteration 451 Iteration 452 Iteration 453 Iteration 454 Iteration 455 Iteration 456 Iteration 457 Iteration 458 Iteration 459 Iteration 460 Iteration 461 Iteration 462 Iteration 463 Iteration 464 Iteration 465 Iteration 466 Iteration 467 Iteration 468 Iteration 469 Iteration 470 Iteration 471 Iteration 472 Iteration 473 Iteration 474 Iteration 475 Iteration 476 Iteration 477 Iteration 478 Iteration 479 Iteration 480 Iteration 481 Iteration 482 Iteration 483 Iteration 484 Iteration 485 Iteration 486 Iteration 487 Iteration 488 Iteration 489 Iteration 490 Iteration 491 Iteration 492 Iteration 493 Iteration 494 Iteration 495 Iteration 496 Iteration 497 Iteration 498 Iteration 499 Iteration 500 Iteration 501 Iteration 502 Iteration 503 Iteration 504 Iteration 505 Iteration 506 Iteration 507 Iteration 508 Iteration 509 Iteration 510 Iteration 511 Iteration 512 Iteration 513 Iteration 514 Iteration 515 Iteration 516 Iteration 517 Iteration 518 Iteration 519 Iteration 520 Iteration 521 Iteration 522 Iteration 523 Iteration 524 Iteration 525 Iteration 526 Iteration 527 Iteration 528 Iteration 529 Iteration 530 Iteration 531 Iteration 532 Iteration 533 Iteration 534 Iteration 535 Iteration 536 Iteration 537 Iteration 538 Iteration 539 Iteration 540 Iteration 541 Iteration 542 Iteration 543 Iteration 544 Iteration 545 Iteration 546 Iteration 547 Iteration 548 Iteration 549 Iteration 550 Iteration 551 Iteration 552 Iteration 553 Iteration 554 Iteration 555 Iteration 556 Iteration 557 Iteration 558 Iteration 559 Iteration 560 Iteration 561 Iteration 562 Iteration 563 Iteration 564 Iteration 565 Iteration 566 Iteration 567 Iteration 568 Iteration 569 Iteration 570 Iteration 571 Iteration 572 Iteration 573 Iteration 574 Iteration 575 Iteration 576 Iteration 577 Iteration 578 Iteration 579 Iteration 580 Iteration 581 Iteration 582 Iteration 583 Iteration 584 Iteration 585 Iteration 586 Iteration 587 Iteration 588 Iteration 589 Iteration 590 Iteration 591 Iteration 592 Iteration 593 Iteration 594 Iteration 595 Iteration 596 Iteration 597 Iteration 598 Iteration 599 Iteration 600 Iteration 601 Iteration 602 Iteration 603 Iteration 604 Iteration 605 Iteration 606 Iteration 607 Iteration 608 Iteration 609 Iteration 610 Iteration 611 Iteration 612 Iteration 613 Iteration 614 Iteration 615 Iteration 616 Iteration 617 Iteration 618 
Iteration 619 Iteration 620 Iteration 621 Iteration 622 Iteration 623 Iteration 624 Iteration 625 Iteration 626 Iteration 627 Iteration 628 Iteration 629 Iteration 630 Iteration 631 Iteration 632 Iteration 633 Iteration 634 Iteration 635 Iteration 636 Iteration 637 Iteration 638 Iteration 639 Iteration 640 Iteration 641 Iteration 642 Iteration 643 Iteration 644 Iteration 645 Iteration 646 Iteration 647 Iteration 648 Iteration 649 Iteration 650 Iteration 651 Iteration 652 Iteration 653 Iteration 654 Iteration 655 Iteration 656 Iteration 657 Iteration 658 Iteration 659 Iteration 660 Iteration 661 Iteration 662 Iteration 663 Iteration 664 Iteration 665 Iteration 666 Iteration 667 Iteration 668 Iteration 669 Iteration 670 Iteration 671 Iteration 672 Iteration 673 Iteration 674 Iteration 675 Iteration 676 Iteration 677 Iteration 678 Iteration 679 Iteration 680 Iteration 681 Iteration 682 Iteration 683 Iteration 684 Iteration 685 Iteration 686 Iteration 687 Iteration 688 Iteration 689 Iteration 690 Iteration 691 Iteration 692 Iteration 693 Iteration 694 Iteration 695 Iteration 696 Iteration 697 Iteration 698 Iteration 699 Iteration 700 Iteration 701 Iteration 702 Iteration 703 Iteration 704 Iteration 705 Iteration 706 Iteration 707 Iteration 708 Iteration 709 Iteration 710 Iteration 711 Iteration 712 Iteration 713 Iteration 714 Iteration 715 Iteration 716 Iteration 717 Iteration 718 Iteration 719 Iteration 720 Iteration 721 Iteration 722 Iteration 723 Iteration 724 Iteration 725 Iteration 726 Iteration 727 Iteration 728 Iteration 729 Iteration 730 Iteration 731 Iteration 732 Iteration 733 Iteration 734 Iteration 735 Iteration 736 Iteration 737 Iteration 738 Iteration 739 Iteration 740 Iteration 741 Iteration 742 Iteration 743 Iteration 744 Iteration 745 Iteration 746 Iteration 747 Iteration 748 Iteration 749 Iteration 750 Iteration 751 Iteration 752 Iteration 753 Iteration 754 Iteration 755 Iteration 756 Iteration 757 Iteration 758 Iteration 759 Iteration 760 Iteration 761 Iteration 762 Iteration 763 Iteration 764 Iteration 765 Iteration 766 Iteration 767 Iteration 768 Iteration 769 Iteration 770 Iteration 771 Iteration 772 Iteration 773 Iteration 774 Iteration 775 Iteration 776 Iteration 777 Iteration 778 Iteration 779 Iteration 780 Iteration 781 Iteration 782 Iteration 783 Iteration 784 Iteration 785 Iteration 786 Iteration 787 Iteration 788 Iteration 789 Iteration 790 Iteration 791 Iteration 792 Iteration 793 Iteration 794 Iteration 795 Iteration 796 Iteration 797 Iteration 798 Iteration 799 Iteration 800 Iteration 801 Iteration 802 Iteration 803 Iteration 804 Iteration 805 Iteration 806 Iteration 807 Iteration 808 Iteration 809 Iteration 810 Iteration 811 Iteration 812 Iteration 813 Iteration 814 Iteration 815 Iteration 816 Iteration 817 Iteration 818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 
872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateInParallel [GOOD] |82.8%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TReplicationTests::CreateWithoutCredentials >> TReplicationTests::Alter >> TPartitionTests::SetOffset |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> TestKinesisHttpProxy::CreateStreamWithInvalidName |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |82.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |82.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |82.8%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp >> TestYmqHttpProxy::TestGetQueueUrlWithIAM |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> TPartitionTests::SetOffset [GOOD] >> TestYmqHttpProxy::TestCreateQueue >> TestKinesisHttpProxy::MissingAction >> TPartitionTests::ShadowPartitionCounters >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TestKinesisHttpProxy::GoodRequestPutRecords >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest |82.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |82.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} >> Secret::Simple [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] |82.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |82.8%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6fwh/001ce2/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 23594, MsgBus: 21424 2024-11-21T17:51:32.567759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791708467131141:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:32.567945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001ce2/r3tmp/tmpkeHi9m/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23594, node 1 2024-11-21T17:51:32.623890Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:32.625801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:32.625813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:32.625816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:32.625858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21424 TClient is connected to server localhost:21424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:32.669697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:32.669726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:32.670987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:32.694288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:32.706003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:32.767008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:32.783361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:32.792839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:32.855637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791708467132700:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:32.855667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:32.888151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.894645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.901833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.916683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.971207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.979741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.993059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791708467133215:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:32.993076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791708467133220:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:32.993080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:32.993546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:32.998016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791708467133222:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 (StructType '('"Key" $3) '('"Value" $4))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($20) (block '( (let $21 (lambda '($22) (block '( (let $23 (VariantType (TupleType $5 $5))) (let $24 (Variant $22 '0 $23)) (let $25 (Variant $22 '1 $23)) (return $24 $25) )))) (return (FromFlow (MultiMap (ToFlow $20) $21))) ))) '('('"_logical_id" '688) '('"_id" '"f0e5d982-d366ac52-cb0b7eef-738391e1")))) (let $7 (DqCnUnionAll (TDqOutput $6 '1))) (let $8 '('('"_logical_id" '531) '('"_id" '"372e219e-c36a9e6a-4a0eed15-b0ad4514") '('"_wide_channels" $5))) (let $9 (DqPhyStage '($7) (lambda '($26) (block '( (let $27 (lambda '($28) (Member $28 '"Key") (Member $28 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $26) $27))) ))) $8)) (let $10 (DqCnMap (TDqOutput $6 '0))) (let $11 (DqCnBroadcast (TDqOutput $9 '0))) (let $12 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $13 '('('"_logical_id" '603) '('"_id" '"ef879a0a-ab6f0410-5a79c3d5-85d30edd") '('"_wide_channels" $12))) (let $14 (DqPhyStage '($10 $11) (lambda '($29 $30) (block '( (let $31 (lambda '($38) (block '( (let $39 (Member $38 '"Value")) (return (Member $38 '"Key") $39 $39 (Exists $39)) )))) (let $32 (lambda '($44 $45 $46 $47) $44 $45 $46)) (let $33 (lambda '($50 $51) $50 $51 $51)) (let $34 '('"2")) (let $35 '('0 '0 '1 '1)) (let $36 '('0 '"2" '1 '"3")) (let $37 (GraceJoinCore (WideMap (WideFilter (ExpandMap (ToFlow $29) $31) (lambda '($40 $41 $42 $43) $43)) $32) (WideMap (WideFilter (ToFlow $30) (lambda '($48 $49) (Exists $49))) $33) 'Inner $34 $34 $35 $36 '('"t1.Value") '('"t2.Value") '('"Broadcast"))) (return (FromFlow (WideSort $37 '('('0 (Bool 'true)))))) ))) $13)) (let $15 (DqCnMerge (TDqOutput $14 '0) '('('0 '"Asc")))) (let $16 (DqPhyStage '($15) (lambda '($52) (FromFlow (NarrowMap (ToFlow $52) (lambda '($53 $54 $55 $56) (AsStruct '('"t1.Key" $53) '('"t1.Value" $54) '('"t2.Key" $55) '('"t2.Value" $56)))))) '('('"_logical_id" '615) '('"_id" '"6535da1-f260420c-eeff729-f5bba904")))) (let $17 '($6 $9 $14 $16)) (let $18 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $19 (DqCnResult (TDqOutput $16 '0) $18)) (return (KqpPhysicalQuery '((KqpPhysicalTx $17 '($19) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $12) '0 '0)) '('('"type" '"query")))) ) |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] >> TPQTabletTests::ProposeTx_Unknown_WriteId |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TReplicationTests::CreateReplicatedTable [GOOD] >> 
TReplicationTests::DropReplicationWithInvalidCredentials >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> Secret::SimpleQueryService [GOOD] >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2024-11-21T17:51:32.869005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:32.869561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:32.869597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016eb/r3tmp/tmpquHbvz/pdisk_1.dat 2024-11-21T17:51:32.977084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.995509Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:33.038342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.038394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.048888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:33.153282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.421765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:33.421793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:832:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:33.421803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:33.422734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:33.590230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:836:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:33.665977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd7xkjhd59r092vdhgbmdq4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWEyMGQ2OTctMTlmNDlmMWYtNTUyNmRhNTgtMTEwMDRjNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:33.667358Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:950:2756], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkjhd59r092vdhgbmdq4w. SessionId : ydb://session/3?node_id=1&id=MWEyMGQ2OTctMTlmNDlmMWYtNTUyNmRhNTgtMTEwMDRjNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2024-11-21T17:51:33.667860Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:950:2756], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkjhd59r092vdhgbmdq4w. SessionId : ydb://session/3?node_id=1&id=MWEyMGQ2OTctMTlmNDlmMWYtNTUyNmRhNTgtMTEwMDRjNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2024-11-21T17:51:33.669912Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:951:2757], TxId: 281474976715660, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xkjhd59r092vdhgbmdq4w. SessionId : ydb://session/3?node_id=1&id=MWEyMGQ2OTctMTlmNDlmMWYtNTUyNmRhNTgtMTEwMDRjNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T17:51:33.671019Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWEyMGQ2OTctMTlmNDlmMWYtNTUyNmRhNTgtMTEwMDRjNA==, ActorId: [1:820:2673], ActorState: ExecuteState, TraceId: 01jd7xkjhd59r092vdhgbmdq4w, Create QueryResponse for error on request, msg: 2024-11-21T17:51:33.671327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xkjhd59r092vdhgbmdq4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWEyMGQ2OTctMTlmNDlmMWYtNTUyNmRhNTgtMTEwMDRjNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: 2024-11-21T17:49:38.919157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:38.919754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:38.919791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c8f/r3tmp/tmpBMbBU8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8019, node 1 TClient is connected to server localhost:64247 2024-11-21T17:49:39.056096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:39.074811Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:39.075589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:39.075604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:39.075609Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:39.075669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:39.117870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:39.117917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:39.128549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2024-11-21T17:49:51.719702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:51.719731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:51.720336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2024-11-21T17:49:51.754817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:892:2721], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:51.754846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:51.754877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:897:2726], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:51.755478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:49:51.910454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:899:2728], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:49:52.060857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:49:52.126438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2024-11-21T17:49:52.417461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T17:49:52.754979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:49:52.844648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.364445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2024-11-21T17:49:53.685433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2024-11-21T17:50:05.272734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:50:05.272763Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2024-11-21T17:50:26.456569Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jd7xhh3zaffgsk4wtrw51g69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1MzE2ZDQtMjA3NDcxNWItOWI2NjdkZWEtNmI2OWZhOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:50:47.772948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715749:0, at schemeshard: 72057594046644480 2024-11-21T17:50:48.244033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2024-11-21T17:50:48.819144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715767:0, at schemeshard: 72057594046644480 2024-11-21T17:50:48.919693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715770:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:50:59.902408Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715785. Ctx: { TraceId: 01jd7xjhrfcakspka3tv0kacbq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmJiYjhmOWYtYTkwNDkwYTYtMWUyNWNjMjMtMzBjZDU1YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2024-11-21T17:51:32.697548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715829. Ctx: { TraceId: 01jd7xkhtdd0e9j1gte49d3mzc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQxNDdiOTktMWU4YWQ3NjQtOWY1NDBiMC0xNzY1MjhkMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TReplicationTests::CopyReplicatedTable >> TReplicationTests::DropReplicationWithUnknownSecret >> TopicAutoscaling::CDC_Write [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> TPartitionTests::TestBatchingWithProposeConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6fwh/001d18/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 3131, MsgBus: 63479 2024-11-21T17:51:26.312473Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791682839019335:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:26.312601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001d18/r3tmp/tmpYvYusw/pdisk_1.dat 2024-11-21T17:51:26.405900Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3131, node 1 2024-11-21T17:51:26.413987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.414015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:26.415172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:26.417786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:26.417798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:26.417801Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:26.417839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63479 TClient is connected to server localhost:63479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:26.485569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.493368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.506417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.520687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.531856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.611556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791682839020881:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.611591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.631540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.637757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.650266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.704855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.759751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.768914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.824874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791682839021403:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.824900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.824918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791682839021408:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.825459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:26.830908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791682839021410:2427], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:31.312698Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791682839019335:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:31.312784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"2489bba4-6f56a69a-21051df7-94ac86e3") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"f23b0d88-912cf60a-96f252a7-438d1459") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"36712ecb-260f90e9-449e86d3-c2eef9dd")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2024-11-21T17:51:33.684038Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976715971. Error: [TEvError] Spilling Service not started 2024-11-21T17:51:33.684925Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439791712903797839:4282], TxId: 281474976715971, task: 2. Ctx: { TraceId : 01jd7xkjs6ajq0jcf9mqejaqcj. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YTljYWVlYi0xNGQyMGUzOS1kYzYxZDIwYy01MjU1ZTM4MA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] Spilling Service not started }. 2024-11-21T17:51:33.685030Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439791712903797841:4283], TxId: 281474976715971, task: 3. Ctx: { TraceId : 01jd7xkjs6ajq0jcf9mqejaqcj. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YTljYWVlYi0xNGQyMGUzOS1kYzYxZDIwYy01MjU1ZTM4MA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T17:51:33.685056Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439791712903797838:4281], TxId: 281474976715971, task: 1. Ctx: { TraceId : 01jd7xkjs6ajq0jcf9mqejaqcj. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YTljYWVlYi0xNGQyMGUzOS1kYzYxZDIwYy01MjU1ZTM4MA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T17:51:33.686762Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTljYWVlYi0xNGQyMGUzOS1kYzYxZDIwYy01MjU1ZTM4MA==, ActorId: [1:7439791712903797825:4277], ActorState: ExecuteState, TraceId: 01jd7xkjs6ajq0jcf9mqejaqcj, Create QueryResponse for error on request, msg: >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> TReplicationTests::CopyReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: 2024-11-21T17:49:39.630959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:39.631521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:39.631567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c83/r3tmp/tmpzlNjxB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15666, node 1 TClient is connected to server localhost:18404 2024-11-21T17:49:39.762092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:39.779631Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:39.780322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:39.780336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:39.780339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:39.780394Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:39.822183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:39.822220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:39.832805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2024-11-21T17:49:52.402248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:774:2643], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.402277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:784:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.402287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:52.403244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:49:52.406022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:788:2651], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:49:52.445851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2024-11-21T17:49:52.702882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:52.781927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2024-11-21T17:49:53.119119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.447484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:49:53.528685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:49:54.023931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2024-11-21T17:49:54.350933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2024-11-21T17:50:05.882834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:50:05.882873Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2024-11-21T17:50:27.058355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jd7xhhpm077jgzres3dhkx3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE1MzBmZi1hZTEwZWQ1Ny0zZDU5ZWU1OS1mYmVjODVkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:50:48.407895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715749:0, at schemeshard: 72057594046644480 2024-11-21T17:50:48.904872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2024-11-21T17:50:49.459455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715767:0, at schemeshard: 72057594046644480 2024-11-21T17:50:49.545697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715770:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:51:00.407066Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715785. Ctx: { TraceId: 01jd7xjj8vek6gc4sbtzfr2024, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkwNDE0ZmItZDJhODlhZTItZjFkZTQ5NDgtNmEyZDdiODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2024-11-21T17:51:33.237674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715829. Ctx: { TraceId: 01jd7xkjbb4hrrke6fbbz6p5tz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY3Mjk2MzMtMjBiYzhmNzEtZjllN2VmZS1iYzBmZGIxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6fwh/001cfe/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 20101, MsgBus: 32503 2024-11-21T17:51:26.313197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791686234169154:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:26.313387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001cfe/r3tmp/tmpn886MO/pdisk_1.dat 2024-11-21T17:51:26.405757Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:26.414417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.414451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20101, node 1 2024-11-21T17:51:26.415570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:26.419253Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:26.419263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:26.419265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:26.419290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32503 TClient is connected to server localhost:32503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:26.477346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:26.482736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.542373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.557251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.566689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.610985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686234170698:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.611011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.631268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.636550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.642314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.696849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.751179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.762214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.770210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686234171216:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.770232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.770258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686234171221:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.770886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:26.775214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791686234171223:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:31.313860Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791686234169154:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:31.313921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"1110c245-680e0f85-f583c8ff-66ae9b59") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"7566f1fb-9158138a-12aea8aa-91c0e495") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"8023b2a0-a29ac604-4c9d4754-5fa1bc84")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2024-11-21T17:51:33.296752Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:51:33.297877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:51:33.297953Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T17:51:33.297971Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:51:33.297977Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T17:51:33.297984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:51:33.297995Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:33.298004Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:51:33.298010Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:33.301174Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:33.301195Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T17:51:33.301211Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:51:33.302894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:33.303731Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:51:33.303749Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:33.304051Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.304082Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.304092Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T17:51:33.304169Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:51:33.304273Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T17:51:33.304459Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T17:51:33.304466Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T17:51:33.304475Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:51:33.304611Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T17:51:33.304620Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:51:33.304662Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.304703Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:33.304814Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:51:33.304845Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T17:51:33.304934Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 
2024-11-21T17:51:33.304938Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T17:51:33.304941Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:51:33.305007Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T17:51:33.305011Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T17:51:33.305023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.305036Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:33.305081Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:51:33.305110Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:33.305573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:51:33.305592Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:51:33.305642Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:33.305647Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T17:51:33.305930Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:33.305937Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T17:51:33.306089Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2024-11-21T17:51:33.306096Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T17:51:33.306108Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T17:51:33.306112Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:51:33.306116Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T17:51:33.306136Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 
2024-11-21T17:51:33.306149Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:33.306693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:51:33.306701Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T17:51:33.306703Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T17:51:33.307169Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T17:51:33.307178Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T17:51:33.307181Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2024-11-21T17:51:33.307184Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2024-11-21T17:51:33.307204Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:51:33.307215Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_P ... RSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:51:34.283595Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T17:51:34.283626Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:51:34.283641Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:34.290135Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:34.290865Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:34.291732Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2024-11-21T17:51:34.291760Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890, ExecStep 0, ExecTxId 0 2024-11-21T17:51:34.291789Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2024-11-21T17:51:34.291841Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Load tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:51:34.291854Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=1, PlannedTxs.size=1 2024-11-21T17:51:34.291861Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2024-11-21T17:51:34.291970Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:34.291977Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:34.292010Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T17:51:34.292071Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:51:34.292122Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:308:2294] 2024-11-21T17:51:34.292332Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:34.292564Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitMetaStep 2024-11-21T17:51:34.292613Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInfoRangeStep 2024-11-21T17:51:34.292723Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataRangeStep 2024-11-21T17:51:34.292756Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataStep 2024-11-21T17:51:34.292761Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 
2024-11-21T17:51:34.292768Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [5:308:2294] 2024-11-21T17:51:34.292775Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:51:34.292799Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 5 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:34.292842Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2024-11-21T17:51:34.292846Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T17:51:34.292851Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T17:51:34.292860Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T17:51:34.292871Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2024-11-21T17:51:34.292926Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:51:34.292951Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2024-11-21T17:51:34.292956Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2024-11-21T17:51:34.292960Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T17:51:34.292964Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T17:51:34.292968Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:51:34.292973Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T17:51:34.293012Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:51:34.293027Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:34.293882Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:51:34.293895Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2024-11-21T17:51:34.293899Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T17:51:34.293906Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
2024-11-21T17:51:34.293910Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2024-11-21T17:51:34.293933Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:34.294297Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2024-11-21T17:51:34.294309Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:34.294727Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:34.294737Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:334:2313], now have 1 active actors on pipe 2024-11-21T17:51:34.294764Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2024-11-21T17:51:34.294773Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2024-11-21T17:51:34.294780Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2024-11-21T17:51:34.294784Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2024-11-21T17:51:34.294788Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T17:51:34.294795Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T17:51:34.294799Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2024-11-21T17:51:34.294807Z node 5 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2024-11-21T17:51:34.294817Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T17:51:34.294826Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2024-11-21T17:51:34.294855Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:34.295338Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:51:34.295362Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T17:51:34.295368Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T17:51:34.295372Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:51:34.295376Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T17:51:34.295381Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T17:51:34.295384Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T17:51:34.295419Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:51:34.295432Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:34.296164Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:51:34.296176Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2024-11-21T17:51:34.296181Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T17:51:34.296188Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2024-11-21T17:51:34.296193Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T17:51:34.296197Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2024-11-21T17:51:34.296201Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T17:51:34.296204Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard >> BasicStatistics::TwoServerlessTwoSharedDbs |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: 
[1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:32.613247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:32.613323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:32.613351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:32.613362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:32.613389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:32.613422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:32.613454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:32.613540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:32.624173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:32.624208Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:32.627592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:32.628449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:32.628506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:32.630335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:32.630536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:32.630636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:32.630709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:32.631800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:32.632116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:32.632128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:32.632169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:32.632177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:32.632183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:32.632196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.633460Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:32.648105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:32.648192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.648269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:32.648305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:32.648310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.648931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:32.648956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:32.648995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.649004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:32.649008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:32.649012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:32.649390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.649405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:32.649409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:32.649743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.649752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.649757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:32.649763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:32.650210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:32.650547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:32.650602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:32.650766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:32.650785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:32.650792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:32.650830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:32.650834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:32.650862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:32.650874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:32.651277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:32.651285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:32.651329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:32.651333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:32.651405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.651411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:32.651421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:32.651425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:32.651430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:32.651435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:32.651438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:32.651441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:32.651450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:32.651455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:32.651459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, 
LocalPathId: 1], 3 2024-11-21T17:51:32.651739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:32.651751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:32.651756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:32.651760Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:32.651764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:32.651777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... lPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:51:34.493868Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:51:34.493871Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:51:34.493875Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:51:34.493884Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:51:34.495592Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:51:34.495613Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:51:34.495817Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 263 } } 2024-11-21T17:51:34.495828Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:51:34.495850Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 263 } } 2024-11-21T17:51:34.495864Z node 8 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 263 } } 2024-11-21T17:51:34.495969Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 
72057594046678944, message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:51:34.495975Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T17:51:34.495989Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:51:34.495994Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:51:34.496002Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:51:34.496012Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:34.496016Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T17:51:34.496683Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:51:34.496771Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:51:34.518238Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:51:34.518270Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T17:51:34.518301Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:51:34.518313Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:51:34.518321Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:51:34.518337Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:34.518341Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:51:34.518346Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, 
at schemeshard: 72057594046678944 2024-11-21T17:51:34.518352Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:51:34.518359Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:51:34.518955Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:51:34.519080Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:51:34.519091Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2024-11-21T17:51:34.519100Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:51:34.519105Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2024-11-21T17:51:34.519117Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T17:51:34.519122Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2024-11-21T17:51:34.519576Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:51:34.519590Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:51:34.519607Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:51:34.519612Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:51:34.519618Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:51:34.519637Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:338:2313] message: TxId: 102 2024-11-21T17:51:34.519645Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:51:34.519652Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:51:34.519657Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:51:34.519690Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:51:34.519697Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:51:34.520085Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:51:34.520097Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:428:2392] TestWaitNotification: OK eventTxId 102 2024-11-21T17:51:34.520205Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:34.520284Z node 8 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 85us result status StatusSuccess 2024-11-21T17:51:34.520409Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/6fwh/001d09/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 10378, MsgBus: 21435 2024-11-21T17:51:26.384534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791683026689901:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:26.384556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001d09/r3tmp/tmpfVqhbH/pdisk_1.dat 2024-11-21T17:51:26.425161Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10378, node 1 2024-11-21T17:51:26.436200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:26.436210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:26.436211Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:26.436258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21435 TClient is connected to server localhost:21435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:26.484896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.484925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:26.486041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:26.507818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.521399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.581032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.595543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.608477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.630224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791683026691437:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.630255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.650341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.655242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.663900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.671143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.677722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.684694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.693272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791683026691927:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.693295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.693306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791683026691932:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.693825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:26.698045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791683026691934:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:31.384618Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791683026689901:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:31.384673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"74fb218-f5eb0f88-9bc54de6-a91b0b4") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"18fc9a7c-9cc4bed7-816be33f-6f0e1048") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"ed8059c6-747e6373-65928e88-149fcf56")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:32.628439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:32.628462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:32.628466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:32.628469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:32.628474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2024-11-21T17:51:32.628476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:32.628483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:32.628562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:32.636120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:32.636138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:32.638373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:32.639092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:32.639122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:32.640358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:32.640565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:32.640676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:32.640743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:32.641732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:32.642005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:32.642017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:32.642054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:32.642062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:32.642069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:32.642094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.643299Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:32.658695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:32.658779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.658832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:32.658875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:32.658883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.660016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:32.660047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:32.660090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.660100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:32.660106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:32.660112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:32.660607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.660621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:32.660625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:32.660954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.660963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.660969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:32.660976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:32.661595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:32.661969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:32.662021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:32.662188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:32.662214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:32.662222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:32.662277Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:32.662282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:32.662315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:32.662327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:32.662665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:32.662672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:32.662719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:32.662724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:32.662796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:32.662803Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:32.662815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:32.662818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:32.662824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:32.662828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:32.662832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:32.662836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:32.662847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:32.662853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:32.662857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:32.663207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:32.663222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:32.663227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:32.663232Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:32.663237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:32.663251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ediator stepId#5000003 2024-11-21T17:51:34.426479Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2024-11-21T17:51:34.426508Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [8:123:2149], Recipient [8:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:51:34.426512Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:51:34.426526Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:34.426531Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:34.426558Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:51:34.426580Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:34.426584Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:51:34.426588Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:51:34.426667Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:51:34.426674Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:51:34.426684Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:34.426688Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:51:34.426692Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:51:34.426697Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:51:34.426702Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:51:34.426706Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:51:34.426710Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:51:34.426732Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:51:34.426738Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:51:34.426742Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:51:34.426745Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:51:34.426863Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [8:201:2204], Recipient [8:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2024-11-21T17:51:34.426871Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:51:34.426883Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:51:34.426893Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:51:34.426897Z node 8 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:51:34.426901Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:51:34.426904Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:34.426917Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:34.427111Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [8:201:2204], Recipient [8:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2024-11-21T17:51:34.427118Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:51:34.427129Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:51:34.427139Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:51:34.427143Z node 8 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:51:34.427147Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:51:34.427151Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:34.427165Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:51:34.427169Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:34.427204Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [8:123:2149], Recipient [8:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2024-11-21T17:51:34.427209Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2024-11-21T17:51:34.427215Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046678944 2024-11-21T17:51:34.427219Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:51:34.427228Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:34.427816Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:34.427952Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:51:34.427959Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:34.428145Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:51:34.428148Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:34.428160Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:51:34.428205Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:51:34.428210Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:51:34.428275Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [8:439:2396], Recipient [8:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:34.428282Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:34.428285Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:51:34.428304Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [8:355:2336], Recipient [8:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2024-11-21T17:51:34.428307Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:51:34.428315Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:51:34.428327Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:51:34.428330Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:437:2394] 2024-11-21T17:51:34.428343Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [8:439:2396], Recipient [8:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:34.428346Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:34.428348Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2024-11-21T17:51:34.428388Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [8:440:2397], Recipient [8:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:51:34.428391Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:34.428397Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:34.428418Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 19us result status StatusPathDoesNotExist 2024-11-21T17:51:34.428443Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> HttpRequest::Status |82.9%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |82.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> SystemView::TabletsFields >> SystemView::TopPartitionsFields |82.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} >> SystemView::Nodes [GOOD] >> SystemView::PDisksFields >> SystemView::PartitionStatsOneSchemeShard |83.0%| [TA] $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::QueryStatsRetries >> SystemView::VSlotsFields >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> SystemView::ConcurrentScans >> TStorageTenantTest::CreateTableInsideSubDomain >> TestYmqHttpProxy::TestReceiveMessage >> TStorageTenantTest::Boot >> LabeledDbCounters::TwoTablets >> SystemView::PgTablesOneSchemeShardDataQuery >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsFollowers >> TestYmqHttpProxy::TestGetQueueUrl >> TestKinesisHttpProxy::DifferentContentTypes >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass >> TStorageTenantTest::LsLs >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TestKinesisHttpProxy::TestRequestWithIAM >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> TStorageTenantTest::LsLs [GOOD] >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled >> SystemView::ConcurrentScans [GOOD] >> SystemView::Describe >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::QueryStats >> SystemView::QueryStatsRetries [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::StoragePoolsFields >> SystemView::PartitionStatsOneSchemeShardDataQuery >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] |83.0%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2024-11-21T17:51:36.481823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791728877441897:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.481849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:36.483793Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791727298486502:2195];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000eb3/r3tmp/tmpLRCXkx/pdisk_1.dat 2024-11-21T17:51:36.509797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:36.542360Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3255 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:51:36.572137Z node 1 :TX_PROXY DEBUG: actor# [1:7439791728877441906:2138] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:36.572150Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791728877442324:2426] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:36.572172Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791728877442022:2200], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.572178Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791728877442022:2200], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:36.572214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:36.572527Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441576:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791728877442329:2427] 2024-11-21T17:51:36.572546Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791728877441576:2052] Subscribe: subscriber# [1:7439791728877442329:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.572554Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441579:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791728877442330:2427] 2024-11-21T17:51:36.572556Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791728877441579:2055] Subscribe: subscriber# [1:7439791728877442330:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.572559Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441582:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791728877442331:2427] 2024-11-21T17:51:36.572562Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791728877441582:2058] Subscribe: 
subscriber# [1:7439791728877442331:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.572569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442329:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791728877441576:2052] 2024-11-21T17:51:36.572572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442330:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791728877441579:2055] 2024-11-21T17:51:36.572579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442331:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791728877441582:2058] 2024-11-21T17:51:36.572582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791728877442326:2427] 2024-11-21T17:51:36.572586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791728877442327:2427] 2024-11-21T17:51:36.572592Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791728877442325:2427][/dc-1] Set up state: owner# [1:7439791728877442022:2200], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.572615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791728877442328:2427] 2024-11-21T17:51:36.572620Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791728877442325:2427][/dc-1] Path was already updated: owner# [1:7439791728877442022:2200], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.572624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442329:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728877442326:2427], cookie# 1 2024-11-21T17:51:36.572625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442330:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728877442327:2427], cookie# 1 2024-11-21T17:51:36.572627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442331:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728877442328:2427], cookie# 1 2024-11-21T17:51:36.572630Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441576:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791728877442329:2427] 
2024-11-21T17:51:36.572632Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441576:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728877442329:2427], cookie# 1 2024-11-21T17:51:36.572634Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441579:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791728877442330:2427] 2024-11-21T17:51:36.572635Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441579:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728877442330:2427], cookie# 1 2024-11-21T17:51:36.572636Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441582:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791728877442331:2427] 2024-11-21T17:51:36.572637Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441582:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728877442331:2427], cookie# 1 2024-11-21T17:51:36.575914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442329:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728877441576:2052], cookie# 1 2024-11-21T17:51:36.575926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442330:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728877441579:2055], cookie# 1 2024-11-21T17:51:36.575928Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728877442331:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728877441582:2058], cookie# 1 2024-11-21T17:51:36.575934Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728877442326:2427], cookie# 1 2024-11-21T17:51:36.575938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:36.575941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728877442327:2427], cookie# 1 2024-11-21T17:51:36.575945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:36.575948Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728877442328:2427], cookie# 1 2024-11-21T17:51:36.575950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728877442325:2427][/dc-1] Unexpected sync response: sender# [1:7439791728877442328:2427], cookie# 1 2024-11-21T17:51:36.579525Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791728877442022:2200], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:36.579596Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791728877442022:2200], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Domain ... rkload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7439791727298486622:2110] 2024-11-21T17:51:36.798818Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439791727298486618:2111][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7439791727298486625:2111] 2024-11-21T17:51:36.798821Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439791727298486617:2110][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7439791727298486588:2103], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.798822Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439791727298486618:2111][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7439791727298486588:2103], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.798826Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439791727298486635:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439791728877441576:2052] 2024-11-21T17:51:36.798830Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439791727298486636:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439791728877441579:2055] 2024-11-21T17:51:36.798836Z node 2 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439791727298486637:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439791728877441582:2058] 2024-11-21T17:51:36.798862Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441576:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486626:2110] 2024-11-21T17:51:36.798869Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441579:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486630:2111] 2024-11-21T17:51:36.798871Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441576:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486628:2111] 2024-11-21T17:51:36.798872Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441579:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486627:2110] 2024-11-21T17:51:36.798874Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441576:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486635:2112] 2024-11-21T17:51:36.798875Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441579:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486636:2112] 2024-11-21T17:51:36.798877Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441582:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486631:2111] 2024-11-21T17:51:36.798880Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441582:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486629:2110] 2024-11-21T17:51:36.798883Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728877441582:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791727298486637:2112] 2024-11-21T17:51:36.798840Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439791727298486621:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439791727298486632:2112] 2024-11-21T17:51:36.798841Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439791727298486588:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T17:51:36.798849Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439791727298486621:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439791727298486633:2112] 2024-11-21T17:51:36.798851Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439791727298486588:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439791727298486617:2110] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:36.798852Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: 
[main][2:7439791727298486621:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [2:7439791727298486588:2103], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.798854Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439791727298486621:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7439791727298486634:2112] 2024-11-21T17:51:36.798857Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439791727298486621:2112][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7439791727298486588:2103], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.798866Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439791727298486588:2103], cacheItem# { Subscriber: { Subscriber: [2:7439791727298486617:2110] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:36.798871Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439791727298486588:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T17:51:36.798876Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439791727298486588:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439791727298486618:2111] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:36.798882Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439791727298486588:2103], cacheItem# { Subscriber: { Subscriber: [2:7439791727298486618:2111] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:36.798894Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439791727298486588:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2024-11-21T17:51:36.798900Z node 2 
:TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439791727298486588:2103], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7439791727298486621:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:36.798906Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791727298486638:2113], recipient# [2:7439791727298486613:2276], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.798908Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439791727298486588:2103], cacheItem# { Subscriber: { Subscriber: [2:7439791727298486621:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:36.798916Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791727298486639:2114], recipient# [2:7439791727298486597:2270], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2024-11-21T17:51:36.064290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791727822492167:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.064494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fb1/r3tmp/tmppg4h4F/pdisk_1.dat 2024-11-21T17:51:36.133078Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1705 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:36.169597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.169637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.170135Z node 1 :TX_PROXY DEBUG: actor# [1:7439791727822492398:2094] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:36.170148Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791727822492864:2415] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:36.170187Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791727822492426:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.170197Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791727822492426:2113], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:36.170246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:36.170632Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492127:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791727822492869:2416] 2024-11-21T17:51:36.170639Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492130:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791727822492870:2416] 2024-11-21T17:51:36.170659Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727822492130:2052] Subscribe: subscriber# [1:7439791727822492870:2416], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.170659Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727822492127:2049] Subscribe: subscriber# [1:7439791727822492869:2416], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.170674Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492133:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791727822492871:2416] 2024-11-21T17:51:36.170678Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727822492133:2055] Subscribe: subscriber# [1:7439791727822492871:2416], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.170691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492870:2416][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727822492130:2052] 2024-11-21T17:51:36.170696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492869:2416][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727822492127:2049] 2024-11-21T17:51:36.170700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492871:2416][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727822492133:2055] 2024-11-21T17:51:36.170707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439791727822492865:2416][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727822492867:2416] 2024-11-21T17:51:36.170713Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492130:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791727822492870:2416] 2024-11-21T17:51:36.170714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727822492866:2416] 2024-11-21T17:51:36.170717Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492127:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791727822492869:2416] 2024-11-21T17:51:36.170720Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492133:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791727822492871:2416] 2024-11-21T17:51:36.170737Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791727822492865:2416][/dc-1] Set up state: owner# [1:7439791727822492426:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.170768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727822492868:2416] 2024-11-21T17:51:36.170786Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791727822492865:2416][/dc-1] Path was already updated: owner# [1:7439791727822492426:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.170800Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492869:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727822492866:2416], cookie# 1 2024-11-21T17:51:36.170804Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492870:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727822492867:2416], cookie# 1 2024-11-21T17:51:36.170807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492871:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727822492868:2416], cookie# 1 2024-11-21T17:51:36.170827Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492127:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727822492869:2416], cookie# 1 2024-11-21T17:51:36.170838Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492130:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727822492870:2416], cookie# 1 2024-11-21T17:51:36.170841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727822492133:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727822492871:2416], cookie# 1 
2024-11-21T17:51:36.170846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492869:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727822492127:2049], cookie# 1 2024-11-21T17:51:36.170849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492870:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727822492130:2052], cookie# 1 2024-11-21T17:51:36.170851Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727822492871:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727822492133:2055], cookie# 1 2024-11-21T17:51:36.170856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727822492866:2416], cookie# 1 2024-11-21T17:51:36.170861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:36.170870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727822492867:2416], cookie# 1 2024-11-21T17:51:36.170878Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:36.170883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727822492868:2416], cookie# 1 2024-11-21T17:51:36.170884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727822492865:2416][/dc-1] Unexpected sync response: sender# [1:7439791727822492868:2416], cookie# 1 2024-11-21T17:51:36.171708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.177605Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791727822492426:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:36.177667Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: 
ResolveCacheItem: self# [1:7439791727822492426:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... h Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:36.987034Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439791725184617795:2750][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439791725184617797:2750] 2024-11-21T17:51:36.987039Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439791725184617795:2750][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439791725184617798:2750] 2024-11-21T17:51:36.987043Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7439791725184617795:2750][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Set up state: owner# [4:7439791725184616397:2101], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.987046Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439791725184617795:2750][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439791725184617799:2750] 2024-11-21T17:51:36.987050Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439791725184617795:2750][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7439791725184616397:2101], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.987050Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7439791725184616397:2101], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 0 } 2024-11-21T17:51:36.987055Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439791725184617796:2751][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439791725184617803:2751] 2024-11-21T17:51:36.987056Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439791725184616397:2101], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7439791725184617795:2750] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 
0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:36.987059Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439791725184617796:2751][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439791725184617804:2751] 2024-11-21T17:51:36.987063Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7439791725184617796:2751][/dc-1/USER_0/.metadata/workload_manager/running_requests] Set up state: owner# [4:7439791725184616397:2101], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.987063Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439791725184616397:2101], cacheItem# { Subscriber: { Subscriber: [4:7439791725184617795:2750] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:36.987065Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7439791725184616397:2101], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 0 } 2024-11-21T17:51:36.987066Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439791725184617796:2751][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439791725184617805:2751] 2024-11-21T17:51:36.987069Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439791725184616397:2101], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7439791725184617796:2751] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:36.987071Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439791725184617796:2751][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7439791725184616397:2101], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.987072Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439791725184616397:2101], cacheItem# { Subscriber: { Subscriber: [4:7439791725184617796:2751] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true 
ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:36.987080Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791725184617810:2753], recipient# [4:7439791725184617791:2341], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.987083Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791725184617809:2752], recipient# [4:7439791725184617779:2335], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.987249Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7439791725184617791:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:36.987280Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:51:37.042937Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439791725184616397:2101], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:37.042987Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439791725184616397:2101], cacheItem# { Subscriber: { Subscriber: [4:7439791725184617795:2750] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:37.042997Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439791725184616397:2101], cacheItem# { Subscriber: { Subscriber: [4:7439791725184617796:2751] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:37.043032Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439791729479585108:2754], recipient# [4:7439791725184617791:2341], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:37.043111Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7439791725184617791:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |83.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2024-11-21T17:51:35.467398Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791721593928618:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:35.467415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019b9/r3tmp/tmplU17yo/pdisk_1.dat 2024-11-21T17:51:35.509294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11241, node 1 2024-11-21T17:51:35.525440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:35.525452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:35.525454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:35.525500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:35.549387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:35.553026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:35.567542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.567573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.568727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:35.742359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791721593929271:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:35.742373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791721593929298:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:35.742377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:35.742936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:35.744141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791721593929300:2304], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T17:51:35.850659Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439791721593929379:2299] TxId: 281474976710662. Ctx: { TraceId: 01jd7xkmnz6rha49z37ek42fdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI0ZWMxODUtN2QwZWI5MzEtYTNkZmI0MGYtNTgyMjk1NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T17:51:35.850723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd7xkmnz6rha49z37ek42fdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI0ZWMxODUtN2QwZWI5MzEtYTNkZmI0MGYtNTgyMjk1NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:35.851528Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791721593929386:2312], owner: [1:7439791721593929382:2310], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:35.851686Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791721593929386:2312], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:35.851886Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791721593929386:2312], row count: 1, finished: 1 2024-11-21T17:51:35.851903Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791721593929386:2312], owner: [1:7439791721593929382:2310], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:35.853241Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211495849, txId: 281474976710661] shutting down 2024-11-21T17:51:36.142157Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791726505517291:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.145157Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019b9/r3tmp/tmpCoPXl6/pdisk_1.dat 2024-11-21T17:51:36.156677Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29268, node 2 2024-11-21T17:51:36.169609Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:36.169631Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:36.169634Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:36.169683Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20650 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:36.241851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.241873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.242934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:36.244132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:36.245505Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:36.451243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:36.461423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791726505517847:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.461440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.461500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791726505517859:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.462029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:36.474040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791726505517861:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:36.572402Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xkngc5szd0z878mv5kqym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWZjYWY2ZDAtMTYxOTNjNmUtNTM3MTVmYmEtMjM1OGYzNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.572956Z node 2 :SYSTEM_VIEWS INFO: Scan started, actor: [2:7439791726505517945:2318], owner: [2:7439791726505517942:2316], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:36.573741Z node 2 :SYSTEM_VIEWS INFO: Scan prepared, actor: [2:7439791726505517945:2318], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:36.573964Z node 2 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [2:7439791726505517945:2318], row count: 4, finished: 1 2024-11-21T17:51:36.573971Z node 2 :SYSTEM_VIEWS INFO: Scan finished, actor: [2:7439791726505517945:2318], owner: [2:7439791726505517942:2316], scan id: 0, table id: [720575940 ... our snapshot: [step: 1732211497280, txId: 281474976715671] shutting down 2024-11-21T17:51:37.295878Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jd7xkpa2011r0jxv0mde2ryx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDhjODAyYzMtNTg5NzI4NzUtZmU2NGVjZDQtZmIwNTAxN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:37.296357Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439791730837713735:2387], owner: [3:7439791730837713731:2385], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.296486Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439791730837713735:2387], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.296565Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439791730837713735:2387], row count: 3, finished: 1 2024-11-21T17:51:37.296575Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439791730837713735:2387], owner: [3:7439791730837713731:2385], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.297060Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211497295, txId: 281474976715673] shutting down 2024-11-21T17:51:37.308620Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd7xkpah8s4q6d9gv4ep30j7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDc0OTY5ZDItYmYwNDI4Ni02NGZmNWEzYy03NTZkNTlhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:37.309093Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439791730837713768:2396], owner: [3:7439791730837713764:2394], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.309199Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439791730837713768:2396], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.309274Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439791730837713768:2396], row count: 3, finished: 1 2024-11-21T17:51:37.309283Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439791730837713768:2396], owner: [3:7439791730837713764:2394], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.309713Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211497308, txId: 281474976715675] shutting down 2024-11-21T17:51:37.322821Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd7xkpay4xch2mp7wv0mgv0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTdjZWNhOTctZTEzYWI3YjQtMjIwOGMwMS05NGJhMjczMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:37.323254Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439791730837713800:2405], owner: [3:7439791730837713796:2403], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.323367Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439791730837713800:2405], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.323469Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439791730837713800:2405], row count: 4, finished: 1 2024-11-21T17:51:37.323478Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439791730837713800:2405], owner: [3:7439791730837713796:2403], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.323888Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211497322, txId: 281474976715677] shutting down 2024-11-21T17:51:37.335840Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd7xkpbc0gb3vgm18n4dsh7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTA5NTZmMjQtODNmMWE3NmYtZTE0YzZlNGUtZDU3NjAzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:37.336282Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439791730837713832:2414], owner: [3:7439791730837713828:2412], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.336399Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439791730837713832:2414], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.336482Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439791730837713832:2414], row count: 4, finished: 1 2024-11-21T17:51:37.336492Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439791730837713832:2414], owner: [3:7439791730837713828:2412], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.336941Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211497335, txId: 281474976715679] shutting down 2024-11-21T17:51:37.468009Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791730710783131:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:37.468187Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019b9/r3tmp/tmpbBbLzq/pdisk_1.dat 2024-11-21T17:51:37.477401Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62239, node 4 2024-11-21T17:51:37.492097Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:37.492111Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:37.492113Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:37.492152Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:37.568428Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:37.568456Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:37.569577Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:37.570233Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:37.694552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:37.704162Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791730710783972:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.704163Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791730710783983:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.704179Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.704676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:37.709333Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791730710783986:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:37.827201Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xkpq728965p4vygqrwknp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzBiNmEwYWItNDU0NjExMmEtNzIyZWQwN2MtZWFmOTRmZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:37.827718Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7439791730710784073:2332], owner: [4:7439791730710784072:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.827870Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7439791730710784073:2332], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.828028Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7439791730710784073:2332], row count: 4, finished: 1 2024-11-21T17:51:37.828039Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7439791730710784073:2332], owner: [4:7439791730710784072:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.828076Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7439791730710784079:2335], owner: [4:7439791730710784072:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.828452Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7439791730710784079:2335], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.828502Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7439791730710784079:2335], row count: 4, finished: 1 2024-11-21T17:51:37.828511Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7439791730710784079:2335], owner: [4:7439791730710784072:2331], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:37.828878Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211497826, txId: 281474976715661] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2024-11-21T17:51:26.308393Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:26.308418Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.311244Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.311526Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: 
"\010\000\020\000\030\000\"\000(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2024-11-21T17:51:26.573077Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:26.573100Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:51:26.575546Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:26.575735Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:176:2191] 2024-11-21T17:51:26.575767Z node 2 :PERSQUEUE INFO: new Cookie src3|3ef480ec-c3e5350b-99df9539-e24ade57_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src3 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T17:51:26.575816Z node 2 :PERSQUEUE INFO: new Cookie src4|5a47b5ce-5dcabb03-b077613f-6826bb30_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Create distr tx with id = 8 and act no: 9 Create distr tx with id = 10 and act no: 11 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COU ... Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 2 Got 
batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got batch complete: 1 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 3 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 2024-11-21T17:51:34.492255Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:34.492280Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:51:34.495883Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:34.496304Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured 
TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 2 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 2 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got batch complete: 1 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fa2/r3tmp/tmpj0fRid/pdisk_1.dat 2024-11-21T17:51:36.175404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:36.180622Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3734 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:36.201219Z node 1 :TX_PROXY DEBUG: actor# [1:7439791728682935500:2135] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:36.201243Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791728682936081:2392] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:36.201281Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791728682935720:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.201305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728682935951:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7439791728682935720:2149], cookie# 1 2024-11-21T17:51:36.201639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728682935970:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728682935967:2290], cookie# 1 2024-11-21T17:51:36.201644Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728682935971:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728682935968:2290], cookie# 1 2024-11-21T17:51:36.201648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728682935972:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728682935969:2290], cookie# 1 2024-11-21T17:51:36.201655Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728682935379:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728682935971:2290], cookie# 1 2024-11-21T17:51:36.201662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728682935382:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728682935972:2290], cookie# 1 2024-11-21T17:51:36.201668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728682935971:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728682935379:2054], cookie# 1 2024-11-21T17:51:36.201671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728682935972:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728682935382:2057], cookie# 1 2024-11-21T17:51:36.201676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728682935951:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728682935968:2290], cookie# 1 2024-11-21T17:51:36.201681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728682935951:2290][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:36.201685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728682935951:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728682935969:2290], cookie# 1 2024-11-21T17:51:36.201688Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728682935951:2290][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:36.201699Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791728682935720:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 
2024-11-21T17:51:36.202399Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791728682935720:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439791728682935951:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:36.202416Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791728682935720:2149], cacheItem# { Subscriber: { Subscriber: [1:7439791728682935951:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T17:51:36.202666Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791728682936096:2400], recipient# [1:7439791728682936081:2392], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:36.202717Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728682935376:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791728682935970:2290], cookie# 1 2024-11-21T17:51:36.202723Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791728682936081:2392] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:51:36.203956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791728682935970:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728682935376:2051], cookie# 1 2024-11-21T17:51:36.203972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728682935951:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791728682935967:2290], cookie# 1 2024-11-21T17:51:36.203975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791728682935951:2290][/dc-1] Unexpected sync response: sender# [1:7439791728682935967:2290], cookie# 1 2024-11-21T17:51:36.210349Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791728682936081:2392] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2024-11-21T17:51:36.210933Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791728682936081:2392] Handle TEvDescribeSchemeResult Forward to# [1:7439791728682936080:2391] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 
2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2024-11-21T17:51:36.213783Z node 1 :TX_PROXY DEBUG: actor# [1:7439791728682935500:2135] Handle TEvProposeTransaction 2024-11-21T17:51:36.213797Z node 1 :TX_PROXY DEBUG: actor# [1:7439791728682935500:2135] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2024-11-21T17:51:36.222030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.222062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.224655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.258167Z node 1 :TX_PROXY DEBUG: actor# [1:7439791728682935500:2135] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:51:36.259153Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:51:36.259168Z node 1 :TX_PROXY DEBUG: actor# [1:7439791728682935500:2135] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:51:36.259202Z node 1 :TX_PROXY DEBUG: actor# [1:7439791728682935500:2135] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7439791728682936139:2437] 2024-11-21T17:51:36.265901Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791728682936139:2437] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2024-11-21T17:51:36.265938Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791728682936139:2437] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:51:36.265978Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791728682935720:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709 ... 
0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [1:7439791732977903968:2831] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:37.314775Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791728682935720:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2024-11-21T17:51:37.314787Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791728682935720:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439791732977903968:2831] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211497350 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2024-11-21T17:51:37.314804Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791728682935720:2149], cacheItem# { Subscriber: { Subscriber: [1:7439791732977903968:2831] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211497350 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T17:51:37.314869Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791732977903975:2832], recipient# [1:7439791732977903967:2830], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:37.314889Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791732977903967:2830] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:51:37.314922Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791732977903967:2830] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2024-11-21T17:51:37.315180Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791732977903967:2830] Handle TEvDescribeSchemeResult Forward to# [1:7439791732977903966:2829] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: 
"SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1732211497350 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 
PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1732211497350 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T17:51:37.320990Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T17:51:37.321172Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728682935376:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791727149381773:2099] 2024-11-21T17:51:37.321189Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791728682935376:2051] Unsubscribe: subscriber# [3:7439791727149381773:2099], path# /dc-1/USER_0 2024-11-21T17:51:37.321195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728682935379:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791727149381774:2099] 2024-11-21T17:51:37.321198Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791728682935379:2054] Unsubscribe: subscriber# [3:7439791727149381774:2099], path# /dc-1/USER_0 2024-11-21T17:51:37.321203Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791728682935382:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791727149381775:2099] 2024-11-21T17:51:37.321207Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791728682935382:2057] Unsubscribe: subscriber# [3:7439791727149381775:2099], path# /dc-1/USER_0 2024-11-21T17:51:37.321243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:37.413572Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791728682935720:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:37.413619Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791728682935720:2149], cacheItem# { Subscriber: { Subscriber: [1:7439791728682936414:2643] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:37.413646Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791732977903979:2835], recipient# [1:7439791732977903978:2286], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] >> SystemView::Describe [GOOD] >> SystemView::DescribeAccessDenied >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2024-11-21T17:51:26.412244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791686035047271:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:26.412667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:26.415882Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791683827276688:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/tmpapGQCB/pdisk_1.dat 2024-11-21T17:51:26.447650Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:26.448429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:26.449694Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:26.472761Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12154, node 1 2024-11-21T17:51:26.489508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/yandexkN1iEE.tmp 2024-11-21T17:51:26.489523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/yandexkN1iEE.tmp 2024-11-21T17:51:26.492781Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/yandexkN1iEE.tmp 2024-11-21T17:51:26.492835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:26.493633Z INFO: TTestServer started on Port 6667 GrpcPort 12154 TClient is connected to server localhost:6667 PQClient connected to localhost:12154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:26.512179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.512217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:26.513559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.513728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:51:26.539027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:26.546269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:26.546296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:26.547722Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:26.547996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:51:26.678371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686035048307:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.678392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686035048318:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.678398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.679061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:51:26.679843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791686035048350:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.679889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:26.681875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791686035048321:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:51:26.699753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.750715Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791683827276878:2285], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:26.750796Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjA5YmQ0NDktM2FhMjBjMDgtZjc5ZGU4Y2MtMjM3YWM2YTM=, ActorId: [2:7439791683827276838:2279], ActorState: ExecuteState, TraceId: 01jd7xkc0t1nf0pjs8h231k52j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:26.751170Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:26.759040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:26.775247Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791686035048597:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:26.775463Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdmZDUzZTEtOWMyY2RkZTQtOTUwMWY1NDQtZjM4M2E1NDQ=, ActorId: [1:7439791686035048304:2303], ActorState: ExecuteState, TraceId: 01jd7xkbyp3trzkc947j4sgyra, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:26.775574Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:26.776126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:26.801590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd7xkc23098sfmyefwbn27vf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRlZWJlOGItNmIyY2RhNTctYTdjNDRmZGYtMTk1MjJiZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791686035048775:3043] 2024-11-21T17:51:31.412401Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791686035047271:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:31.412430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:51:31.415981Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791683827276688:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:31.416010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:31.896001Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:51:31.896015Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLA ... 
: 01jd7xkhhx9m3keb8r2tky1796, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE1OGUzYy0zNjQxOThhMy02ODBhMjA3LTFlMjFhYWZk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2024-11-21T17:51:32.428781Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439791711804853683:2577], TxId: 281474976710699, task: 2. Ctx: { TraceId : 01jd7xkhhz6dxcd20cqvywmvv7. SessionId : ydb://session/3?node_id=1&id=MzNhOWQ2NTktMmM4MDA4NDktZGNjZjczODUtZmZmY2NjNmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439791711804853673:2577], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T17:51:32.428799Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439791711804853684:2571], TxId: 281474976710700, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODE1OGUzYy0zNjQxOThhMy02ODBhMjA3LTFlMjFhYWZk. TraceId : 01jd7xkhhx9m3keb8r2tky1796. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439791711804853674:2571], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T17:51:32.756382Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791709950103222:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:32.756586Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/tmpKdTqC4/pdisk_1.dat 2024-11-21T17:51:32.761049Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:32.761274Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:32.762638Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:32.770180Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27475, node 3 2024-11-21T17:51:32.784184Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/yandexzP7HnP.tmp 2024-11-21T17:51:32.784206Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/yandexzP7HnP.tmp 2024-11-21T17:51:32.784277Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000de9/r3tmp/yandexzP7HnP.tmp 2024-11-21T17:51:32.784330Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:32.788607Z INFO: TTestServer started on Port 20560 GrpcPort 27475 TClient is connected to server localhost:20560 PQClient connected to localhost:27475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:32.856460Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:32.856494Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:32.858144Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:32.858719Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:32.858739Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:32.859093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:32.859158Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T17:51:32.859427Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:51:32.866652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:33.017346Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791716142056053:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:33.017358Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791716142056028:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:33.017364Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:33.018368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:51:33.022651Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439791716142056057:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:51:33.053485Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439791714245071543:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:33.053855Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTY4NjJhZDgtODkxZGEwMzktNjNmYmU5NTctZmM5MjY1Nzc=, ActorId: [3:7439791714245071502:2300], ActorState: ExecuteState, TraceId: 01jd7xkj5s5442tnxpxk4xn5v0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:33.053995Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:33.054799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.103174Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7439791716142056100:2289], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:33.103284Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTM1NWRmM2UtMTQ4NjlhYWItZmIwM2U4M2QtZjJkZTNhOWQ=, ActorId: [4:7439791716142056026:2280], ActorState: ExecuteState, TraceId: 01jd7xkj4s2k2ftdh1vf5en11v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:33.103469Z node 4 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:33.115138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.133416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:33.163920Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jd7xkj8t4yhy50tsg2ah83x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzM0ZDkwNDAtODAwOGRmYzMtYzgyMjc0NjYtZDA3YzFmNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [3:7439791714245071964:3011] 2024-11-21T17:51:37.756604Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439791709950103222:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:37.756638Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2024-11-21T17:51:38.230057Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439791735719908969:3307] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2024-11-21T17:51:38.230070Z node 3 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [3:7439791735719908969:3307] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TestKinesisHttpProxy::TestWrongStream >> SystemView::QueryStats [GOOD] >> SystemView::QueryStatsFields >> TestKinesisHttpProxy::ListShards >> TestKinesisHttpProxy::TestPing >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TestKinesisHttpProxy::CreateDeleteStream >> TStorageTenantTest::GenericCases ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2024-11-21T17:51:36.233278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791727344663113:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.271831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f91/r3tmp/tmpPOlXD9/pdisk_1.dat 2024-11-21T17:51:36.305484Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:36.331029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.331068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.332564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1882 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:36.344575Z node 1 :TX_PROXY DEBUG: actor# [1:7439791727344663182:2135] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:36.344592Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791727344663551:2381] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:36.344635Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791727344663207:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.344644Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791727344663207:2149], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:36.344699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:36.345075Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662859:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791727344663556:2382] 2024-11-21T17:51:36.345099Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727344662859:2051] Subscribe: subscriber# [1:7439791727344663556:2382], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.345116Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662862:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791727344663557:2382] 2024-11-21T17:51:36.345119Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727344662862:2054] Subscribe: subscriber# [1:7439791727344663557:2382], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.345124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662865:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791727344663558:2382] 2024-11-21T17:51:36.345127Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727344662865:2057] Subscribe: subscriber# [1:7439791727344663558:2382], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.345139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663556:2382][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727344662859:2051] 2024-11-21T17:51:36.345143Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663557:2382][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727344662862:2054] 2024-11-21T17:51:36.345147Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663558:2382][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727344662865:2057] 2024-11-21T17:51:36.345152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727344663553:2382] 2024-11-21T17:51:36.345165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439791727344663552:2382][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727344663554:2382] 2024-11-21T17:51:36.345177Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791727344663552:2382][/dc-1] Set up state: owner# [1:7439791727344663207:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.345205Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791727344663555:2382] 2024-11-21T17:51:36.345212Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791727344663552:2382][/dc-1] Path was already updated: owner# [1:7439791727344663207:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.345219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663556:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727344663553:2382], cookie# 1 2024-11-21T17:51:36.345221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663557:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727344663554:2382], cookie# 1 2024-11-21T17:51:36.345224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663558:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727344663555:2382], cookie# 1 2024-11-21T17:51:36.345229Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662859:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791727344663556:2382] 2024-11-21T17:51:36.345233Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662859:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727344663556:2382], cookie# 1 2024-11-21T17:51:36.345238Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662862:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791727344663557:2382] 2024-11-21T17:51:36.345239Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662862:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727344663557:2382], cookie# 1 2024-11-21T17:51:36.345242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662865:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791727344663558:2382] 2024-11-21T17:51:36.345244Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662865:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791727344663558:2382], cookie# 1 2024-11-21T17:51:36.348286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663556:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727344662859:2051], cookie# 1 2024-11-21T17:51:36.348298Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663557:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727344662862:2054], cookie# 1 2024-11-21T17:51:36.348302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791727344663558:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727344662865:2057], cookie# 1 2024-11-21T17:51:36.348311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727344663553:2382], cookie# 1 2024-11-21T17:51:36.348317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:36.348323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727344663554:2382], cookie# 1 2024-11-21T17:51:36.348328Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:36.348334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791727344663555:2382], cookie# 1 2024-11-21T17:51:36.348337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791727344663552:2382][/dc-1] Unexpected sync response: sender# [1:7439791727344663555:2382], cookie# 1 2024-11-21T17:51:36.353405Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791727344663207:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:36.353463Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791727344663207:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 }: sender# [1:7439791735934598786:2841], cookie# 2 2024-11-21T17:51:38.560874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791735934598783:2841][/dc-1/USER_0/SimpleTable] Unexpected sync response: sender# [1:7439791735934598786:2841], cookie# 2 2024-11-21T17:51:38.560876Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791727344663207:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2024-11-21T17:51:38.560888Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791727344663207:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439791735934598783:2841] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211498600 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2024-11-21T17:51:38.560905Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791727344663207:2149], cacheItem# { Subscriber: { Subscriber: [1:7439791735934598783:2841] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211498600 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2024-11-21T17:51:38.560948Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791735934598794:2846], recipient# [1:7439791735934598793:2845], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:38.560962Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791735934598793:2845] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:51:38.560977Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791735934598793:2845] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2024-11-21T17:51:38.561191Z node 1 :TX_PROXY 
DEBUG: Actor# [1:7439791735934598793:2845] Handle TEvDescribeSchemeResult Forward to# [1:7439791735934598792:2844] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1732211498600 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 
0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1732211498600 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T17:51:38.567847Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662859:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791730237952495:2100] 2024-11-21T17:51:38.567861Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727344662859:2051] Unsubscribe: subscriber# [3:7439791730237952495:2100], path# /dc-1/USER_0 2024-11-21T17:51:38.567870Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662862:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791730237952496:2100] 2024-11-21T17:51:38.567873Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727344662862:2054] Unsubscribe: subscriber# [3:7439791730237952496:2100], path# /dc-1/USER_0 2024-11-21T17:51:38.567878Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791727344662865:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791730237952497:2100] 2024-11-21T17:51:38.567881Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791727344662865:2057] Unsubscribe: subscriber# [3:7439791730237952497:2100], path# /dc-1/USER_0 2024-11-21T17:51:38.567963Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T17:51:38.568132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:38.642603Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791730237952498:2101], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:38.642656Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791730237952498:2101], cacheItem# { Subscriber: { Subscriber: [3:7439791730237952651:2194] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:38.642683Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791734532920249:2366], recipient# [3:7439791734532920248:2307], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::PartitionStatsTtlFields >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2024-11-21T17:51:36.576836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791726027104513:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.576903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000eb1/r3tmp/tmpvYzpAg/pdisk_1.dat 2024-11-21T17:51:36.650774Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28398 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:36.668683Z node 1 :TX_PROXY DEBUG: actor# [1:7439791726027104582:2135] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:36.668705Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791726027104999:2419] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:36.668748Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791726027104607:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:36.668763Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791726027104607:2149], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:36.668811Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:36.669186Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104260:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791726027105004:2420] 2024-11-21T17:51:36.669189Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104263:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791726027105005:2420] 2024-11-21T17:51:36.669208Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104263:2054] Subscribe: subscriber# [1:7439791726027105005:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.669208Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104260:2051] Subscribe: subscriber# [1:7439791726027105004:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.669221Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104266:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791726027105006:2420] 2024-11-21T17:51:36.669228Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104266:2057] Subscribe: subscriber# [1:7439791726027105006:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:36.669230Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105004:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791726027104260:2051] 2024-11-21T17:51:36.669234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105005:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791726027104263:2054] 2024-11-21T17:51:36.669235Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104260:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791726027105004:2420] 2024-11-21T17:51:36.669237Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105006:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791726027104266:2057] 2024-11-21T17:51:36.669238Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104263:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7439791726027105005:2420] 2024-11-21T17:51:36.669241Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104266:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791726027105006:2420] 2024-11-21T17:51:36.669243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791726027105001:2420] 2024-11-21T17:51:36.669250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791726027105002:2420] 2024-11-21T17:51:36.669260Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791726027105000:2420][/dc-1] Set up state: owner# [1:7439791726027104607:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.669290Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791726027105003:2420] 2024-11-21T17:51:36.669301Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791726027105000:2420][/dc-1] Path was already updated: owner# [1:7439791726027104607:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:36.669310Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105004:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791726027105001:2420], cookie# 1 2024-11-21T17:51:36.669313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105005:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791726027105002:2420], cookie# 1 2024-11-21T17:51:36.669316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105006:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791726027105003:2420], cookie# 1 2024-11-21T17:51:36.669321Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104266:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791726027105006:2420], cookie# 1 2024-11-21T17:51:36.669335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105006:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791726027104266:2057], cookie# 1 2024-11-21T17:51:36.669345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791726027105003:2420], cookie# 1 2024-11-21T17:51:36.669350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:36.669354Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: [1:7439791726027104260:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791726027105004:2420], cookie# 1 2024-11-21T17:51:36.669358Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104263:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791726027105005:2420], cookie# 1 2024-11-21T17:51:36.669362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105004:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791726027104260:2051], cookie# 1 2024-11-21T17:51:36.669370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791726027105005:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791726027104263:2054], cookie# 1 2024-11-21T17:51:36.669373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791726027105001:2420], cookie# 1 2024-11-21T17:51:36.669377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:36.669381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791726027105002:2420], cookie# 1 2024-11-21T17:51:36.669389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791726027105000:2420][/dc-1] Unexpected sync response: sender# [1:7439791726027105002:2420], cookie# 1 2024-11-21T17:51:36.673156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.673195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.674753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.675706Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791726027104607:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:36.675767Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# 
[1:7439791726027104607:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 48:2221], cacheItem# { Subscriber: { Subscriber: [2:7439791733959134696:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:38.602062Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439791733959134696:2297][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7439791733959134348:2221], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:38.602066Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439791733959134707:2298][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439791726027104260:2051] 2024-11-21T17:51:38.602070Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439791733959134697:2298][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7439791733959134704:2298] 2024-11-21T17:51:38.602071Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791733959134710:2299], recipient# [2:7439791733959134691:2515], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:38.602074Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439791733959134697:2298][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7439791733959134348:2221], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: 
there are 0 elements } 2024-11-21T17:51:38.602008Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104263:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791733959134708:2298] 2024-11-21T17:51:38.602038Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104266:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791733959134709:2298] 2024-11-21T17:51:38.602042Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104266:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791733959134703:2297] 2024-11-21T17:51:38.602049Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104263:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791733959134702:2297] 2024-11-21T17:51:38.602095Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104260:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791733959134701:2297] 2024-11-21T17:51:38.602098Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104260:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7439791733959134707:2298] 2024-11-21T17:51:38.609513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104263:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [2:7439791733959134357:2224] 2024-11-21T17:51:38.609536Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104263:2054] Unsubscribe: subscriber# [2:7439791733959134357:2224], path# /dc-1/USER_1 2024-11-21T17:51:38.609542Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104263:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791726504543783:2220] 2024-11-21T17:51:38.609544Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104263:2054] Unsubscribe: subscriber# [3:7439791726504543783:2220], path# /dc-1/USER_0 2024-11-21T17:51:38.609549Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104266:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [2:7439791733959134358:2224] 2024-11-21T17:51:38.609549Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104260:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_1 }: sender# [2:7439791733959134356:2224] 2024-11-21T17:51:38.609551Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104266:2057] Unsubscribe: subscriber# [2:7439791733959134358:2224], path# /dc-1/USER_1 2024-11-21T17:51:38.609555Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104266:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791726504543784:2220] 2024-11-21T17:51:38.609556Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104260:2051] Unsubscribe: subscriber# [2:7439791733959134356:2224], path# /dc-1/USER_1 2024-11-21T17:51:38.609557Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104266:2057] Unsubscribe: subscriber# [3:7439791726504543784:2220], path# /dc-1/USER_0 2024-11-21T17:51:38.609562Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791726027104260:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439791726504543782:2220] 2024-11-21T17:51:38.609565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791726027104260:2051] Unsubscribe: subscriber# [3:7439791726504543782:2220], path# /dc-1/USER_0 2024-11-21T17:51:38.609610Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2024-11-21T17:51:38.609710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> 
Disconnected 2024-11-21T17:51:38.609732Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T17:51:38.609776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:38.610589Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[3:7439791726504543773:2219], Type=268959746 2024-11-21T17:51:38.610603Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[3:7439791726504543773:2219], Type=268959746 2024-11-21T17:51:38.683732Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439791733959134348:2221], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:38.683771Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439791733959134348:2221], cacheItem# { Subscriber: { Subscriber: [2:7439791733959134696:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:38.683778Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439791733959134348:2221], cacheItem# { Subscriber: { Subscriber: [2:7439791733959134697:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:38.683796Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791733959134783:2334], recipient# [2:7439791733959134782:2531], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: 
false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:38.742003Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791726504543789:2223], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:38.742046Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791726504543789:2223], cacheItem# { Subscriber: { Subscriber: [3:7439791730799511198:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:38.742073Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791735094481688:3493], recipient# [3:7439791735094481687:2919], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TestYmqHttpProxy::TestSendMessage >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] Test command err: 2024-11-21T17:51:33.235241Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:33.235265Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.238644Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.238877Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: 
"\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:33.331510Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:33.468574Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:33.468590Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.471302Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:33.471822Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:177:2192] 2024-11-21T17:51:33.471851Z node 2 :PERSQUEUE INFO: new Cookie owner1|1b16fc6-1bedc430-13dc46b7-255d09e9_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send write: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 2 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 4 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 5 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 6 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 7 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 9 Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got write info response. 
Body keys: 1, head: 10, src id info: 1 2024-11-21T17:51:36.358281Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:36.358309Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:36.361816Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [3:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:36.361996Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [3:177:2192] 2024-11-21T17:51:36.362026Z node 3 :PERSQUEUE INFO: new Cookie owner1|66cc7f75-17f4c217-2214e4e4-a744b97c_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send write: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 2 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 4 Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 5 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 6 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 7 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 9 Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got write info response. 
Body keys: 1, head: 10, src id info: 1 2024-11-21T17:51:39.202544Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:39.202568Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:39.205476Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> SystemView::QueryStatsFields [GOOD] >> SystemView::QueryStatsAllTables >> SystemView::DescribeAccessDenied [GOOD] >> SystemView::CollectScriptingQueries >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> DbCounters::TabletsSimple ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2024-11-21T17:50:58.157471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791563338669562:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:58.187808Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:58.188874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e03/r3tmp/tmpU0PYIQ/pdisk_1.dat 2024-11-21T17:50:58.222771Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15972, node 1 2024-11-21T17:50:58.236723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e03/r3tmp/yandexaantQi.tmp 2024-11-21T17:50:58.236734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e03/r3tmp/yandexaantQi.tmp 2024-11-21T17:50:58.236797Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e03/r3tmp/yandexaantQi.tmp 2024-11-21T17:50:58.236839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:58.241579Z INFO: TTestServer started on Port 10921 GrpcPort 15972 TClient is connected to server localhost:10921 2024-11-21T17:50:58.254451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.254473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.255592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:15972 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:58.270963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.279670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:50:58.455253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563338670160:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.455288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563338670168:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.455294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.456076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.456124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563338670203:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.456164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.457819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791563338670174:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:50:58.490236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.496374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.508502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791563338670517:2596] 2024-11-21T17:51:03.156755Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791563338669562:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:03.156789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:03.705878Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.709202Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.709634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791584813507311:2763], Recipient [1:7439791563338669811:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.709648Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.709650Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.709656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791584813507307:2760], Recipient [1:7439791563338669811:2193]: {TEvModifySchemeTransaction txid# 281474976710674 TabletId# 72057594046644480} 2024-11-21T17:51:03.709657Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.714659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.714733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.714783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.714797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.714802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.714809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:03.714811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:03.714819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:03.714842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710674:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.714958Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:03.714973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T17:51:03.714981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 5 2024-11-21T17:51:03.715168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710674, response: Status: StatusAccepted TxId: 281474976710674 SchemeshardId: 72057594046644480 PathId: 13, at schemeshard: 72057594046644480 2024-11-21T17:51:03.715195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710674, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/test-topic 2024-11-21T17:51:03.715207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at table ... 
-11-21T17:51:40.197205Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:40.197206Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:51:40.197208Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715674:0 2024-11-21T17:51:40.197228Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:7439791717957608609:2138], Recipient [4:7439791717957608609:2138]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:51:40.197230Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:51:40.197236Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:40.197237Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715674, path id: [OwnerId: 72057594046644480, LocalPathId: 13] 2024-11-21T17:51:40.197288Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:40.197290Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:7439791717957608767:2225], at schemeshard: 72057594046644480, txId: 281474976715674, path id: 13 2024-11-21T17:51:40.197296Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:40.197299Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715674:0 ProgressState 2024-11-21T17:51:40.197308Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:40.197311Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715674:0 progress is 1/1 2024-11-21T17:51:40.197312Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2024-11-21T17:51:40.197316Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715674, ready parts: 1/1, is published: false 2024-11-21T17:51:40.197319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715674 ready parts: 1/1 2024-11-21T17:51:40.197322Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715674:0 2024-11-21T17:51:40.197324Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715674:0 2024-11-21T17:51:40.197367Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 5 2024-11-21T17:51:40.197371Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715674, publications: 1, subscribers: 1 2024-11-21T17:51:40.197372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715674, [OwnerId: 72057594046644480, LocalPathId: 13], 3 2024-11-21T17:51:40.197614Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:51:40.197643Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [4:7439791717957608767:2225], Recipient [4:7439791717957608609:2138]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 13] Version: 3 } 2024-11-21T17:51:40.197645Z node 4 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:51:40.197657Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 13 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715674 2024-11-21T17:51:40.197664Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 13 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715674 2024-11-21T17:51:40.197666Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715674 2024-11-21T17:51:40.197669Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715674, pathId: [OwnerId: 72057594046644480, LocalPathId: 13], version: 3 2024-11-21T17:51:40.197670Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:40.197685Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715674, subscribers: 1 2024-11-21T17:51:40.197688Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [4:7439791743727413579:2438] 2024-11-21T17:51:40.197691Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:40.197729Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715674 2024-11-21T17:51:40.197730Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:51:40.197739Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [4:7439791743727413579:2438] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715674 at schemeshard: 72057594046644480 2024-11-21T17:51:40.197853Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [4:7439791743727413620:2428], Recipient [4:7439791743727413495:2428]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T17:51:40.198485Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [4:7439791743727413595:2825], Recipient [4:7439791717957608609:2138]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:40.198491Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:40.198505Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T17:51:40.198670Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [4:7439791743727413495:2428], Recipient [4:7439791743727413495:2428]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T17:51:40.198707Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [4:7439791743727413495:2428], Recipient [4:7439791743727413495:2428]: NKikimrClient.TResponse Status: 1 Cookie: 5 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T17:51:40.198709Z node 4 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T17:51:40.198711Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:51:40.198715Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715674, State EXECUTED 2024-11-21T17:51:40.198716Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T17:51:40.198718Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715674, NewState WAIT_RS_ACKS 2024-11-21T17:51:40.198720Z node 4 :PERSQUEUE DEBUG: [TxId: 281474976715674] PredicateAcks: 0/0 2024-11-21T17:51:40.198722Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T17:51:40.198723Z node 4 :PERSQUEUE DEBUG: [TxId: 281474976715674] PredicateAcks: 0/0 2024-11-21T17:51:40.198725Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715674 to the list for deletion 2024-11-21T17:51:40.198728Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715674, NewState DELETING 2024-11-21T17:51:40.198731Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715674 2024-11-21T17:51:40.198738Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T17:51:40.198758Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [4:7439791743727413495:2428], Recipient [4:7439791743727413495:2428]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715674" IncludeFrom: true To: "tx_00000281474976715674" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\314\311\347\376\2642\030\232\247\200\200\200\200@(\240\215\0060\314\311\347\376\26428\232\247\200\200\200\200@" } 2024-11-21T17:51:40.199003Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [4:7439791743727413626:2428], Recipient [4:7439791743727413495:2428]: NKikimr::TEvKeyValue::TEvIntermediate 2024-11-21T17:51:40.199067Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [4:7439791743727413625:2443], Recipient [4:7439791743727413495:2428]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T17:51:40.199322Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [4:7439791743727413495:2428], Recipient [4:7439791743727413495:2428]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2024-11-21T17:51:40.199327Z node 4 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2024-11-21T17:51:40.199329Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:51:40.199334Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715674, State DELETING 2024-11-21T17:51:40.199336Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:51:40.199338Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715674 2024-11-21T17:51:40.199357Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [4:7439791743727413495:2428], Recipient [4:7439791743727413495:2428]: NKikimr::TEvKeyValue::TEvCollect 2024-11-21T17:51:40.199544Z node 4 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [4:7439791743727413629:2444], Recipient [4:7439791743727413495:2428]: NKikimr::TEvKeyValue::TEvCompleteGC 2024-11-21T17:51:40.201491Z node 4 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T17:51:40.201808Z node 4 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: 
"autoscalit-topic" 2024-11-21T17:51:40.201856Z node 4 :PQ_READ_PROXY DEBUG: Describe topic actor for path autoscalit-topic 2024-11-21T17:51:40.279348Z node 4 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [4:7439791743727413495:2428], Partition 0, Sender [0:0:0], Recipient [4:7439791743727413554:2432], Cookie: 0 2024-11-21T17:51:40.279373Z node 4 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [4:7439791743727413554:2432]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:40.279378Z node 4 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:40.279391Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:40.279416Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:40.279420Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:40.279425Z node 4 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> SystemView::PDisksFields [GOOD] >> SystemView::GroupsFields |83.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |83.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> SystemView::CollectScriptingQueries [GOOD] >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsTables >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TStorageTenantTest::DeclareAndDefine >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2024-11-21T17:51:39.590513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791739899990853:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:39.590587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e99/r3tmp/tmpHEb1RP/pdisk_1.dat 2024-11-21T17:51:39.652083Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15092 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T17:51:39.683359Z node 1 :TX_PROXY DEBUG: actor# [1:7439791739899990922:2132] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:39.683380Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791739899991349:2419] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:39.683417Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791739899991042:2193], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:39.683425Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791739899991042:2193], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:39.683562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:39.683899Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990610:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791739899991355:2420] 2024-11-21T17:51:39.683919Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791739899990610:2051] Subscribe: subscriber# [1:7439791739899991355:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:39.683933Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990613:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791739899991356:2420] 2024-11-21T17:51:39.683936Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791739899990613:2054] Subscribe: subscriber# [1:7439791739899991356:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:39.683942Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990616:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791739899991357:2420] 2024-11-21T17:51:39.683964Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791739899990616:2057] Subscribe: subscriber# [1:7439791739899991357:2420], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:39.683973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991355:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791739899990610:2051] 2024-11-21T17:51:39.683976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991356:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791739899990613:2054] 2024-11-21T17:51:39.683981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991357:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791739899990616:2057] 2024-11-21T17:51:39.683986Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791739899991352:2420] 2024-11-21T17:51:39.683991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439791739899991350:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791739899991353:2420] 2024-11-21T17:51:39.684000Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791739899991350:2420][/dc-1] Set up state: owner# [1:7439791739899991042:2193], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:39.684021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791739899991354:2420] 2024-11-21T17:51:39.684026Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791739899991350:2420][/dc-1] Path was already updated: owner# [1:7439791739899991042:2193], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:39.684032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991355:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791739899991352:2420], cookie# 1 2024-11-21T17:51:39.684034Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991356:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791739899991353:2420], cookie# 1 2024-11-21T17:51:39.684036Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991357:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791739899991354:2420], cookie# 1 2024-11-21T17:51:39.684093Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990613:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791739899991356:2420] 2024-11-21T17:51:39.684096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990613:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791739899991356:2420], cookie# 1 2024-11-21T17:51:39.684099Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990616:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791739899991357:2420] 2024-11-21T17:51:39.684101Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990616:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791739899991357:2420], cookie# 1 2024-11-21T17:51:39.684320Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990610:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791739899991355:2420] 2024-11-21T17:51:39.684326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791739899990610:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791739899991355:2420], cookie# 1 2024-11-21T17:51:39.684352Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991356:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791739899990613:2054], cookie# 1 2024-11-21T17:51:39.684356Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991357:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791739899990616:2057], cookie# 1 2024-11-21T17:51:39.684359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791739899991355:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791739899990610:2051], cookie# 1 2024-11-21T17:51:39.684364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791739899991353:2420], cookie# 1 2024-11-21T17:51:39.684369Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:39.684373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791739899991354:2420], cookie# 1 2024-11-21T17:51:39.684377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:39.684380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791739899991352:2420], cookie# 1 2024-11-21T17:51:39.684383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791739899991350:2420][/dc-1] Unexpected sync response: sender# [1:7439791739899991352:2420], cookie# 1 2024-11-21T17:51:39.691956Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791739899991042:2193], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:39.692077Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791739899991042:2193], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Version: 18446744073709551615 }: sender# [1:7439791739899991292:2373], cookie# 281474976710661 2024-11-21T17:51:40.796751Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:7439791739899991291:2372] Ack for unknown update (already acked?): sender# [1:7439791739899991292:2373], cookie# 281474976710661 2024-11-21T17:51:40.796752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:51:40.796785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710661 2024-11-21T17:51:40.796795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710661 2024-11-21T17:51:40.796798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2024-11-21T17:51:40.796800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:51:40.796802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 7 2024-11-21T17:51:40.796816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710661 2024-11-21T17:51:40.796824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710661 2024-11-21T17:51:40.796826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2024-11-21T17:51:40.796828Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:51:40.796829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2024-11-21T17:51:40.796834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2024-11-21T17:51:40.796841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7439791744194959183:2286] 2024-11-21T17:51:40.796878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.796884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.796886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.796887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.796889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.796890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.796892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.796894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2024-11-21T17:51:40.797107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2024-11-21T17:51:40.797119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2024-11-21T17:51:40.797123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2024-11-21T17:51:40.798171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T17:51:40.798280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2024-11-21T17:51:40.798321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T17:51:40.798358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T17:51:40.798395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free 
tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T17:51:40.798430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T17:51:40.798465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2024-11-21T17:51:40.798523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2024-11-21T17:51:40.798562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2024-11-21T17:51:40.798578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T17:51:40.798597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:51:40.798605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:51:40.798627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T17:51:40.799079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2024-11-21T17:51:40.799090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2024-11-21T17:51:40.799099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2024-11-21T17:51:40.799100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2024-11-21T17:51:40.799165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T17:51:40.799171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2024-11-21T17:51:40.799174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2024-11-21T17:51:40.799176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 
2024-11-21T17:51:40.799239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2024-11-21T17:51:40.799250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2024-11-21T17:51:40.799261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T17:51:40.799267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2024-11-21T17:51:40.799271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2024-11-21T17:51:40.799273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2024-11-21T17:51:40.799277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2024-11-21T17:51:40.799280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2024-11-21T17:51:40.799287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2024-11-21T17:51:40.799297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:51:40.799313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:51:40.799322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:51:40.799334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:51:40.799705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] Test command err: 2024-11-21T17:50:45.627044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:50:45.627629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:50:45.627666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00151c/r3tmp/tmpn1VqP1/pdisk_1.dat 2024-11-21T17:50:45.723983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:50:45.741727Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:45.783948Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:50:45.784206Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:50:45.784268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:45.784290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:45.794816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:45.898100Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:50:45.898124Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:50:45.898152Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:50:45.904088Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:50:45.904339Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:50:45.904354Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:50:45.904396Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:50:45.904435Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:50:45.904449Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:50:45.904524Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:50:45.904882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:50:45.905118Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:50:45.905131Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:50:45.918314Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:50:45.918477Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:50:45.918541Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:50:45.918586Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:45.923662Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:50:45.923788Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:45.923806Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:50:45.923916Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:50:45.923922Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:50:45.923927Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:50:45.923962Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:50:45.926637Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:50:45.926691Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:50:45.926709Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:50:45.926713Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:50:45.926717Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:50:45.926721Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:50:45.926816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:45.926822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:50:45.926933Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:50:45.926946Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:50:45.926955Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:45.926958Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:50:45.926963Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:50:45.926968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:50:45.926972Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:50:45.926977Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:50:45.926981Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:50:45.926984Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:50:45.926988Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:50:45.926991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:50:45.927004Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:50:45.927007Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:50:45.927028Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:50:45.927076Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:50:45.927084Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:50:45.927098Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:50:45.927105Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:50:45.927107Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:50:45.927111Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:50:45.927113Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:45.927147Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:50:45.927149Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:50:45.927151Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:50:45.927153Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:45.927160Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:50:45.927175Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:50:45.927177Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:50:45.927179Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:50:45.927183Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:50:45.927355Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:50:45.927360Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:50:45.937675Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:50:45.937709Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:50:45.937716Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:50:45.937728Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T17:51:39.947816Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:639:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T17:51:39.947824Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T17:51:39.947837Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037888, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T17:51:39.947857Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 8 2024-11-21T17:51:39.947866Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:642:2544]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T17:51:39.947870Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037889 2024-11-21T17:51:39.947878Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037889, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T17:51:39.947889Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 8 2024-11-21T17:51:40.201933Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [6:788:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T17:51:40.201965Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T17:51:40.201996Z node 6 :TX_DATASHARD TRACE: No cleanup at 72075186224037890 outdated step 62000 last cleanup 0 2024-11-21T17:51:40.202012Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:40.202021Z node 6 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T17:51:40.202028Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T17:51:40.202032Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T17:51:40.202075Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient 
[6:791:2645]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T17:51:40.202086Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037891 2024-11-21T17:51:40.202107Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037891, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T17:51:40.202135Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037891, FollowerId 0, tableId 3 2024-11-21T17:51:40.202420Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:788:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T17:51:40.202431Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037890 2024-11-21T17:51:40.202447Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037890, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T17:51:40.202463Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037890, FollowerId 0, tableId 3 2024-11-21T17:51:40.223602Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [6:791:2645]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T17:51:40.223635Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T17:51:40.223665Z node 6 :TX_DATASHARD TRACE: No cleanup at 72075186224037891 outdated step 62000 last cleanup 0 2024-11-21T17:51:40.223684Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:40.223694Z node 6 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2024-11-21T17:51:40.223699Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2024-11-21T17:51:40.223702Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2024-11-21T17:51:41.186427Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] Handle TEvProposeTransaction 2024-11-21T17:51:41.186459Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] TxId# 281474976715669 ProcessProposeTransaction 2024-11-21T17:51:41.186481Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] Cookie# 0 userReqId# "" txid# 281474976715669 SEND to# [6:1499:3277] DataReq marker# P0 2024-11-21T17:51:41.186521Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] Cookie# 0 txid# 281474976715669 HANDLE TDataReq marker# P1 2024-11-21T17:51:41.186633Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2024-11-21T17:51:41.186685Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T17:51:41.186719Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 2 followers disallowed marker# P4b 2024-11-21T17:51:41.186737Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037889 with read table request affected shards 2 followers disallowed marker# P4b 2024-11-21T17:51:41.186842Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1499:3277], Recipient [6:639:2542]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 
25769807053 } TxBody: " \0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\315\014\000\000\006\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T17:51:41.186853Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:41.186904Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:41.186974Z node 6 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2024-11-21T17:51:41.187023Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:51:41.187055Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T17:51:41.187061Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:51:41.187066Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:51:41.187071Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T17:51:41.187090Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2024-11-21T17:51:41.187094Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:51:41.187101Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2024-11-21T17:51:41.187130Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:41.187137Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T17:51:41.187148Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2024-11-21T17:51:41.187159Z node 6 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037888 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037888 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2024-11-21T17:51:41.187206Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:41.187311Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# BAD_REQUEST shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2024-11-21T17:51:41.187337Z node 6 :TX_PROXY ERROR: Actor# [6:1499:3277] txid# 281474976715669 RESPONSE Status# WrongRequest marker# P13c 2024-11-21T17:51:41.187410Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1499:3277], Recipient [6:642:2544]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 25769807053 } TxBody: " 
\0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\315\014\000\000\006\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T17:51:41.187419Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:41.187452Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T17:51:41.187486Z node 6 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2024-11-21T17:51:41.187502Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit CheckDataTx 2024-11-21T17:51:41.187518Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is Executed 2024-11-21T17:51:41.187523Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit CheckDataTx 2024-11-21T17:51:41.187529Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037889 to execution unit FinishPropose 2024-11-21T17:51:41.187535Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit FinishPropose 2024-11-21T17:51:41.187545Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is DelayComplete 2024-11-21T17:51:41.187551Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit FinishPropose 2024-11-21T17:51:41.187556Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037889 has finished 2024-11-21T17:51:41.187570Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T17:51:41.187576Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037889 on unit FinishPropose 2024-11-21T17:51:41.187582Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037889 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2024-11-21T17:51:41.187591Z node 6 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037889 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037889 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2024-11-21T17:51:41.187607Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 >> TestYmqHttpProxy::TestGetQueueAttributes >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TestKinesisHttpProxy::TestWrongStream [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2024-11-21T17:51:36.171787Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791726788961514:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.172390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001966/r3tmp/tmpKoUHFW/pdisk_1.dat 2024-11-21T17:51:36.225713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27742, node 1 2024-11-21T17:51:36.244384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:36.244395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:36.244397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:36.244431Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:36.271734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.271759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:14852 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:36.304451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:36.320722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.327039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.471149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962485:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962548:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962550:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962551:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962552:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962553:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962560:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962561:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962563:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962564:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962549:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962557:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962558:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962559:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962566:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962567:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962569:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962570:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962554:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962555:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.471660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726788962556:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.473127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:51:36.492346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726788962625:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:51:36.492364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726788962601:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:51:36.492370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726788962608:2413], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:51:36.492376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726788962600:2405], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:51:36.492381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726788962609:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:51:36.492387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726788962602:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:51:36.492392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726788962594:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transa ... 6:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:39.310045Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439791739595565255:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:39.310057Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:39.310696Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T17:51:39.321091Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439791739595565269:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T17:51:39.406179Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xkr9dbt46jj5mv9jbk13b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YTM2ZWFiNTktYmUwNjdkNDYtYTM1MzI1MDQtMTNiMGI1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:39.414688Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.527271Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xkrfn42et2smzav02fv3t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YTM2ZWFiNTktYmUwNjdkNDYtYTM1MzI1MDQtMTNiMGI1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:39.537579Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.660855Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xkrkrdwmd3arq82kbye3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YTM2ZWFiNTktYmUwNjdkNDYtYTM1MzI1MDQtMTNiMGI1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:39.689788Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root
: Error: Access denied 2024-11-21T17:51:39.697167Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1
: Error: Access denied 2024-11-21T17:51:39.698296Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys
: Error: Access denied 2024-11-21T17:51:39.772455Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys
: Error: Access denied 2024-11-21T17:51:39.788889Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys/partition_stats
: Error: Access denied 2024-11-21T17:51:39.804790Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys/partition_stats
: Error: Access denied 2024-11-21T17:51:39.815579Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T17:51:39.815733Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:39.815769Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T17:51:39.815813Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:39.815831Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 13 2024-11-21T17:51:39.815854Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:39.815876Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 14 2024-11-21T17:51:39.815903Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:40.078399Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:40.077006Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:40.915918Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7439791745208817410:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:40.916125Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001966/r3tmp/tmpmN2AbD/pdisk_1.dat 2024-11-21T17:51:40.942958Z node 16 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8154, node 16 2024-11-21T17:51:40.980439Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:40.980452Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:40.980455Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:40.980492Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18198 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:41.030936Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:41.031732Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:41.031760Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:41.032970Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:41.041226Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:41.260992Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439791749503785397:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:41.261006Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439791749503785389:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:41.261019Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:41.261931Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:41.264494Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439791749503785403:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:41.344470Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xkt6c04df3fn3nwcmstrx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YmI2ODY1YzYtODRiMmQ4NjQtY2I2MTZiM2MtZTJkMzQwMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:41.369988Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xkt992yh6yhcrzyvxqjbe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=M2FkNDdkNDMtYTdjNjMyYzgtNGZlZjk5ZmEtZDQwOTIyNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:41.371638Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211501412, txId: 281474976715662] shutting down 2024-11-21T17:51:41.393757Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xkt9y9shpwazwxrxwvbt6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=OGU4MGJjYTktMjk1YjYxNWUtYTc5ZGI5NDItMjg5NDZhZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:41.394612Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439791749503785572:2335], owner: [16:7439791749503785569:2333], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:51:41.399876Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439791749503785572:2335], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:41.400064Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439791749503785572:2335], row count: 2, finished: 1 2024-11-21T17:51:41.400078Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439791749503785572:2335], owner: [16:7439791749503785569:2333], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:51:41.401302Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211501393, txId: 281474976715664] shutting down >> TestKinesisHttpProxy::TestPing [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestKinesisHttpProxy::TestWrongStream2 >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TestKinesisHttpProxy::ListShards [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TestKinesisHttpProxy::TestRequestBadJson >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::TestBasicRemote [GOOD] Test command err: 2024-11-21T17:50:57.786527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791560885217413:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:57.786546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:57.812764Z node 1 
:PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e2a/r3tmp/tmpvrrCgu/pdisk_1.dat 2024-11-21T17:50:57.839642Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12299, node 1 2024-11-21T17:50:57.850192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e2a/r3tmp/yandexxc9sxa.tmp 2024-11-21T17:50:57.850205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e2a/r3tmp/yandexxc9sxa.tmp 2024-11-21T17:50:57.850256Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e2a/r3tmp/yandexxc9sxa.tmp 2024-11-21T17:50:57.850292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:57.854481Z INFO: TTestServer started on Port 29532 GrpcPort 12299 TClient is connected to server localhost:29532 PQClient connected to localhost:12299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:57.887702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:57.887730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:57.888814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:57.921174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:57.926535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:57.936683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.016045Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.123613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791565180185468:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.123630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791565180185476:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.123636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.124439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791565180185509:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.124455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.124522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.128426Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2024-11-21T17:50:58.128492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791565180185482:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:50:58.151763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.162274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.197985Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791565180185671:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:50:58.198374Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTI3NTM3ODUtYmZhNzcxZWEtNGFhMDQwMWYtNTgyNzg5Y2I=, ActorId: [1:7439791565180185464:2304], ActorState: ExecuteState, TraceId: 01jd7xjg2a5z727vs1xdgmgp4w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:50:58.198842Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:50:58.228368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791565180185825:2596] 2024-11-21T17:51:02.786860Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791560885217413:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.786926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:51:03.361408Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.364434Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.364848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791586655022601:2760], Recipient [1:7439791560885217832:2197]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.364858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.364860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.364879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791586655022597:2757], Recipient [1:7439791560885217832:2197]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T17:51:03.364885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.370218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.370309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.370375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.370391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.370400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.370405Z ... rted " } 2024-11-21T17:51:41.315620Z node 5 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] [] Got error. Status: CLIENT_CANCELLED. Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T17:51:41.315773Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] [] In Reconnect, ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2024-11-21T17:51:41.315784Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] [] New values: ReadSizeBudget = 8388608, ReadSizeServerDelta = 0 2024-11-21T17:51:41.315809Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] [] Closing session to cluster: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T17:51:41.315874Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:41.315880Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] [] Abort session to cluster 2024-11-21T17:51:41.315926Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] Closing read session. Close timeout: 0.000000s 2024-11-21T17:51:41.315941Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:0:1:15:16 2024-11-21T17:51:41.315953Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1168 BytesRead: 530 MessagesRead: 16 BytesReadCompressed: 530 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:41.315959Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] Closing read session. Close timeout: 0.000000s 2024-11-21T17:51:41.315961Z node 5 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] [] Got error. Status: CLIENT_CANCELLED. Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T17:51:41.315963Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:0:1:15:16 2024-11-21T17:51:41.315965Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1168 BytesRead: 530 MessagesRead: 16 BytesReadCompressed: 530 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:41.315972Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [ce79609e-c89df8b4-4e063b59-876b27f0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:41.315990Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] [] In Reconnect, ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2024-11-21T17:51:41.315993Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] [] New values: ReadSizeBudget = 8388608, ReadSizeServerDelta = 0 2024-11-21T17:51:41.315997Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] [] Closing session to cluster: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T17:51:41.315891Z node 6 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0] got next reader event: 1 2024-11-21T17:51:41.316366Z node 6 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0] schedule consumer creation 2024-11-21T17:51:41.316385Z node 6 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0]: read session closed: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T17:51:41.316404Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:41.316415Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] [] Abort session to cluster 2024-11-21T17:51:41.316441Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] Closing read session. Close timeout: 0.000000s 2024-11-21T17:51:41.316419Z node 6 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1] got next reader event: 1 2024-11-21T17:51:41.316455Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:1:1:11:12 2024-11-21T17:51:41.316461Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1169 BytesRead: 255 MessagesRead: 12 BytesReadCompressed: 255 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:41.316465Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] Closing read session. Close timeout: 0.000000s 2024-11-21T17:51:41.316467Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:1:1:11:12 2024-11-21T17:51:41.316469Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1169 BytesRead: 255 MessagesRead: 12 BytesReadCompressed: 255 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:41.316475Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [3129810e-ef226c2e-276ad7bd-a704f42a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:41.316499Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/some_user session shared/some_user_5_1_12310726138902353309_v1 grpc read done: success# 0, data# { } 2024-11-21T17:51:41.316499Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/some_user session shared/some_user_5_2_16829681504271119582_v1 grpc read done: success# 0, data# { } 2024-11-21T17:51:41.316504Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/some_user session shared/some_user_5_1_12310726138902353309_v1 grpc read failed 2024-11-21T17:51:41.316504Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_16829681504271119582_v1 grpc read failed 2024-11-21T17:51:41.316512Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_16829681504271119582_v1 grpc closed 2024-11-21T17:51:41.316512Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/some_user session shared/some_user_5_1_12310726138902353309_v1 grpc closed 2024-11-21T17:51:41.316523Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_16829681504271119582_v1 is DEAD 2024-11-21T17:51:41.316523Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/some_user session shared/some_user_5_1_12310726138902353309_v1 is DEAD 2024-11-21T17:51:41.316663Z node 6 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1]: read session closed: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T17:51:41.316728Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic2] pipe [5:7439791744387837494:2503] disconnected; active server actors: 1 2024-11-21T17:51:41.316738Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic2] pipe [5:7439791744387837494:2503] client some_user disconnected session shared/some_user_5_2_16829681504271119582_v1 2024-11-21T17:51:41.316755Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic2] consumer some_user rebalancing was scheduled 2024-11-21T17:51:41.316768Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic2] pipe [5:7439791744387837492:2502] disconnected; active server actors: 1 2024-11-21T17:51:41.316775Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic2] pipe [5:7439791744387837492:2502] client some_user disconnected session shared/some_user_5_1_12310726138902353309_v1 2024-11-21T17:51:41.316861Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:41.316879Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/some_user_5_1_12310726138902353309_v1 2024-11-21T17:51:41.316887Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791744387837498:2510] destroyed 2024-11-21T17:51:41.316892Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:41.316895Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/some_user_5_2_16829681504271119582_v1 2024-11-21T17:51:41.316898Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791744387837500:2511] destroyed 2024-11-21T17:51:41.316916Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/some_user_5_1_12310726138902353309_v1 2024-11-21T17:51:41.316919Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/some_user_5_2_16829681504271119582_v1 2024-11-21T17:51:41.318637Z node 6 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1] schedule consumer creation >> TestKinesisHttpProxy::ListShardsEmptyFields >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TStorageTenantTest::DeclareAndDefine [GOOD] |83.1%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> TStorageTenantTest::GenericCases [GOOD] |83.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> HttpRequest::Status [GOOD] |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TPersQueueMirrorer::ValidStartStream |83.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2024-11-21T17:51:41.725808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791747008847190:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:41.726105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e68/r3tmp/tmp6njlZ6/pdisk_1.dat 2024-11-21T17:51:41.779242Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10215 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:51:41.796550Z node 1 :TX_PROXY DEBUG: actor# [1:7439791747008847410:2136] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:41.796578Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791747008847782:2389] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:41.796629Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791747008847512:2191], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:41.796650Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791747008847512:2191], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:41.796707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:41.797096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847083:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791747008847789:2390] 2024-11-21T17:51:41.797102Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847086:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791747008847790:2390] 2024-11-21T17:51:41.797120Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791747008847086:2053] Subscribe: subscriber# [1:7439791747008847790:2390], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:41.797120Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791747008847083:2050] Subscribe: subscriber# [1:7439791747008847789:2390], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:41.797133Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847089:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791747008847791:2390] 2024-11-21T17:51:41.797141Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791747008847089:2056] Subscribe: subscriber# [1:7439791747008847791:2390], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:41.797154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847790:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791747008847086:2053] 2024-11-21T17:51:41.797165Z 
node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847789:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791747008847083:2050] 2024-11-21T17:51:41.797169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847791:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791747008847089:2056] 2024-11-21T17:51:41.797176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791747008847786:2390] 2024-11-21T17:51:41.797178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847086:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791747008847790:2390] 2024-11-21T17:51:41.797187Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791747008847785:2390] 2024-11-21T17:51:41.797194Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847083:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791747008847789:2390] 2024-11-21T17:51:41.797199Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791747008847783:2390][/dc-1] Set up state: owner# [1:7439791747008847512:2191], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:41.797200Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847089:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791747008847791:2390] 2024-11-21T17:51:41.797229Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791747008847787:2390] 2024-11-21T17:51:41.797241Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791747008847783:2390][/dc-1] Path was already updated: owner# [1:7439791747008847512:2191], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:41.797250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847789:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791747008847785:2390], cookie# 1 2024-11-21T17:51:41.797258Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847790:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791747008847786:2390], cookie# 1 2024-11-21T17:51:41.797261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847791:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791747008847787:2390], cookie# 1 2024-11-21T17:51:41.797269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7439791747008847083:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791747008847789:2390], cookie# 1 2024-11-21T17:51:41.797279Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847086:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791747008847790:2390], cookie# 1 2024-11-21T17:51:41.797286Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847089:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791747008847791:2390], cookie# 1 2024-11-21T17:51:41.797347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847789:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791747008847083:2050], cookie# 1 2024-11-21T17:51:41.797355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847790:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791747008847086:2053], cookie# 1 2024-11-21T17:51:41.797358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791747008847791:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791747008847089:2056], cookie# 1 2024-11-21T17:51:41.797363Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791747008847785:2390], cookie# 1 2024-11-21T17:51:41.797369Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:41.797372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791747008847786:2390], cookie# 1 2024-11-21T17:51:41.797376Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:41.797381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791747008847787:2390], cookie# 1 2024-11-21T17:51:41.797383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791747008847783:2390][/dc-1] Unexpected sync response: sender# [1:7439791747008847787:2390], cookie# 1 2024-11-21T17:51:41.803436Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791747008847512:2191], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 
PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:41.803521Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791747008847512:2191], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { T ... 41495Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791747008847510:2268], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/dir/table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/dir/table2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.241529Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791747008847510:2268], cacheItem# { Subscriber: { Subscriber: [1:7439791751303815802:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211502050 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/dir/table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.241558Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791747008847510:2268], cacheItem# { Subscriber: { Subscriber: [1:7439791751303815818:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211502200 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/dir/table2 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.241622Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791751303815834:2299], recipient# [1:7439791751303815833:2298], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/dir/table1 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } },{ Path: dc-1/USER_0/dir/table2 TableId: [72057594046644480:5:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:42.242029Z node 1 :TX_PROXY DEBUG: actor# [1:7439791747008847410:2136] Handle TEvProposeTransaction 2024-11-21T17:51:42.242046Z node 1 :TX_PROXY DEBUG: actor# [1:7439791747008847410:2136] TxId# 281474976710665 ProcessProposeTransaction 2024-11-21T17:51:42.242057Z node 1 :TX_PROXY DEBUG: actor# [1:7439791747008847410:2136] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [1:7439791751303815835:2967] DataReq marker# P0 2024-11-21T17:51:42.242075Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] Cookie# 0 txid# 281474976710665 HANDLE TDataReq marker# P1 2024-11-21T17:51:42.242154Z node 1 :TX_PROXY DEBUG: Actor [1:7439791751303815835:2967] txid 281474976710665 disallow followers cause of operation 2 read target mode 0 2024-11-21T17:51:42.242161Z node 1 :TX_PROXY DEBUG: Actor [1:7439791751303815835:2967] txid 281474976710665 disallow followers cause of operation 2 read target mode 0 2024-11-21T17:51:42.242167Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 SEND to# [1:7439791747008847512:2191] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2024-11-21T17:51:42.242199Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [1:7439791747008847512:2191], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 5] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 4] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) }] } 2024-11-21T17:51:42.242216Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [1:7439791747008847512:2191], cacheItem# { Subscriber: { Subscriber: [1:7439791751303815790:2953] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211502200 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 5] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.242233Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [1:7439791747008847512:2191], cacheItem# { Subscriber: { Subscriber: [1:7439791751303815675:2853] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211502050 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 4] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.242275Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791751303815837:2969], recipient# [1:7439791751303815835:2967], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 5] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 4] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) }] } 2024-11-21T17:51:42.242287Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T17:51:42.242418Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] 
txid# 281474976710665 SEND TEvProposeTransaction to datashard 72075186224037892 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T17:51:42.242447Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 SEND TEvProposeTransaction to datashard 72075186224037894 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T17:51:42.244923Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037892 read size 0 out readset size 0 marker# P6 2024-11-21T17:51:42.244940Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037894 read size 0 out readset size 0 marker# P6 2024-11-21T17:51:42.244947Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 SEND EvProposeTransaction to# 72075186224037889 Coordinator marker# P7 2024-11-21T17:51:42.245517Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 HANDLE TEvProposeTransactionStatus TDataReq marker# P11 Status# 16 2024-11-21T17:51:42.253398Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 HANDLE TEvProposeTransactionStatus TDataReq marker# P10 Status# 17 2024-11-21T17:51:42.254848Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037894 marker# P12 2024-11-21T17:51:42.255869Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2024-11-21T17:51:42.256009Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791751303815835:2967] txid# 281474976710665 MergeResult ExecComplete TDataReq marker# P17 2024-11-21T17:51:42.256049Z node 1 :TX_PROXY INFO: Actor# [1:7439791751303815835:2967] txid# 281474976710665 RESPONSE Status# ExecComplete prepare time: 0.002873s execute time: 0.011098s total time: 0.013971s marker# P13 2024-11-21T17:51:42.262614Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847083:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439791749139422217:2098] 2024-11-21T17:51:42.262635Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791747008847083:2050] Unsubscribe: subscriber# [2:7439791749139422217:2098], path# /dc-1/USER_0 2024-11-21T17:51:42.262643Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847086:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439791749139422218:2098] 2024-11-21T17:51:42.262646Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791747008847086:2053] Unsubscribe: subscriber# [2:7439791749139422218:2098], path# /dc-1/USER_0 2024-11-21T17:51:42.262653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791747008847089:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439791749139422219:2098] 2024-11-21T17:51:42.262656Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791747008847089:2056] Unsubscribe: subscriber# [2:7439791749139422219:2098], path# /dc-1/USER_0 2024-11-21T17:51:42.262698Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2024-11-21T17:51:42.262957Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize |83.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e77/r3tmp/tmpXjv4CA/pdisk_1.dat 2024-11-21T17:51:39.951526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:39.985858Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10301 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:51:40.028488Z node 1 :TX_PROXY DEBUG: actor# [1:7439791738530860574:2131] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:40.028507Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791742825828458:2387] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:40.028543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791738530860799:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:40.028566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791738530860910:2204][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7439791738530860799:2147], cookie# 1 2024-11-21T17:51:40.028928Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791738530860920:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791738530860917:2204], cookie# 1 2024-11-21T17:51:40.028935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791738530860921:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791738530860918:2204], cookie# 1 2024-11-21T17:51:40.028939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791738530860922:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791738530860919:2204], cookie# 1 2024-11-21T17:51:40.028949Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791738530860459:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791738530860920:2204], cookie# 1 2024-11-21T17:51:40.028957Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791738530860462:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791738530860921:2204], cookie# 1 2024-11-21T17:51:40.028963Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791738530860465:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791738530860922:2204], cookie# 1 2024-11-21T17:51:40.028970Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791738530860920:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791738530860459:2051], cookie# 1 2024-11-21T17:51:40.028973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791738530860921:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 
2 Partial: 0 }: sender# [1:7439791738530860462:2054], cookie# 1 2024-11-21T17:51:40.028981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791738530860922:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791738530860465:2057], cookie# 1 2024-11-21T17:51:40.028986Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791738530860910:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791738530860917:2204], cookie# 1 2024-11-21T17:51:40.028991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791738530860910:2204][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:40.028994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791738530860910:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791738530860918:2204], cookie# 1 2024-11-21T17:51:40.028998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791738530860910:2204][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:40.029003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791738530860910:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791738530860919:2204], cookie# 1 2024-11-21T17:51:40.029010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791738530860910:2204][/dc-1] Unexpected sync response: sender# [1:7439791738530860919:2204], cookie# 1 2024-11-21T17:51:40.029018Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791738530860799:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2024-11-21T17:51:40.029944Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791738530860799:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439791738530860910:2204] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:40.029965Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791738530860799:2147], cacheItem# { Subscriber: { Subscriber: [1:7439791738530860910:2204] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T17:51:40.030311Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791742825828459:2388], recipient# [1:7439791742825828458:2387], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: 
[OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:51:40.030321Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791742825828458:2387] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T17:51:40.037296Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791742825828458:2387] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2024-11-21T17:51:40.037792Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791742825828458:2387] Handle TEvDescribeSchemeResult Forward to# [1:7439791742825828457:2386] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 Shard... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2024-11-21T17:51:40.040596Z node 1 :TX_PROXY DEBUG: actor# [1:7439791738530860574:2131] Handle TEvProposeTransaction 2024-11-21T17:51:40.040607Z node 1 :TX_PROXY DEBUG: actor# [1:7439791738530860574:2131] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2024-11-21T17:51:40.048860Z node 1 :TX_PROXY DEBUG: actor# [1:7439791738530860574:2131] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:51:40.049130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:40.049150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:40.051351Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:51:40.051358Z node 1 :TX_PROXY DEBUG: actor# [1:7439791738530860574:2131] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:51:40.051384Z node 1 :TX_PROXY DEBUG: actor# [1:7439791738530860574:2131] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7439791742825828475:2402] 2024-11-21T17:51:40.052306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:40.065908Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791742825828475:2402] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2024-11-21T17:51:40.065952Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791742825828475:2402] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:51:40.065987Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791738530860799:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [1844674407370 ... 
4037968897 at ss 72057594046644480 2024-11-21T17:51:42.273960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715660 2024-11-21T17:51:42.273994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715660 2024-11-21T17:51:42.274395Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2024-11-21T17:51:42.275549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2024-11-21T17:51:42.275647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T17:51:42.275703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T17:51:42.275748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T17:51:42.275772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:42.275794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T17:51:42.275814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T17:51:42.275836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T17:51:42.275858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:51:42.275866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:51:42.275890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T17:51:42.275920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T17:51:42.275929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:51:42.275939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:51:42.276495Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2024-11-21T17:51:42.276515Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2024-11-21T17:51:42.277264Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult 
Complete status was NO_GROUP for TabletId 72075186224037891 2024-11-21T17:51:42.277298Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2024-11-21T17:51:42.278850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2024-11-21T17:51:42.278868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2024-11-21T17:51:42.278889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T17:51:42.278896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2024-11-21T17:51:42.278902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2024-11-21T17:51:42.278904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2024-11-21T17:51:42.278908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T17:51:42.278919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2024-11-21T17:51:42.278934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T17:51:42.278946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 TabletID: 72075186224037888 Status: OK Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } TabletType: Coordinator Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 2 } 2024-11-21T17:51:42.279050Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2024-11-21T17:51:42.473350Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791753651425391:2252], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.473393Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791753651425391:2252], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.473402Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7439791753651425391:2252], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2024-11-21T17:51:42.473504Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791753651425405:2258][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:42.473635Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791753651425405:2258][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7439791753651425406:2258] 2024-11-21T17:51:42.473670Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791753651425405:2258][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7439791753651425407:2258] 2024-11-21T17:51:42.473681Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7439791753651425405:2258][/dc-1/USER_0] Set up state: owner# [3:7439791753651425391:2252], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:42.473693Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7439791753651425405:2258][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7439791753651425408:2258] 2024-11-21T17:51:42.473701Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7439791753651425405:2258][/dc-1/USER_0] Ignore empty state: owner# [3:7439791753651425391:2252], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:42.473718Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7439791753651425391:2252], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2024-11-21T17:51:42.473740Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7439791753651425391:2252], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7439791753651425405:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T17:51:42.473761Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791753651425391:2252], cacheItem# { Subscriber: { Subscriber: [3:7439791753651425405:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.473778Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791753651425412:2259], recipient# [3:7439791753651425404:2257], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.473792Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791753651425391:2252], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.473806Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791753651425413:2260], recipient# [3:7439791753651425403:2515], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.481033Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2024-11-21T17:51:35.520298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:35.520351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:35.520364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001676/r3tmp/tmpumvMme/pdisk_1.dat 2024-11-21T17:51:35.597348Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13066, node 1 2024-11-21T17:51:35.693945Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:35.693968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:35.693973Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:35.694080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:35.701233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:35.777711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.777740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.789285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14259 2024-11-21T17:51:36.214303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:37.077814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:37.077850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:37.111656Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:37.112791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:37.163705Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:37.171923Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:51:37.171950Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:51:37.178284Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:51:37.178440Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:51:37.178461Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:51:37.178466Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:51:37.178472Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:51:37.178478Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:51:37.178483Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:51:37.178489Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:51:37.178593Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:51:37.355147Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:51:37.355172Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:51:37.356566Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:51:37.358385Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:51:37.358462Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:51:37.359085Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:51:37.362541Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:51:37.362555Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:51:37.362563Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:51:37.363837Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:37.363856Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:37.364834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:51:37.365958Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:51:37.365979Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:51:37.368135Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:51:37.380072Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:37.401939Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:51:37.516103Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:51:37.683403Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:51:38.415459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:38.415498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:38.419012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:51:38.465652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:38.465718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:38.465764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:38.465790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:38.465810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:38.465828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:51:38.465851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:51:38.465879Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:51:38.465898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:51:38.465918Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:51:38.465939Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:51:38.465968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2286:2840];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:51:38.473415Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:38.473457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:38.473524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:38.473546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:38.473566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:38.473589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2292:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... ::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:51:38.526061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:51:38.526090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:51:38.526096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:51:38.526114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:51:38.526119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:51:38.526131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:51:38.526136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:51:38.526152Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:51:38.526158Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:51:38.526169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:51:38.526175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:51:38.530109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:51:38.530133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:51:38.530147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:51:38.530153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:51:38.530175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:51:38.530182Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:51:38.530193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:51:38.530201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:51:38.530213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:51:38.530219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:51:38.530227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:51:38.530234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:51:38.530283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:51:38.530290Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:51:38.530309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:51:38.530316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:51:38.530329Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:51:38.530338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:51:38.530356Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:51:38.530362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:51:38.530376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:51:38.530381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:51:39.774148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2942:3129], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:39.774207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:39.775324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2024-11-21T17:51:40.491943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3089:3172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:40.491980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:40.494235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000013s 2024-11-21T17:51:42.459676Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3418:3622] 2024-11-21T17:51:42.460222Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2024-11-21T17:51:39.568986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791740725603586:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:39.569101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ea5/r3tmp/tmpFOfhj1/pdisk_1.dat 2024-11-21T17:51:39.653987Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:39.668469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:39.668500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:39.672857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19678 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:51:39.764538Z node 1 :TX_PROXY DEBUG: actor# [1:7439791740725603661:2136] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:39.764559Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791740725604080:2424] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:39.764617Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791740725603685:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:39.764634Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791740725603685:2149], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:39.764697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:39.765128Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603334:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791740725604085:2425] 2024-11-21T17:51:39.765135Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603337:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791740725604086:2425] 2024-11-21T17:51:39.765147Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740725603334:2050] Subscribe: subscriber# [1:7439791740725604085:2425], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:39.765150Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740725603337:2053] Subscribe: subscriber# [1:7439791740725604086:2425], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:39.765163Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603340:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791740725604087:2425] 2024-11-21T17:51:39.765166Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740725603340:2056] Subscribe: subscriber# [1:7439791740725604087:2425], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# 
AckNotifications: true 2024-11-21T17:51:39.765171Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604086:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740725603337:2053] 2024-11-21T17:51:39.765176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604085:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740725603334:2050] 2024-11-21T17:51:39.765178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603337:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791740725604086:2425] 2024-11-21T17:51:39.765182Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603334:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791740725604085:2425] 2024-11-21T17:51:39.765185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604087:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740725603340:2056] 2024-11-21T17:51:39.765189Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603340:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791740725604087:2425] 2024-11-21T17:51:39.765191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740725604083:2425] 2024-11-21T17:51:39.765198Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740725604082:2425] 2024-11-21T17:51:39.765210Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791740725604081:2425][/dc-1] Set up state: owner# [1:7439791740725603685:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:39.765242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740725604084:2425] 2024-11-21T17:51:39.765247Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791740725604081:2425][/dc-1] Path was already updated: owner# [1:7439791740725603685:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:39.765254Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604085:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791740725604082:2425], cookie# 1 2024-11-21T17:51:39.765257Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604086:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7439791740725604083:2425], cookie# 1 2024-11-21T17:51:39.765260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604087:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791740725604084:2425], cookie# 1 2024-11-21T17:51:39.765265Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603334:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791740725604085:2425], cookie# 1 2024-11-21T17:51:39.765270Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603337:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791740725604086:2425], cookie# 1 2024-11-21T17:51:39.765274Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603340:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791740725604087:2425], cookie# 1 2024-11-21T17:51:39.765278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604085:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740725603334:2050], cookie# 1 2024-11-21T17:51:39.765281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604086:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740725603337:2053], cookie# 1 2024-11-21T17:51:39.765287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791740725604087:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740725603340:2056], cookie# 1 2024-11-21T17:51:39.765310Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740725604082:2425], cookie# 1 2024-11-21T17:51:39.765316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:39.765320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740725604083:2425], cookie# 1 2024-11-21T17:51:39.765323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:39.765327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740725604084:2425], cookie# 1 2024-11-21T17:51:39.765329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791740725604081:2425][/dc-1] Unexpected sync response: sender# [1:7439791740725604084:2425], cookie# 1 2024-11-21T17:51:39.774755Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791740725603685:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T17:51:39.774826Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439791740725603685:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 91740725603661:2136], Recipient [1:7439791753610506886:3094]: NKikimr::TEvTxProxyReq::TEvMakeRequest 2024-11-21T17:51:42.373550Z node 1 :TX_PROXY TRACE: StateWaitInit, processing event TEvTxProxyReq::TEvMakeRequest 2024-11-21T17:51:42.373560Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] Cookie# 0 txid# 281474976715668 HANDLE TDataReq marker# P1 2024-11-21T17:51:42.373651Z node 1 :TX_PROXY DEBUG: Actor [1:7439791753610506886:3094] txid 281474976715668 disallow followers cause of operation 2 read target mode 0 2024-11-21T17:51:42.373653Z node 1 :TX_PROXY DEBUG: Actor [1:7439791753610506886:3094] txid 281474976715668 disallow followers cause of operation 2 read target mode 0 2024-11-21T17:51:42.373658Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 SEND to# [1:7439791740725603685:2149] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2024-11-21T17:51:42.373686Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [1:7439791740725603685:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo Point: (Uint64 : 42) }] } 2024-11-21T17:51:42.373699Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [1:7439791740725603685:2149], cacheItem# { Subscriber: { Subscriber: [1:7439791753610506841:3080] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211502350 PathId: [OwnerId: 72057594046644480, LocalPathId: 8] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.373715Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [1:7439791740725603685:2149], cacheItem# { Subscriber: { Subscriber: [1:7439791753610506732:2982] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1732211502250 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.373762Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791753610506888:3096], recipient# [1:7439791753610506886:3094], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 8] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 7] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } Point: (Uint64 : 42) }] } 2024-11-21T17:51:42.373773Z node 1 :TX_PROXY TRACE: StateWaitResolve, received event# 269746178, Sender [1:7439791753610506888:3096], Recipient [1:7439791753610506886:3094]: NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult 2024-11-21T17:51:42.373775Z node 
1 :TX_PROXY TRACE: StateWaitResolve, processing event TEvTxProxySchemeCache::TEvResolveKeySetResult 2024-11-21T17:51:42.373779Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T17:51:42.373910Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037892 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T17:51:42.373937Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 SEND TEvProposeTransaction to datashard 72075186224037894 with 327 bytes program affected shards 2 followers disallowed marker# P4 2024-11-21T17:51:42.377475Z node 1 :TX_PROXY TRACE: StateWaitPrepare, received event# 269550080, Sender [2:7439791752688660392:2307], Recipient [1:7439791753610506886:3094] 2024-11-21T17:51:42.377488Z node 1 :TX_PROXY TRACE: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:51:42.377509Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037894 read size 0 out readset size 0 marker# P6 2024-11-21T17:51:42.377516Z node 1 :TX_PROXY TRACE: StateWaitPrepare, received event# 269550080, Sender [2:7439791752688660214:2294], Recipient [1:7439791753610506886:3094] 2024-11-21T17:51:42.377518Z node 1 :TX_PROXY TRACE: StateWaitPrepare, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:51:42.377535Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037892 read size 0 out readset size 0 marker# P6 2024-11-21T17:51:42.377544Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 SEND EvProposeTransaction to# 72075186224037888 Coordinator marker# P7 2024-11-21T17:51:42.378063Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [2:7439791744098725390:2271], Recipient [1:7439791753610506886:3094] 2024-11-21T17:51:42.378069Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T17:51:42.378076Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P11 Status# 16 2024-11-21T17:51:42.405099Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269091328, Sender [2:7439791744098725390:2271], Recipient [1:7439791753610506886:3094] 2024-11-21T17:51:42.405114Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvTxProxy::TEvProposeTransactionStatus 2024-11-21T17:51:42.405124Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 HANDLE TEvProposeTransactionStatus TDataReq marker# P10 Status# 17 2024-11-21T17:51:42.410161Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [2:7439791752688660214:2294], Recipient [1:7439791753610506886:3094] 2024-11-21T17:51:42.410194Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:51:42.410212Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 
2024-11-21T17:51:42.410225Z node 1 :TX_PROXY TRACE: StateWaitPlan, received event# 269550080, Sender [2:7439791752688660392:2307], Recipient [1:7439791753610506886:3094] 2024-11-21T17:51:42.410226Z node 1 :TX_PROXY TRACE: StateWaitPlan, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T17:51:42.410229Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037894 marker# P12 2024-11-21T17:51:42.410361Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791753610506886:3094] txid# 281474976715668 MergeResult ExecComplete TDataReq marker# P17 2024-11-21T17:51:42.410403Z node 1 :TX_PROXY INFO: Actor# [1:7439791753610506886:3094] txid# 281474976715668 RESPONSE Status# ExecComplete prepare time: 0.003984s execute time: 0.032858s total time: 0.036842s marker# P13 2024-11-21T17:51:42.424056Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603334:2050] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439791744098725347:2099] 2024-11-21T17:51:42.424087Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740725603334:2050] Unsubscribe: subscriber# [2:7439791744098725347:2099], path# /dc-1/USER_0 2024-11-21T17:51:42.424094Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603337:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439791744098725348:2099] 2024-11-21T17:51:42.424098Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740725603337:2053] Unsubscribe: subscriber# [2:7439791744098725348:2099], path# /dc-1/USER_0 2024-11-21T17:51:42.424103Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740725603340:2056] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [2:7439791744098725349:2099] 2024-11-21T17:51:42.424106Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740725603340:2056] Unsubscribe: subscriber# [2:7439791744098725349:2099], path# /dc-1/USER_0 2024-11-21T17:51:42.424392Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2024-11-21T17:51:42.424604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:42.568551Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791740725603685:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.568594Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439791740725603685:2149], cacheItem# { Subscriber: { Subscriber: [1:7439791745020571717:2649] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.568628Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791753610506902:3105], 
recipient# [1:7439791753610506901:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:42.646040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:42.646064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:42.646070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:42.646074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:42.646088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:42.646092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:42.646101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:42.646184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:42.657037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:42.657063Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:42.660102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:42.660975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:42.661012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:42.662467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:42.662659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:42.662765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.662906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:42.664204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.664788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:51:42.664805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.664851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:42.664859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:42.664866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:42.664884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.666434Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:42.682261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:42.682357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.682409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:42.682455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:42.682460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.683257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.683289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:42.683339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.683351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:42.683356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:42.683361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:42.683784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.683796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:42.683801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:42.684084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.684095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.684101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.684107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.685060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:42.685437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:42.685488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:42.685649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.685675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.685682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.685736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:42.685744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.685778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:42.685791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:42.686147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:42.686155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:42.686193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.686196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:42.686284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.686292Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:42.686304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:42.686308Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.686314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:42.686319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.686323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:42.686327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:42.686339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:42.686344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:42.686348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:42.686650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:42.686668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:42.686673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:42.686678Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:42.686683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:42.686697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
serAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774839Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:51:42.774849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:42.774862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:51:42.774940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.774954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, 
read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.775143Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:42.775800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:42.776054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:513:2449], Recipient [1:513:2449]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:42.776064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:42.776218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:42.776225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.776397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:42.776405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:42.776409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:42.776411Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:42.776675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:548:2449], Recipient [1:513:2449]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:42.776685Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:42.776689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:513:2449] sender: [1:569:2058] recipient: [1:15:2062] 2024-11-21T17:51:42.817589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:568:2493], Recipient [1:513:2449]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:51:42.817606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:42.817638Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:42.817693Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 45us result status StatusSuccess 2024-11-21T17:51:42.817813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.817904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:570:2494], Recipient [1:513:2449]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2024-11-21T17:51:42.817910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T17:51:42.817916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2024-11-21T17:51:42.817922Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T17:51:42.817930Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2024-11-21T17:51:42.817961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:571:2495], Recipient [1:513:2449]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:51:42.817965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:42.817972Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:42.817986Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 15us result status StatusSuccess 2024-11-21T17:51:42.818041Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> SystemView::TopPartitionsFields [GOOD] >> SystemView::TopPartitionsFollowers >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:40.679234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:40.679262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:40.679266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:40.679269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:40.679284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:40.679286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:40.679294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:40.679373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:40.687145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:40.687167Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:40.689760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:40.690384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:40.690414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:40.692090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:40.692360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:40.692463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:40.692568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:40.693678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:40.693950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:40.693959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:40.694000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:40.694007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:40.694013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:40.694028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.695386Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:40.711010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:40.711114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.711185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:40.711241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:40.711250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.712114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:40.712144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:40.712193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.712204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:40.712209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:40.712214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:40.712719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.712732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:40.712737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:40.713114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.713126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.713131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:40.713138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:40.713751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:40.714135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:40.714194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:40.714384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:40.714408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:40.732150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:40.732279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:40.732294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:40.732334Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:40.732377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:40.733286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:40.733297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:40.733347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:40.733353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:40.733448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:40.733457Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:40.733469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:40.733473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:40.733481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:40.733487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:40.733492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:40.733496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:40.733510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:40.733516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:40.733520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:40.733909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:40.733924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:40.733929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:40.733934Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:40.733938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:40.733951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
e: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.406613Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T17:51:42.406650Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2024-11-21T17:51:42.406965Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2024-11-21T17:51:42.407019Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2024-11-21T17:51:42.407091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2024-11-21T17:51:42.424153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:51:42.434512Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.434602Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 108us result status StatusSuccess 2024-11-21T17:51:42.434762Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.951173Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T17:51:42.951209Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2024-11-21T17:51:42.951459Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2024-11-21T17:51:42.951482Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2024-11-21T17:51:42.951543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2024-11-21T17:51:42.963496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:51:42.973775Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.973854Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 93us result status StatusSuccess 2024-11-21T17:51:42.973996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.005413Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:43.005501Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 110us result status StatusSuccess 2024-11-21T17:51:43.005635Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.005784Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:569:2499] connected; active server actors: 1 2024-11-21T17:51:43.013639Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2024-11-21T17:51:43.013715Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2024-11-21T17:51:43.014378Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.014485Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 119us result status StatusSuccess 2024-11-21T17:51:43.014638Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: 
Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.052862Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2024-11-21T17:51:43.052978Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2024-11-21T17:51:43.075591Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:616:2534] connected; active server actors: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2024-11-21T17:51:39.893504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791740385742413:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:39.893574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:39.908426Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791739667925692:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e84/r3tmp/tmpVpFO8E/pdisk_1.dat 2024-11-21T17:51:39.951846Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:40.036491Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:40.053223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:40.053252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:40.054392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:40.056180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:40.056198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:40.061530Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:40.064447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24962 
WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T17:51:40.108140Z node 1 :TX_PROXY DEBUG: actor# [1:7439791740385742482:2133] Handle TEvNavigate describe path dc-1 2024-11-21T17:51:40.108159Z node 1 :TX_PROXY DEBUG: Actor# [1:7439791744680710229:2438] HANDLE EvNavigateScheme dc-1 2024-11-21T17:51:40.108195Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791740385742512:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:40.108206Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439791740385742512:2149], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T17:51:40.108273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T17:51:40.108653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742169:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791744680710234:2439] 2024-11-21T17:51:40.108670Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740385742169:2052] Subscribe: subscriber# [1:7439791744680710234:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:40.108684Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742172:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791744680710235:2439] 2024-11-21T17:51:40.108687Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740385742172:2055] Subscribe: subscriber# [1:7439791744680710235:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:40.108694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742175:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439791744680710236:2439] 2024-11-21T17:51:40.108697Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439791740385742175:2058] Subscribe: subscriber# [1:7439791744680710236:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T17:51:40.108709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710234:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740385742169:2052] 2024-11-21T17:51:40.108714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710235:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740385742172:2055] 2024-11-21T17:51:40.108720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710236:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791740385742175:2058] 2024-11-21T17:51:40.108727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791744680710231:2439] 2024-11-21T17:51:40.108732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439791744680710230:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791744680710232:2439] 2024-11-21T17:51:40.108743Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439791744680710230:2439][/dc-1] Set up state: owner# [1:7439791740385742512:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:40.108770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439791744680710233:2439] 2024-11-21T17:51:40.108782Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439791744680710230:2439][/dc-1] Path was already updated: owner# [1:7439791740385742512:2149], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T17:51:40.108788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710234:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791744680710231:2439], cookie# 1 2024-11-21T17:51:40.108791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710235:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791744680710232:2439], cookie# 1 2024-11-21T17:51:40.108794Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710236:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791744680710233:2439], cookie# 1 2024-11-21T17:51:40.108798Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742169:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791744680710234:2439] 2024-11-21T17:51:40.108801Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742169:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791744680710234:2439], cookie# 1 2024-11-21T17:51:40.108805Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742172:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791744680710235:2439] 2024-11-21T17:51:40.108808Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742172:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791744680710235:2439], cookie# 1 2024-11-21T17:51:40.108811Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742175:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439791744680710236:2439] 2024-11-21T17:51:40.108813Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439791740385742175:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439791744680710236:2439], cookie# 1 2024-11-21T17:51:40.112096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710234:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740385742169:2052], cookie# 1 2024-11-21T17:51:40.112108Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710235:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740385742172:2055], cookie# 1 2024-11-21T17:51:40.112111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439791744680710236:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791740385742175:2058], cookie# 1 2024-11-21T17:51:40.112119Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791744680710231:2439], cookie# 1 2024-11-21T17:51:40.112127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T17:51:40.112131Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791744680710232:2439], cookie# 1 2024-11-21T17:51:40.112136Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T17:51:40.112141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439791744680710233:2439], cookie# 1 2024-11-21T17:51:40.112144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439791744680710230:2439][/dc-1] Unexpected sync response: sender# [1:7439791744680710233:2439], cookie# 1 2024-11-21T17:51:40.126545Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439791740385742512:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { Rese ... N: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7439791751573603237:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:42.953838Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439791739667925769:2103], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.953883Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439791739667925769:2103], cacheItem# { Subscriber: { Subscriber: [2:7439791743962893121:2113] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.953908Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439791752552827729:2119], recipient# [2:7439791752552827728:2281], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.972520Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791754479714550:2175], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.972563Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791754479714550:2175], cacheItem# { Subscriber: { Subscriber: [3:7439791754479714632:2212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.972570Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791754479714550:2175], cacheItem# { Subscriber: { Subscriber: [3:7439791754479714633:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:42.972607Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791754479714648:2216], recipient# [3:7439791754479714631:2511], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:42.972744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439791754479714631:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:43.039761Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791754479714550:2175], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:43.039803Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791754479714550:2175], cacheItem# { Subscriber: { Subscriber: [3:7439791754479714632:2212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:43.039810Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791754479714550:2175], cacheItem# { Subscriber: { Subscriber: [3:7439791754479714633:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:43.039837Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791758774681945:2217], recipient# [3:7439791754479714631:2511], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:43.039989Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439791754479714631:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:43.111976Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439791754479714550:2175], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:43.112029Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791754479714550:2175], cacheItem# { Subscriber: { Subscriber: [3:7439791754479714632:2212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:43.112041Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439791754479714550:2175], cacheItem# { Subscriber: { Subscriber: [3:7439791754479714633:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T17:51:43.112070Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439791758774681946:2218], recipient# [3:7439791754479714631:2511], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T17:51:43.112124Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439791754479714631:2511], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:43.266901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:43.266922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:43.266925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:43.266931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:43.266942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:43.266944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:43.266951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:43.267032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:43.276712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:43.276732Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:43.279527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:43.280306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:43.280343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:43.282407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:43.282556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:43.282651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.282730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:43.283513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.283743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:43.283754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.283788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:43.283795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:51:43.283801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:43.283815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.284997Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:43.302964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:43.303071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.303130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:43.303181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:43.303189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.303795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.303821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:43.303853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.303862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:43.303866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:43.303870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:43.304247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.304259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:43.304264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:43.304554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.304562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.304567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.304572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.305140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:43.305453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:43.305497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:43.305652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.305675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.305681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.305733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:43.305738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.305760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:43.305771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:43.306150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:43.306159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:43.306201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.306206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:43.306301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.306308Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:43.306318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:43.306322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.306328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:43.306333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.306337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T17:51:43.306341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:43.306352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:43.306358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:43.306361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:43.306632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:43.306645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:43.306649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:43.306654Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:43.306661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:43.306672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... nerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:51:43.466754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T17:51:43.466757Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:43.466779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:855:2734], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:51:43.466783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T17:51:43.466787Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2024-11-21T17:51:43.466804Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T17:51:43.466823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:215:2215], Recipient [1:282:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 2024-11-21T17:51:43.466827Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2024-11-21T17:51:43.466832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:51:43.466864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:215:2215], Recipient [1:282:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 2024-11-21T17:51:43.466867Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2024-11-21T17:51:43.466873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:51:43.467005Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2024-11-21T17:51:43.467033Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2024-11-21T17:51:43.467342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:43.467416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:51:43.467419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:43.467708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:43.467725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:51:43.467728Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:43.467740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:51:43.467751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:51:43.467774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:855:2734], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:43.467778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:43.467781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:51:43.467845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:51:43.467850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:51:43.467904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:869:2748], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:43.467907Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:43.467911Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T17:51:43.467925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:497:2437], Recipient [1:282:2270]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2024-11-21T17:51:43.467929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T17:51:43.467937Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:51:43.467952Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:51:43.467956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:867:2746] 2024-11-21T17:51:43.467972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:869:2748], Recipient [1:282:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:43.467976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:43.467981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2024-11-21T17:51:43.468039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:870:2749], Recipient [1:282:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:51:43.468043Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:43.468051Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:43.468076Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 24us result status StatusSuccess 2024-11-21T17:51:43.468167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.468265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:871:2750], Recipient [1:282:2270]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 
2024-11-21T17:51:43.468271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T17:51:43.468277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2024-11-21T17:51:43.468283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T17:51:43.468344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:872:2751], Recipient [1:282:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T17:51:43.468349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:43.468355Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:43.468672Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 18us result status StatusSuccess 2024-11-21T17:51:43.468753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> 
ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal |83.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:42.325332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:42.325358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:42.325362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:42.325373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:42.325389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:42.325392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:42.325400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:42.325465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:42.335571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:42.335597Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:42.338473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:42.339258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:51:42.339295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:42.340558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:42.340704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:42.340802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.340893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:42.341717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.341979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:42.341989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.342029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:42.342036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:42.342041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:42.342056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.343263Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:42.359938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:42.360039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.360106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:42.360153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:42.360159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.360962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.360988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:42.361033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.361043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:42.361047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:42.361052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:42.361369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.361378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:42.361382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:42.361761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.361778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.361784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.361790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.362347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:42.362831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:42.362887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:42.363088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.363114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.363124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.363177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:42.363183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.363212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:42.363224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:42.363647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:51:42.363655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:42.363698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.363703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:42.363797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.363805Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:42.363816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:42.363821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.363827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:42.363833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.363837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:42.363841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:42.363851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:42.363857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:42.363861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:42.364147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:42.364161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:42.364166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:42.364171Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:42.364176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:42.364192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.665124Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T17:51:43.665159Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2024-11-21T17:51:43.665222Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546] Handle TEvPersQueue::TEvStatus 2024-11-21T17:51:43.665300Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T17:51:43.665328Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T17:51:43.665341Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T17:51:43.665437Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 16975298 2024-11-21T17:51:43.665459Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2024-11-21T17:51:43.665497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2024-11-21T17:51:43.683429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:51:43.693652Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.693732Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 92us result status StatusSuccess 2024-11-21T17:51:43.693856Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:44.196768Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T17:51:44.196804Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2024-11-21T17:51:44.196875Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546] Handle TEvPersQueue::TEvStatus 2024-11-21T17:51:44.196949Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T17:51:44.196968Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T17:51:44.196980Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, 
Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2024-11-21T17:51:44.197112Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2024-11-21T17:51:44.197142Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2024-11-21T17:51:44.197181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2024-11-21T17:51:44.208706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T17:51:44.219029Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:44.219114Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 102us result status StatusSuccess 2024-11-21T17:51:44.219268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:44.252510Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:44.252595Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 102us result status StatusSuccess 2024-11-21T17:51:44.252771Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T17:51:43.773715Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.773724Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.773729Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:43.773885Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:43.774105Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:51:43.774123Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774398Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774403Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774406Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:43.774495Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:51:43.774575Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:51:43.774582Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774749Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774752Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774754Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:43.774823Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:51:43.774835Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774838Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.774986Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T17:51:43.775186Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.775189Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.775191Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:43.775243Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:51:43.775247Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.775249Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.775254Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T17:51:43.776800Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:51:43.776806Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:51:43.776809Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:43.776882Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:43.777049Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:43.778240Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:51:43.778868Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:43.778958Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T17:51:43.779505Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T17:51:43.779657Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:43.779673Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:51:43.779677Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:51:43.779680Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T17:51:43.779684Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T17:51:43.779687Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T17:51:43.779691Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T17:51:43.779694Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T17:51:43.779705Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T17:51:43.779708Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T17:51:43.779711Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T17:51:43.779715Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T17:51:43.779717Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T17:51:43.779720Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T17:51:43.779724Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T17:51:43.779727Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T17:51:43.779753Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T17:51:43.779757Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T17:51:43.779759Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T17:51:43.779762Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T17:51:43.779765Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T17:51:43.779767Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T17:51:43.779770Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T17:51:43.779774Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T17:51:43.779777Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T17:51:43.779780Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T17:51:43.779784Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T17:51:43.779786Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T17:51:43.779789Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T17:51:43.779791Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T17:51:43.779794Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T17:51:43.779796Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T17:51:43.779805Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T17:51:43.779808Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T17:51:43.779811Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T17:51:43.779813Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T17:51:43.779818Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T17:51:43.779821Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T17:51:43.779824Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T17:51:43.779826Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T17:51:43.779829Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T17:51:43.779832Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T17:51:43.779835Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T17:51:43.779838Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T17:51:43.779840Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T17:51:43.779846Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T17:51:43.779850Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T17:51:43.779852Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T17:51:43.779855Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T17:51:43.779858Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T17:51:43.779868Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T17:51:43.779943Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T17:51:43.779963Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T17:51:43.779966Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T17:51:43.779970Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T17:51:43.779973Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T17:51:43.779976Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T17:51:43.779979Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T17:51:43.779982Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T17:51:43.779986Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T17:51:43.779989Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T17:51:43.779992Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T17:51:43.779995Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T17:51:43.779998Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T17:51:43.780001Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T17:51:43.780003Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T17:51:43.780006Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T17:51:43.780009Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T17:51:43.780013Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T17:51:43.780016Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T17:51:43.780019Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T17:51:43.780022Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T17:51:43.780026Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T17:51:43.780028Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T17:51:43.780031Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T17:51:43.780033Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T17:51:43.780036Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T17:51:43.780038Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T17:51:43.780041Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T17:51:43.780043Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T17:51:43.780046Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T17:51:43.780050Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T17:51:43.780053Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T17:51:43.780055Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T17:51:43.780063Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T17:51:43.780067Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T17:51:43.780070Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T17:51:43.780073Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T17:51:43.780076Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T17:51:43.780078Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T17:51:43.780081Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T17:51:43.780084Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T17:51:43.780088Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T17:51:43.780091Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T17:51:43.780094Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T17:51:43.780096Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T17:51:43.780099Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T17:51:43.780101Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T17:51:43.780104Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T17:51:43.780107Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T17:51:43.780110Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T17:51:43.780113Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T17:51:43.780117Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T17:51:43.780141Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T17:51:43.780436Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.780439Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.780442Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:43.780505Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:51:43.780603Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:43.780644Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.781069Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:43.882879Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:43.884288Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:51:43.884314Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:43.884321Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:51:43.884349Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:51:44.088290Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T17:51:44.191890Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:51:44.192015Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:51:44.192083Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:51:44.192466Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.192471Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.192474Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.192559Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.192658Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.192702Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.192823Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.296246Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.300321Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:51:44.300360Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.300368Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:51:44.300404Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T17:51:44.300445Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:51:44.300539Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:51:44.300563Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T17:51:44.300594Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> TestYmqHttpProxy::TestDeleteMessage >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TestKinesisHttpProxy::TestListStreamConsumers >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TestKinesisHttpProxy::TestWrongRequest >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T17:51:44.414982Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.414990Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.414993Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.415106Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:51:44.415116Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.415119Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.415688Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009692s 2024-11-21T17:51:44.415799Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.420696Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:51:44.420715Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.421882Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.421887Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.421890Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.421968Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:51:44.421978Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.421981Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.421998Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009235s 2024-11-21T17:51:44.422067Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:51:44.422159Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:51:44.422171Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.422377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.422381Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.422383Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.422565Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:51:44.422575Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.422577Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.422590Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.223856s 2024-11-21T17:51:44.422671Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.423902Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:51:44.423913Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424133Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424136Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424139Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.424202Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T17:51:44.424208Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424210Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424221Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.205682s 2024-11-21T17:51:44.424286Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.424408Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T17:51:44.424463Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424707Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424711Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.424715Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.424757Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.424807Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.425905Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.425967Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T17:51:44.425973Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.425975Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.425988Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.277429s 2024-11-21T17:51:44.426060Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.426064Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:51:44.427886Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.427891Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.427894Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.427975Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.428084Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.428111Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.428214Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.558595Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.560348Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:51:44.560397Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.560410Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:51:44.560450Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:51:44.668298Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:51:44.671845Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T17:51:44.673613Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.673622Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.673627Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.673717Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.674602Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.674672Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.680462Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.780722Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.784530Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:51:44.784569Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T17:51:44.784579Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T17:51:44.784613Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T17:51:44.784660Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T17:51:44.784749Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:51:44.787049Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T17:51:44.787125Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> Compression::WriteRAW >> KqpConstraints::AddNonColumnDoesnotReturnInternalError [GOOD] >> KqpConstraints::AlterTableAddNotNullWithDefault ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T17:51:44.798419Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.798430Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.798434Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.798548Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.798727Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.800273Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.800387Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.804697Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.804703Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.804707Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.804780Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.804879Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.804935Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.804995Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.805089Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T17:51:44.805323Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.805326Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.805328Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.807849Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.808360Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.808395Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.808877Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.809190Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.809328Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.809362Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.809373Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:51:44.809721Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.809724Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.809729Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.809945Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.824371Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.824465Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.828331Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T17:51:44.828744Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:44.828778Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:51:44.828843Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:51:44.828852Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:51:44.828876Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.828882Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:51:44.828891Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:51:44.828928Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T17:51:44.828935Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:51:44.828938Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:51:44.828940Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:44.828955Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T17:51:44.828973Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:51:44.828976Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:51:44.828979Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:51:44.828989Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T17:51:44.828993Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:51:44.828998Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:51:44.829002Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:44.829012Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T17:51:44.836740Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.836746Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.836749Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.848330Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.848516Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.848599Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.852340Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T17:51:44.852628Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:44.852658Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:51:44.852985Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:51:44.852995Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:51:44.853381Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.853389Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:51:44.853394Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:51:44.853397Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:51:44.853409Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:44.853461Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2024-11-21T17:51:44.853485Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:51:44.853488Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:51:44.853492Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:51:44.853494Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:51:44.853498Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:44.853514Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2024-11-21T17:51:44.856786Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.856792Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.856795Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.856904Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.857031Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.857081Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.860348Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. 
Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.860587Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:44.860628Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:44.864331Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T17:51:44.864359Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T17:51:44.864400Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.864409Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:51:44.864415Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T17:51:44.864418Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T17:51:44.864434Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T17:51:44.864437Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T17:51:44.864480Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T17:51:44.864547Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> TestYmqHttpProxy::TestListQueues >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2024-11-21T17:50:57.995684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791559862537114:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:57.995822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:58.041602Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e18/r3tmp/tmpW61IKt/pdisk_1.dat 2024-11-21T17:50:58.067806Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6053, node 1 2024-11-21T17:50:58.081383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e18/r3tmp/yandexHvBE6o.tmp 2024-11-21T17:50:58.081397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e18/r3tmp/yandexHvBE6o.tmp 2024-11-21T17:50:58.085479Z INFO: TTestServer started on Port 21484 GrpcPort 6053 TClient is connected to server localhost:21484 2024-11-21T17:50:58.098188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.098212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.099428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:50:58.110217Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e18/r3tmp/yandexHvBE6o.tmp 2024-11-21T17:50:58.110405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration PQClient connected to localhost:6053 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:58.143854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.148671Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.153060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:50:58.282969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791564157505008:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.282983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791564157505034:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.282987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.283723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.284758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791564157505066:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.284889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.285538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791564157505037:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:50:58.321518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.328159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.343725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791564157505372:2591] 2024-11-21T17:51:02.995906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791559862537114:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.995961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:03.589199Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.603355Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.603869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791585632342169:2758], Recipient [1:7439791564157504686:2198]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.603887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.603889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.603897Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791585632342165:2755], Recipient [1:7439791564157504686:2198]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2024-11-21T17:51:03.603899Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.618960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.619167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.619243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.619258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.619265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.619273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:03.619282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:03.619312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.619432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:03.619443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T17:51:03.619453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:03.619631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusAccepted TxId: 281474976715674 SchemeshardId: 72057594046644480 PathId: 13, at schemeshard: 72057594046644480 2024-11-21T17:51:03.619648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/test-topic 2024-11-21T17:51:03.619650Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 720575940466444 ... 
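The CreatePersQueueGroup transaction traced above (PartitionStrategy with MinPartitionCount: 1 / MaxPartitionCount: 100 / CAN_SPLIT, consumer "test-consumer") is what the test setup issues when it creates its topic. A rough public-SDK equivalent follows as a sketch, not the test's own code: method names are assumed from NYdb::NTopic, and the split thresholds and the 100-partition upper bound are left to the server-side config because how they are exposed client-side depends on the SDK version.

```cpp
// Create a topic with a minimum partition count and a consumer (sketch).
// Assumes NYdb::NTopic; topic path and consumer name are taken from the trace.
// The autoscaling fields seen above (ScaleThresholdSeconds, write-speed
// percents, CAN_SPLIT, MaxPartitionCount: 100) are not set here -- exposing
// them is SDK-version dependent.
#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

#include <util/generic/yexception.h>

void CreateTestTopic(NYdb::TDriver& driver) {
    NYdb::NTopic::TTopicClient client(driver);

    auto settings = NYdb::NTopic::TCreateTopicSettings()
        .BeginConfigurePartitioningSettings()
            .MinActivePartitions(1)              // MinPartitionCount: 1 in the trace
        .EndConfigurePartitioningSettings()
        .BeginAddConsumer("test-consumer")       // Consumers { Name: "test-consumer" }
        .EndAddConsumer();

    auto status = client.CreateTopic("test-topic", settings).GetValueSync();
    Y_ENSURE(status.IsSuccess(), status.GetIssues().ToString());
}
```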
53406326164:2434]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:44.653122Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:44.653124Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:44.653127Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_10317962273841411516_v1 2024-11-21T17:51:44.653130Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791753406326864:2629] destroyed 2024-11-21T17:51:44.653145Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_10317962273841411516_v1 2024-11-21T17:51:44.653148Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_10317962273841411516_v1 2024-11-21T17:51:44.653151Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_10317962273841411516_v1 2024-11-21T17:51:44.656626Z :INFO: [/Root] SessionId [producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T17:51:44.656643Z :INFO: [/Root] SessionId [producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:51:44.656656Z :DEBUG: [/Root] SessionId [producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:51:44.656872Z :INFO: [/Root] SessionId [producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:51:44.656879Z :DEBUG: [/Root] SessionId [producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:51:44.660346Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0 grpc read done: success: 0 data: 2024-11-21T17:51:44.660361Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0 grpc read failed 2024-11-21T17:51:44.660369Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0 grpc closed 2024-11-21T17:51:44.660374Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|c1a602bb-de1fb669-8dd9adee-5a2426fc_0 is DEAD 2024-11-21T17:51:44.660701Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7439791753406326881:2633], Recipient [5:7439791753406326883:2633]: NActors::TEvents::TEvPoison 2024-11-21T17:51:44.660715Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:51:44.661111Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439791753406326913:3119], Recipient [5:7439791753406326164:2434]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:44.661119Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:44.661122Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:44.661131Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791753406326912:2633] destroyed 2024-11-21T17:51:44.661143Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 
(NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7439791753406326164:2434], Partition 0, Sender [5:7439791753406326164:2434], Recipient [5:7439791753406326226:2438], Cookie: 0 2024-11-21T17:51:44.661147Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7439791753406326164:2434], Recipient [5:7439791753406326226:2438]: NKikimr::TEvPQ::TEvPipeDisconnected 2024-11-21T17:51:44.661150Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2024-11-21T17:51:44.661156Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:51:44.661160Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2024-11-21T17:51:44.661165Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:44.661177Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:44.661179Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:44.661184Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:44.744348Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791753406326164:2434], Partition 0, Sender [0:0:0], Recipient [5:7439791753406326226:2438], Cookie: 0 2024-11-21T17:51:44.744374Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791753406326226:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.744379Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.744397Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:44.744422Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:44.744427Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:44.744436Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:44.748419Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791757701294336:2667], Partition 1, Sender [0:0:0], Recipient [5:7439791757701294418:2677], Cookie: 0 2024-11-21T17:51:44.748440Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791757701294418:2677]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.748445Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.748459Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:44.748480Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:44.748483Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:44.748488Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:44.748500Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791757701294334:2666], Partition 2, Sender [0:0:0], Recipient [5:7439791757701294416:2675], Cookie: 0 2024-11-21T17:51:44.748504Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791757701294416:2675]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.748506Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.748509Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:44.748514Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:44.748516Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:44.748519Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:44.848312Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791753406326164:2434], Partition 0, Sender [0:0:0], Recipient [5:7439791753406326226:2438], Cookie: 0 2024-11-21T17:51:44.848340Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791753406326226:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.848345Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.848360Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:44.848384Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:44.848387Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:44.848392Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:44.852301Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791757701294336:2667], Partition 1, Sender [0:0:0], Recipient [5:7439791757701294418:2677], Cookie: 0 2024-11-21T17:51:44.852329Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791757701294418:2677]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.852335Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.852348Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:44.852376Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:44.852379Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:44.852385Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:44.852398Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791757701294334:2666], Partition 2, Sender [0:0:0], Recipient [5:7439791757701294416:2675], Cookie: 0 2024-11-21T17:51:44.852402Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791757701294416:2675]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.852405Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:44.852409Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:44.852415Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:44.852417Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:44.852421Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] |83.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:42.417281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:42.417310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:42.417316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:42.417321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:42.417338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:42.417342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:42.417352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:42.417432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:42.428878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:42.428904Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:42.432012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:42.432653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:42.432692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:42.434442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:42.434614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:42.434748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.434864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:42.435792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.436035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:42.436043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.436074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:42.436078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:42.436082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:42.436095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.437391Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:42.451777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:42.451855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.451907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:42.451947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:42.451953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.452608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.452632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:42.452669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.452677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:42.452680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:42.452684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:42.453058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.453069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:42.453073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:42.453518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.453534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.453539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.453546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.454155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:42.454605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:42.454654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:42.454845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:42.454868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:42.454876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:42.454928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:42.454934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:51:42.454986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:42.454999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:42.455543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:42.455552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:42.455603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:42.455610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:42.455686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:42.455692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:42.455703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:42.455708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.455715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:42.455719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:42.455724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:42.455729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:42.455739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:42.455745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:42.455749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:42.456101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:42.456118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:42.456123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:42.456128Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:42.456134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:42.456149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
letID 72057594046678944 is [1:121:2147] sender: [1:751:2058] recipient: [1:750:2707] Leader for TabletID 72057594046678944 is [1:752:2708] sender: [1:753:2058] recipient: [1:750:2707] 2024-11-21T17:51:45.353467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:45.353500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:45.353505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:45.353511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:45.353516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:45.353519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:45.353528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:45.353589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:45.355865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:45.356213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:45.356283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:45.356327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:45.356334Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:45.356361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:45.356442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:51:45.356486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:51:45.356652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:45.356707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.356989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.357019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.357043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.357056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.357062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.357068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:45.360106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:45.360134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:45.361447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:45.361470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:45.361479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:45.361655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:752:2708] sender: [1:805:2058] recipient: [1:15:2062] 2024-11-21T17:51:45.395053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:45.395131Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 98us result status StatusSuccess 2024-11-21T17:51:45.395252Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82136 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:45.395436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:45.395455Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 21us result status StatusSuccess 2024-11-21T17:51:45.395520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T17:51:45.598927Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.598933Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.598936Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:45.599045Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:45.599171Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:45.600680Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.601435Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:45.601783Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:45.601891Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:45.601967Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T17:51:45.601982Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:45.602020Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:45.602035Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T17:51:45.602045Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2024-11-21T17:51:45.602048Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:51:45.602444Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.602448Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.602451Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:45.602505Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:45.602621Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:45.602663Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.602696Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T17:51:45.602864Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:45.602889Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:51:45.602952Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:51:45.602973Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:51:45.602996Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:45.603000Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:51:45.603006Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:51:45.603043Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T17:51:45.603051Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:51:45.603054Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:51:45.603056Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:45.603070Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T17:51:45.603082Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T17:51:45.603085Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T17:51:45.603087Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:51:45.603096Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T17:51:45.603100Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T17:51:45.603103Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T17:51:45.603106Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:45.603119Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T17:51:45.603395Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.603399Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.603402Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:45.603444Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:51:45.603498Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:45.603527Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:45.603576Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T17:51:45.603678Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:51:45.603698Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T17:51:45.603781Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T17:51:45.603791Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T17:51:45.603807Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:45.603812Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:51:45.603828Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2024-11-21T17:51:45.603833Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:51:45.603835Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:51:45.603842Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2024-11-21T17:51:45.603846Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:51:45.603849Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T17:51:45.603858Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2024-11-21T17:51:45.603863Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T17:51:45.603865Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:46.052075Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2024-11-21T17:51:46.072763Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:51:46.072772Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:51:46.072776Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:46.072833Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:46.072923Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:46.072977Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T17:51:46.073040Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T17:51:46.100982Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T17:51:46.101041Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:46.101047Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:51:46.101052Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:51:46.101054Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T17:51:46.101057Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T17:51:46.101060Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T17:51:46.101063Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T17:51:46.101065Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T17:51:46.101069Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T17:51:46.101071Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T17:51:46.101085Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T17:51:46.101155Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T17:51:46.102822Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2024-11-21T17:51:46.104642Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:46.104645Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:46.104647Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:46.104695Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
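The read-session trace above shows per-message commits being coalesced into half-open offset ranges ("Commit offsets [1, 2)", "[2, 3)", ... up to "[1, 201)" after a large batch). Below is a minimal, self-contained model of that coalescing written only against the behaviour visible in the log; the type and function names are invented for illustration and are not the YDB SDK API.

```cpp
// Illustrative sketch only: coalescing processed message offsets into the
// half-open commit ranges seen in the trace. Not the real SDK; names invented.
#include <cstdint>
#include <iostream>
#include <optional>

struct TCommitRange {
    uint64_t Begin = 0;  // first committed offset
    uint64_t End = 0;    // one past the last committed offset (half-open)
};

class TCommitAccumulator {
public:
    // Record one processed offset; extends the pending range while contiguous.
    void Add(uint64_t offset) {
        if (!Pending) {
            Pending = TCommitRange{offset, offset + 1};
        } else if (offset == Pending->End) {
            Pending->End = offset + 1;
        } else {
            Flush();  // gap in offsets: emit what we have, start a new range
            Pending = TCommitRange{offset, offset + 1};
        }
    }

    // Emit the pending range, mimicking "Commit offsets [begin, end)".
    void Flush() {
        if (Pending) {
            std::cout << "Commit offsets [" << Pending->Begin << ", "
                      << Pending->End << ")\n";
            Pending.reset();
        }
    }

private:
    std::optional<TCommitRange> Pending;
};

int main() {
    TCommitAccumulator acc;
    for (uint64_t offset = 1; offset <= 200; ++offset) {
        acc.Add(offset);  // offsets 1..200, as in the trace above
    }
    acc.Flush();          // prints: Commit offsets [1, 201)
}
```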
Initializing session 2024-11-21T17:51:46.104784Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:46.104831Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:46.104862Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:46.104925Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T17:51:46.105108Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:46.105110Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:46.105112Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:46.105153Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:46.105200Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:46.105213Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:46.105319Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:46.105343Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:46.105389Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:46.105397Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:51:46.105424Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: 2024-11-21T17:49:17.758943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:17.758986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:17.758996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00210b/r3tmp/tmph8gINz/pdisk_1.dat 2024-11-21T17:49:17.854364Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7431, node 1 2024-11-21T17:49:17.979741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:17.979767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:17.979771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:17.979864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:17.987054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:18.071825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:18.071858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:18.084525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16749 2024-11-21T17:49:18.548418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:49:19.553448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:19.553495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:19.602913Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:49:19.606899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:19.672367Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:19.701347Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:49:19.701376Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:49:19.724792Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:49:19.724947Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:49:19.724968Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:49:19.724973Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:49:19.724979Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:49:19.724985Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:49:19.724989Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:49:19.724996Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:49:19.725138Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:49:19.936089Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:49:19.936123Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:49:19.937826Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:49:19.940349Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:49:19.940479Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:49:19.945254Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:49:19.957888Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:49:19.957912Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:49:19.957925Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:49:19.960356Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:19.960391Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:19.961799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:49:19.965773Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:49:19.965827Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:49:19.970776Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:49:19.989925Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:49:20.021837Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:49:20.172943Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:49:20.345639Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:49:21.222194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:21.222241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:21.227856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:49:21.500768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2433:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:21.500818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:21.501339Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2438:3075]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:49:21.501396Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:49:21.501408Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2440:3077] 2024-11-21T17:49:21.501421Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2440:3077] 2024-11-21T17:49:21.501615Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2441:2945] 2024-11-21T17:49:21.501698Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2440:3077], server id = [2:2441:2945], tablet id = 72075186224037897, status = OK 2024-11-21T17:49:21.501737Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2441:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:49:21.501753Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T17:49:21.501817Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:49:21.501837Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2438:3075], StatRequests.size() = 1 2024-11-21T17:49:21.510941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:21.510984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:21.511077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2450:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:21.512509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:49:21.718270Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:49:21.718307Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:49:21.807197Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2440:3077], schemeshard count = 1 2024-11-21T17:49:22.120656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2452:3088], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:49:22.216331Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2590:3177]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:49:22.216397Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:49:22.216404Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2590:3177], StatRequests.size() = 1 2024-11-21T17:49:22.257280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xfhptakgp6z0781yj9v0f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY3NzlhZmYtYzc5YmY5ZmUtNjhiYmM1MmUtMmM3MTZkY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:49:22.289927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... SHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T17:51:37.904019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6925:4870], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T17:51:38.228894Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7042:4932]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:51:38.228950Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:51:38.228963Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7044:4934] 2024-11-21T17:51:38.228977Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7044:4934] 2024-11-21T17:51:38.229077Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7045:4935] 2024-11-21T17:51:38.229125Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7044:4934], server id = [2:7045:4935], tablet id = 72075186224037897, status = OK 2024-11-21T17:51:38.229149Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7045:4935], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:51:38.229161Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T17:51:38.229182Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:51:38.229197Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7042:4932], StatRequests.size() = 1 2024-11-21T17:51:38.319198Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWYyNzc5ZjItNTc3NTY1NDctNjhjNzlmMTQtNjAwOWFjYTM=, TxId: 2024-11-21T17:51:38.319231Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWYyNzc5ZjItNTc3NTY1NDctNjhjNzlmMTQtNjAwOWFjYTM=, TxId: 2024-11-21T17:51:38.327922Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:51:38.339535Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:51:38.339573Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:51:38.412327Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:51:38.412363Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:51:38.474623Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7044:4934], schemeshard count = 1 2024-11-21T17:51:39.482369Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T17:51:39.482419Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T17:51:39.482425Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:51:40.616453Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:51:40.648526Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:51:40.648601Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T17:51:40.648623Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:51:40.648761Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T17:51:40.649436Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:51:40.653279Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mzk2NDc2MDItYmI2YzAyYS1mNDk5MjliNC00MzM4OWJj, TxId: 2024-11-21T17:51:40.653302Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mzk2NDc2MDItYmI2YzAyYS1mNDk5MjliNC00MzM4OWJj, TxId: 2024-11-21T17:51:40.653407Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:51:40.664944Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:51:40.664973Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:2981:3266] 2024-11-21T17:51:41.941048Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T17:51:41.941088Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T17:51:41.941094Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T17:51:43.102936Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T17:51:43.103120Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:51:43.103199Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:51:43.130386Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:51:43.130427Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T17:51:43.130433Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T17:51:43.130573Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T17:51:43.131221Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:51:43.134705Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmIwYzJjMGMtNmZhYzRmNmItNDk5MTM4Yi0xZTY2NGYzNw==, TxId: 2024-11-21T17:51:43.134729Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmIwYzJjMGMtNmZhYzRmNmItNDk5MTM4Yi0xZTY2NGYzNw==, TxId: 2024-11-21T17:51:43.134839Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:51:43.146218Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T17:51:43.146243Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:51:44.378748Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T17:51:44.378784Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2024-11-21T17:51:44.378789Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T17:51:45.727131Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:51:45.727197Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T17:51:45.727203Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T17:51:45.727322Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T17:51:45.727958Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:51:45.735214Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWY3ZWQzNDctZTVlYmFjZTAtYWU4YzhhNGQtNDM1MzQxYzE=, TxId: 2024-11-21T17:51:45.735251Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWY3ZWQzNDctZTVlYmFjZTAtYWU4YzhhNGQtNDM1MzQxYzE=, TxId: 2024-11-21T17:51:45.735391Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:51:45.757134Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T17:51:45.757171Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2981:3266] 2024-11-21T17:51:45.757427Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7370:5116]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T17:51:45.758226Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:51:45.758242Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T17:51:45.759091Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:51:45.759112Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T17:51:45.759124Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T17:51:45.769540Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T17:51:45.769622Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 2024-11-21T17:51:45.769723Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7400:5128]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T17:51:45.770437Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T17:51:45.770451Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T17:51:45.770538Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T17:51:45.770548Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T17:51:45.770556Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T17:51:45.771076Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2024-11-21T17:51:45.771117Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 |83.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |83.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.2%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TestKinesisHttpProxy::TestWrongRequest [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> TestYmqHttpProxy::TestDeleteQueue >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> SystemView::GroupsFields [GOOD] >> SystemView::DescribeSystemFolder >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TestKinesisHttpProxy::ListShardsTimestamp >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestYmqHttpProxy::TestListQueues [GOOD] >> TestKinesisHttpProxy::TestCounters >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |83.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::SystemViewFailOps >> TestYmqHttpProxy::TestPurgeQueue |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:43.146554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:43.146577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
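The TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled output that follows starts by logging the batching parameters (StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s). As a rough mental model of what "not batching when disabled" means, the sketch below shows a stats queue that degenerates to per-item execution when its effective batch size is one; this is an assumption-based illustration, not the SchemeShard implementation.

```cpp
// Assumption-based sketch, not SchemeShard code: a stats queue that batches up
// to StatsMaxBatchSize items and degenerates to per-item execution when
// batching is effectively disabled (batch size clamped to 1).
#include <cstddef>
#include <cstdio>
#include <functional>
#include <vector>

struct TStatsBatchingConfig {
    size_t StatsMaxBatchSize = 100;    // 0 or 1 effectively disables batching
    double StatsBatchTimeoutSec = 0.1; // a real queue would also flush on timeout
};

class TStatsQueue {
public:
    TStatsQueue(TStatsBatchingConfig config,
                std::function<void(const std::vector<int>&)> execute)
        : Config(config), Execute(std::move(execute)) {}

    void Enqueue(int statsUpdate) {
        Batch.push_back(statsUpdate);
        const size_t limit = Config.StatsMaxBatchSize ? Config.StatsMaxBatchSize : 1;
        if (Batch.size() >= limit) {
            FlushBatch();  // with batching disabled this fires on every item
        }
    }

    void FlushBatch() {
        if (!Batch.empty()) {
            Execute(Batch);
            Batch.clear();
        }
    }

private:
    TStatsBatchingConfig Config;
    std::function<void(const std::vector<int>&)> Execute;
    std::vector<int> Batch;
};

int main() {
    size_t executions = 0;
    // Batch size 1 models "batching disabled": every update executes on its own.
    TStatsQueue queue({1, 0.0},
                      [&](const std::vector<int>&) { executions += 1; });
    for (int i = 0; i < 5; ++i) {
        queue.Enqueue(i);
    }
    std::printf("executed %zu times for 5 updates (no batching)\n", executions);
}
```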
2024-11-21T17:51:43.146582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:43.146586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:43.146599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:43.146603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:43.146610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:43.146683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:43.156364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:43.156386Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:43.159874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:43.160734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:43.160761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:43.163301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:43.163523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:43.163628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.163721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:43.164712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.164982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:43.164994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.165034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:43.165042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:43.165049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:43.165063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.166661Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:43.180564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:43.180645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.180695Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:43.180738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:43.180744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.181325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.181344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:43.181380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.181390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:43.181395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:43.181399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:43.181835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.181851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:43.181857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:43.182307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.182320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.182326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.182335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.183000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:43.183561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:43.183638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:43.183864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.183894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.183908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.183973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:43.183983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.184017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:43.184033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:43.184599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:43.184607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:43.184670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.184675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:43.184774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.184782Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:43.184794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:43.184798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.184804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:43.184809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.184814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:43.184818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:43.184828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:43.184834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:43.184837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:43.185138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:43.185152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
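The surrounding FLAT_TX_SCHEMESHARD entries walk a scheme publication through TTxPublishToSchemeBoard, a TEvUpdateAck, "Publication in-flight, count: 1" and finally "Publication complete". The snippet below is a rough standalone model of that bookkeeping, with invented names rather than the actual SchemeShard classes: pending path versions are tracked per transaction and the transaction is reported complete once every ack has arrived.

```cpp
// Illustrative model only (invented names): counting in-flight scheme board
// publications per transaction and completing the tx when all acks arrive.
#include <cstdint>
#include <iostream>
#include <map>

class TPublicationTracker {
public:
    // Register that txId must publish pathId at the given version.
    void AddPublication(uint64_t txId, uint64_t pathId, uint64_t version) {
        InFlight[txId][pathId] = version;
    }

    // Handle an ack; returns true when the transaction has no publications left.
    bool AckPublish(uint64_t txId, uint64_t pathId, uint64_t version) {
        auto txIt = InFlight.find(txId);
        if (txIt == InFlight.end()) {
            return true;  // nothing pending for this tx
        }
        auto pathIt = txIt->second.find(pathId);
        if (pathIt != txIt->second.end() && pathIt->second <= version) {
            txIt->second.erase(pathIt);
        }
        if (txIt->second.empty()) {
            InFlight.erase(txIt);
            return true;  // analogue of "Publication complete, notify ..."
        }
        return false;     // analogue of "Publication in-flight, count: N"
    }

private:
    std::map<uint64_t, std::map<uint64_t, uint64_t>> InFlight;  // txId -> pathId -> version
};

int main() {
    TPublicationTracker tracker;
    tracker.AddPublication(/*txId=*/1, /*pathId=*/1, /*version=*/3);
    std::cout << std::boolalpha
              << tracker.AckPublish(1, 1, 3) << "\n";  // true: last ack completes tx 1
}
```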
2024-11-21T17:51:43.185158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:43.185163Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:43.185167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:43.185186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... T WARN: Table profiles were not loaded 2024-11-21T17:51:49.115214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:49.115306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:51:49.115335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:51:49.115463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115502Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:49.115521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read 
records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:49.115854Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:49.117414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:49.118079Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1010:2957], Recipient [1:1010:2957]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:49.118094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:49.118447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:49.118461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:49.118502Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1010:2957], Recipient [1:1010:2957]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:49.118509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:49.118529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:49.118539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:49.118546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:49.118550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:49.118573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1045:2957], Recipient [1:1010:2957]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:49.118578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:49.118582Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1010:2957] sender: [1:1063:2058] recipient: [1:15:2062] 2024-11-21T17:51:49.140377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1062:2999], Recipient [1:1010:2957]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T17:51:49.140403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:49.140437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:49.140541Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 91us result status StatusSuccess 2024-11-21T17:51:49.140774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13184 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82136 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13184 DataSize: 13184 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpConstraints::AlterTableAddNotNullWithDefault [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |83.2%| [LD] {RESULT} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AlterTableAddNotNullWithDefault [GOOD] Test command err: Trying to start YDB, gRPC: 20950, MsgBus: 25972 2024-11-21T17:51:05.724493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791595121450145:2148];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fba/r3tmp/tmp2GjCqz/pdisk_1.dat 2024-11-21T17:51:05.759923Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:05.773206Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20950, node 1 2024-11-21T17:51:05.791385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:05.791399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:05.791401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:05.791433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25972 2024-11-21T17:51:05.823512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:05.823541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:05.825829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:05.854151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.860180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.922107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:05.987584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.003725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.036825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599416418868:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.036858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.072443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.080978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.092222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.107810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.121305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.136835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.151526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599416419366:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.151554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.151590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791599416419371:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:06.152155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:06.162022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791599416419374:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 15132, MsgBus: 23650 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fba/r3tmp/tmpdUZUnj/pdisk_1.dat 2024-11-21T17:51:06.654839Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:06.681873Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15132, node 2 2024-11-21T17:51:06.725639Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:06.725652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:06.725653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:06.725698Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23650 2024-11-21T17:51:06.752562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:06.752591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:06.756394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23650 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:51:06.798777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:06.803763Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:06.821310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.833909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:06.856732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:06.871999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:07.013024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791600993799296:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.013057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:07.019553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:07.027148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pr ... main>:2:37: Error: At function: Member
:2:37: Error: Member not found: Value3. Did you mean Value? 2024-11-21T17:51:14.644912Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:3064:4419], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Column is under build operation, write operation is not allowed to column: Value3 for table: /Root/AddNonColumnDoesnotReturnInternalError, code: 2017 2024-11-21T17:51:14.645123Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NjI1MjExZWQtNzkyYzhiZDAtY2VjYzNjNi0yZWZlMTI2ZA==, ActorId: [5:2585:4032], ActorState: ExecuteState, TraceId: 01jd7xk06kd2b0tygrea1ek60b, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: BAD_REQUEST:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Column is under build operation, write operation is not allowed to column: Value3 for table: /Root/AddNonColumnDoesnotReturnInternalError, code: 2017 2024-11-21T17:51:14.658332Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:3073:4428], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:2:94: Error: At function: KiUpdateTable!
:2:94: Error: Column 'Value3' is under the build operation '/Root/AddNonColumnDoesnotReturnInternalError'., code: 2017 2024-11-21T17:51:14.658663Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NjI1MjExZWQtNzkyYzhiZDAtY2VjYzNjNi0yZWZlMTI2ZA==, ActorId: [5:2585:4032], ActorState: ExecuteState, TraceId: 01jd7xk0709zdndp9tz9qs5tkr, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: BAD_REQUEST:
: Error: Type annotation, code: 1030
:2:94: Error: At function: KiUpdateTable!
:2:94: Error: Column 'Value3' is under the build operation '/Root/AddNonColumnDoesnotReturnInternalError'., code: 2017 2024-11-21T17:51:14.674132Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:3082:4437], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:89: Error: At tuple, At tuple, At function: KqlDeleteRows, At function: Map
:2:29: Error: At function: Filter
:2:88: Error: At function: ==
:2:82: Error: At function: Member, At function: Member, At function: Member
:2:82: Error: Member not found: Value3. Did you mean Value? 2024-11-21T17:51:14.674398Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NjI1MjExZWQtNzkyYzhiZDAtY2VjYzNjNi0yZWZlMTI2ZA==, ActorId: [5:2585:4032], ActorState: ExecuteState, TraceId: 01jd7xk07db36m3maxpbas1kxa, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: GENERIC_ERROR:
: Error: Execution, code: 1060
:2:89: Error: At tuple, At tuple, At function: KqlDeleteRows, At function: Map
:2:29: Error: At function: Filter
:2:88: Error: At function: ==
:2:82: Error: At function: Member, At function: Member, At function: Member
:2:82: Error: Member not found: Value3. Did you mean Value? 2024-11-21T17:51:14.725326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 GRpc shutdown warning: left infly: 1, spent: 3.015994 sec GRpc shutdown warning: left infly: 1, spent: 6.041386 sec GRpc shutdown warning: left infly: 1, spent: 9.064917 sec GRpc shutdown warning: left infly: 1, spent: 12.091004 sec GRpc shutdown warning: left infly: 1, spent: 15.113531 sec GRpc shutdown warning: left infly: 1, spent: 18.141394 sec GRpc shutdown warning: left infly: 1, spent: 21.169049 sec GRpc shutdown warning: left infly: 1, spent: 24.206868 sec GRpc shutdown warning: left infly: 1, spent: 27.305019 sec GRpc shutdown warning: failed to shutdown all connections, left infly: 1, spent: 30.013066 sec Trying to start YDB, gRPC: 17318, MsgBus: 9068 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000fba/r3tmp/tmpXyosQk/pdisk_1.dat 2024-11-21T17:51:45.404432Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:45.469633Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17318, node 6 2024-11-21T17:51:45.508656Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:45.508695Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:45.509064Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:45.509074Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:45.509076Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:45.509122Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:45.509512Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9068 TClient is connected to server localhost:9068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:45.636165Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:45.638001Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:45.651646Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:45.679176Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:45.761827Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:45.796764Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:46.093012Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791771546631607:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:46.093062Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:46.101811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.126945Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.151543Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.165044Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.183440Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.239295Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.298996Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791771546632131:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:46.299020Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791771546632136:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:46.299025Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:46.299810Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:51:46.306117Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791771546632138:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:51:46.485111Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.529745Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710757:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.546983Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 |83.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> TPersQueueMirrorer::ValidStartStream [GOOD] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> SystemView::SystemViewFailOps [GOOD] |83.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] |83.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> PrivateApi::PingTask >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> Yq_1::DeleteConnections ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::SystemViewFailOps [GOOD] Test command err: 2024-11-21T17:51:36.096215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791728798485104:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.096252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00196c/r3tmp/tmpXWDM9t/pdisk_1.dat 2024-11-21T17:51:36.156656Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16959, node 1 2024-11-21T17:51:36.180386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:36.180401Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2024-11-21T17:51:36.180403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:36.180440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:36.196626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.196663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:2929 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:36.241177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:36.251645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.260009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.402095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791728798486006:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.402098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791728798486018:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.402120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.402788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:36.407514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791728798486020:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:36.562017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xkneg80p8mrzex87yqvqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRmOTZlNmMtNDdjODIzYzktOTMyYTNmNTctYzZiMmY5NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.579387Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439791728798486158:2315] TxId: 281474976715663. Ctx: { TraceId: 01jd7xknkrbvrhfy994fgt6hq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNjMTRjNWMtZDY5ODJlMTYtZTI3YzdlNmUtZjgwZjM5MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T17:51:36.579439Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xknkrbvrhfy994fgt6hq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNjMTRjNWMtZDY5ODJlMTYtZTI3YzdlNmUtZjgwZjM5MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.580287Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791728798486164:2323], owner: [1:7439791728798486161:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:51:36.583237Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791728798486164:2323], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:36.583366Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791728798486164:2323], row count: 1, finished: 1 2024-11-21T17:51:36.583375Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791728798486164:2323], owner: [1:7439791728798486161:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:51:36.584261Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211496576, txId: 281474976715662] shutting down 2024-11-21T17:51:37.655186Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439791731673622193:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:37.655280Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00196c/r3tmp/tmpVF0cPe/pdisk_1.dat 2024-11-21T17:51:37.663865Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16042, node 6 2024-11-21T17:51:37.679357Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:37.679368Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:37.679369Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:37.679414Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32645 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:37.755593Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:37.755623Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:37.756731Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:37.757862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:37.919788Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791731673622785:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.919824Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.919906Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791731673622797:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.920745Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:37.922696Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439791731673622799:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:38.012293Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xkpsd05ytd1gtz2z625g9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NTEzNzc1YTMtZTExYWZlNTItNWQzYWE1N2YtZmJiYWQwODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:38.012876Z node 6 :SYSTEM_VIEWS INFO: Scan started, actor: [6:7439791735968590181:2308], owner: [6:7439791735968590177:2306], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:38.013046Z node 6 :SYSTEM_VIEWS INFO: Scan prepared, actor: [6:7439791735968590181:2308], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:38.013154Z node 6 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [6:7439791735968590181:2308], row count: 0, finished: 1 2024-11-21T17:51:38.013168Z node 6 :SYSTEM_VIEWS INFO: Scan finished, actor: [6:7439791735968590181:2308], owner: [6:7439791735968590177:2306], scan id: 0, table id: [7205759404 ... 1784265975777:2377], owner: [7:7439791784265975773:2375], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.059954Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439791784265975777:2377], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:49.060058Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439791784265975777:2377], row count: 2, finished: 1 2024-11-21T17:51:49.060074Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439791784265975777:2377], owner: [7:7439791784265975773:2375], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.060653Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211509059, txId: 281474976715672] shutting down 2024-11-21T17:51:49.078164Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7xm1t7367yck8jk9mjdhyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmNhYzVhZjktODliNmExMzctZDQ5ZGNiNTEtMWNiNTIyZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:49.078876Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439791784265975809:2386], owner: [7:7439791784265975805:2384], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.079155Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439791784265975809:2386], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:49.079273Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439791784265975809:2386], row count: 3, finished: 1 2024-11-21T17:51:49.079288Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439791784265975809:2386], owner: [7:7439791784265975805:2384], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.080258Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211509076, txId: 281474976715674] shutting down 2024-11-21T17:51:49.097035Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715677. 
Ctx: { TraceId: 01jd7xm1tr2j567mt47j5mf329, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmJiNWRkZTgtMjMzMDIwMDItZWE4NjdjNzMtZjU4OWVkOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:49.097766Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439791784265975840:2395], owner: [7:7439791784265975837:2393], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.101735Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439791784265975840:2395], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:49.104402Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439791784265975840:2395], row count: 2, finished: 1 2024-11-21T17:51:49.104428Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439791784265975840:2395], owner: [7:7439791784265975837:2393], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.105748Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211509092, txId: 281474976715676] shutting down 2024-11-21T17:51:49.128514Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jd7xm1vm73z7cw76qtm66e5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDdjYjM1YWItNWJjOWFjOGUtY2U0MmQ5YmEtYWYwNGUyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:49.129268Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439791784265975872:2404], owner: [7:7439791784265975869:2402], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.134436Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439791784265975872:2404], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:49.134612Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439791784265975872:2404], row count: 3, finished: 1 2024-11-21T17:51:49.134628Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439791784265975872:2404], owner: [7:7439791784265975869:2402], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T17:51:49.135382Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211509126, txId: 281474976715678] shutting down 2024-11-21T17:51:49.529959Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439791781530077601:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:49.529977Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00196c/r3tmp/tmpiDfYe2/pdisk_1.dat 2024-11-21T17:51:49.561171Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1910, node 8 2024-11-21T17:51:49.580463Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:49.580477Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:49.580479Z node 8 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2024-11-21T17:51:49.580531Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:49.630449Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:49.630481Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:49.632418Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:49.632581Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:440, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp:774, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:523, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp:406, code: 200200 2024-11-21T17:51:49.894041Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439791781530078398:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:49.894067Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
: Error: Execution, code: 1060
:2:28: Error: Executing DROP TABLE
: Error: Incorrect scheme found while performing Kikimr operation., code: 2003
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp:406, code: 200200 2024-11-21T17:51:49.900578Z node 8 :TX_PROXY ERROR: [ReadTable [8:7439791781530078419:2304] TxId# 281474976715663] Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats] 2024-11-21T17:51:49.900629Z node 8 :TX_PROXY ERROR: [ReadTable [8:7439791781530078419:2304] TxId# 281474976715663] RESPONSE Status# ResolveError shard: 0 table: /Root/.sys/partition_stats
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats], code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats]
: Error: Bulk upsert to table '/Root/.sys/partition_stats'is not supported. Table is a system view
: Error: Check failed: path: '/Root/.sys', error: path part '.sys' is reserved by the system, source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:935
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:133, code: 200200
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:31, code: 200200 >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] |83.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |83.3%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Yq_1::CreateQuery_With_Idempotency >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> Yq_1::CreateConnection_With_Existing_Name >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TestKinesisHttpProxy::ListShardsToken |83.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> Yq_1::Basic >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TestKinesisHttpProxy::TestCounters [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] |83.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |83.3%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> Yq_1::ListConnections >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestYmqHttpProxy::TestSetQueueAttributes >> DataShardTxOrder::RandomPoints_DelayRS >> TestKinesisHttpProxy::TestEmptyHttpBody >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2024-11-21T17:51:33.577218Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791714062806667:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.577382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f55/r3tmp/tmpPGzvbr/pdisk_1.dat 2024-11-21T17:51:33.634950Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28485, node 1 2024-11-21T17:51:33.641414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.641429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.641431Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.641462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30015 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.664556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.665538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.665556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.665668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.665701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.665709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:51:33.665752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.665759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:51:33.665837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.666069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.666131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493712, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.666142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:33.666207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:33.666340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.666367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.666377Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:33.666384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:33.666393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:51:33.666405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:51:33.666629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:51:33.666645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:51:33.666648Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.666678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:51:33.678262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.678287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.679371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30015 2024-11-21T17:51:33.681513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.681565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.681576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.681680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.681720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.681983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493726, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.681997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732211493726, at schemeshard: 72057594046644480 2024-11-21T17:51:33.682091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:51:33.682117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:51:33.682124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T17:51:33.682186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.682239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.682297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.682412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:33.682425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:33.682428Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.682438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T17:51:33.682831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.682865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.682872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.682878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T17:51:33.682887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T17:51:33.682900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T17:51:33.682955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.682971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.683006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.683086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T17:51:33.683098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T17:51:33.683100Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.683109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T17:51:33.683315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.683355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.683466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: C ... 240 2024-11-21T17:51:49.434822Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715685:0 ProgressState 2024-11-21T17:51:49.434832Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715685:0 progress is 1/1 2024-11-21T17:51:49.434840Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715685:0 2024-11-21T17:51:49.435855Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:51:49.435961Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2024-11-21T17:51:49.436130Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:49.436136Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:51:49.436294Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2024-11-21T17:51:49.436333Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:49.436380Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:49.436394Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:51:49.436510Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:49.436519Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:49.436522Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2024-11-21T17:51:49.436564Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:49.436566Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:49.436568Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 2024-11-21T17:51:49.437043Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2024-11-21T17:51:49.437597Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:51:49.437616Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2024-11-21T17:51:49.437699Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:51:49.498352Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:51:49.498368Z node 6 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:51:49.498401Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2024-11-21T17:51:49.498577Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:51:49.498891Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211509546, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:49.498904Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211509546 2024-11-21T17:51:49.498962Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2024-11-21T17:51:49.499119Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:49.499227Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:49.499244Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:51:49.499640Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:49.499653Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:49.499657Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2024-11-21T17:51:49.499690Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:49.499697Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:49.499699Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2024-11-21T17:51:49.499969Z node 6 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1732211509546 OrderId: 281474976715686 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 378 } } 2024-11-21T17:51:49.500092Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:51:49.500102Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:51:49.500106Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2024-11-21T17:51:49.500169Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2024-11-21T17:51:49.500183Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2024-11-21T17:51:49.500201Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2024-11-21T17:51:50.509802Z node 6 :HTTP INFO: Listening on http://[::]:12742 2024-11-21T17:51:50.511254Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439791787059292965:2401], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:50.512067Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439791787059293024:2413], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:50.512083Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439791787059293025:2414], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:50.512215Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791787059293023:2412], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:50.513758Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:50.529956Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439791787059293020:2411]: Pool not found 2024-11-21T17:51:50.574546Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439791787059293018:2410]: Pool not found 2024-11-21T17:51:50.575261Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439791787059293145:2431], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:50.575262Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439791787059293146:2432], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:50.575282Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:50.595758Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439791787059293143:2430]: Pool not found 2024-11-21T17:51:51.514936Z node 6 :HTTP DEBUG: (#44,[::1]:52604) incoming connection opened 2024-11-21T17:51:51.514982Z node 6 :HTTP DEBUG: (#44,[::1]:52604) -> (POST /Root) 2024-11-21T17:51:51.515066Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [98cb:ecfa:2317:0:80cb:ecfa:2317:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 659fa16d-9905613c-41926455-90fcf22b Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2024-11-21T17:51:51.515133Z node 6 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [659fa16d-9905613c-41926455-90fcf22b] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2024-11-21T17:51:51.515274Z node 6 :HTTP DEBUG: (#44,[::1]:52604) <- (400 InvalidAction) 2024-11-21T17:51:51.515284Z node 6 :HTTP DEBUG: (#44,[::1]:52604) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2024-11-21T17:51:51.515288Z node 6 :HTTP DEBUG: (#44,[::1]:52604) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 659fa16d-9905613c-41926455-90fcf22b x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2024-11-21T17:51:51.515337Z node 6 :HTTP DEBUG: (#44,[::1]:52604) connection closed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:43.754503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:43.754523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:43.754526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:43.754529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:43.754541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:43.754543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:43.754549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:43.754617Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:43.765488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:43.765510Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:43.771120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:43.771990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:43.772020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:43.773658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:43.773829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:43.773919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.774003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:43.775138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.775398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:43.775411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.775466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:43.775474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:43.775481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:43.775495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.776892Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:43.794150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:43.794240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.794298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:43.794347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:43.794356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.795444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.795469Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:43.795510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.795518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:43.795521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:43.795525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:43.801027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.801049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:43.801055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:43.805284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.805313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.805321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.805330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.805940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:43.812714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:43.812797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:43.813003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:43.813050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:43.813059Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.813121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:43.813131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:43.813167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:43.813181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:43.816778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:43.816796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:43.816848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:43.816855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:43.816934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:43.816945Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:43.816964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:43.816968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.816978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:43.816985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:43.816991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:43.816996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:43.817016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:43.817022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:43.817026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:43.817434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:43.817454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:43.817459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:43.817464Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:43.817471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:43.817487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
944 2024-11-21T17:51:51.714899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.714905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.714960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:51.714983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:51:51.714989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T17:51:51.715008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715036Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:51.715045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:51:51.715057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 
2024-11-21T17:51:51.715226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:51.715319Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:51.716437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:51.716841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1133:3069], Recipient [1:1133:3069]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:51.716853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:51.717076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:51.717084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:51.717149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1133:3069], Recipient [1:1133:3069]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:51.717153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:51.717218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:51.717227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:51.717233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:51.717237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:51.717496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1168:3069], Recipient [1:1133:3069]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:51.717504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:51.717507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1133:3069] sender: [1:1188:2058] recipient: [1:15:2062] 2024-11-21T17:51:51.739494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1187:3113], Recipient [1:1133:3069]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T17:51:51.739509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:51.739526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:51.739610Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 74us result status StatusSuccess 2024-11-21T17:51:51.739800Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13184 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 4004 Memory: 132824 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13184 DataSize: 13184 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::GoodRequestCreateStream >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> SystemView::DescribeSystemFolder [GOOD] |83.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> DataShardOutOfOrder::TestPlannedTimeoutSplit |83.3%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::DescribeSystemFolder [GOOD] Test command err: 2024-11-21T17:51:35.731840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791723187691850:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:35.732014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001999/r3tmp/tmpB3IqVe/pdisk_1.dat 2024-11-21T17:51:35.782291Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22593, node 1 2024-11-21T17:51:35.794035Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:35.794049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:35.794057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:35.794089Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29336 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:35.816420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:35.831848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.831884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.832920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:35.974119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791723187692452:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:35.974139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791723187692441:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:35.974181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:35.974822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:35.976594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791723187692455:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T17:51:36.095948Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439791727482659830:2295] TxId: 281474976710661. Ctx: { TraceId: 01jd7xkmwf070cgcm24x5n3bq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY4MmMwOC0zNDk5OWQ4ZS04OTU2MTVkNy03NjZmYzZkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T17:51:36.096034Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd7xkmwf070cgcm24x5n3bq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY4MmMwOC0zNDk5OWQ4ZS04OTU2MTVkNy03NjZmYzZkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.099355Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791727482659837:2308], owner: [1:7439791727482659833:2306], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:36.099506Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791727482659837:2308], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:36.100691Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791727482659837:2308], row count: 1, finished: 1 2024-11-21T17:51:36.100712Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791727482659837:2308], owner: [1:7439791727482659833:2306], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:36.104804Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211496095, txId: 281474976710660] shutting down 2024-11-21T17:51:37.126737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd7xkp4n80ayjpvpkte70s6z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTVjZmZhMDYtZDU1YzdiMDMtOTQyZTY4YjctYzAyNzc5OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:37.127299Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791731777627190:2322], owner: [1:7439791731777627186:2320], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:37.127454Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791731777627190:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.127553Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791731777627190:2322], row count: 1, finished: 1 2024-11-21T17:51:37.127568Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791731777627190:2322], owner: [1:7439791731777627186:2320], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:37.128093Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211497126, txId: 281474976710662] shutting down 2024-11-21T17:51:38.146939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd7xkq4j0mjp2h3pv7nhd0kf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmY1OGFlZC1mODViMDlhZC1lZjRiNzQ0YS03MmYyMGQxMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:38.147548Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791736072594523:2333], owner: [1:7439791736072594519:2331], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:38.147657Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791736072594523:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:38.147727Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791736072594523:2333], row count: 1, finished: 1 2024-11-21T17:51:38.147744Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791736072594523:2333], owner: [1:7439791736072594519:2331], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:38.148221Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211498146, txId: 281474976710664] shutting down 2024-11-21T17:51:39.167025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd7xkr4d9a461fkgq9szjrh0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMyNDQ4OS1lZmU2MzcyMC0xNzkwM2JmNy0zNTIwOWJhOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:39.167618Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791740367561856:2344], owner: [1:7439791740367561852:2342], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:39.167766Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791740367561856:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:39.167865Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791740367561856:2344], row count: 1, finished: 1 2024-11-21T17:51:39.167880Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791740367561856:2344], owner: [1:7439791740367561852:2342], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:39.168351Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211499166, txId: 281474976710666] shutting down 2024-11-21T17:51:40.224407Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jd7xks4gfyantdmfw0hqcakf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRkMjhkYWUtOGJkMWYxZDktYjk5YzI0MjgtNjY0Mzc0MmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:40.225163Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791744662529188:2355], owner: [1:7439791744662529185:2353], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:40.228613Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791744662529188:2355], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:40.228939Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791744662529188:2355], row count: 1, finished: 1 2024-11-21T17:51:40.228951Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791744662529188:2355], owner: [1:7439791744662529185:2353], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T17:51:40.232671Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211500223, txId: 281474976710668] shutting down 2024-11-21T17:51:40.732028Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791723187691850:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:40.732075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:51:41.266650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jd7xkt5r09t1ydtcjx99e2c2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VjN2ZkYmItZWU5OWY0ZDAtZWExYjgxNjgtYmQxYjUzMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T ... 7084Z node 2 :SYSTEM_VIEWS INFO: Scan started, actor: [2:7439791774179464729:2368], owner: [2:7439791774179464725:2366], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2024-11-21T17:51:47.170860Z node 2 :SYSTEM_VIEWS INFO: Scan prepared, actor: [2:7439791774179464729:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:47.171281Z node 2 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [2:7439791774179464729:2368], row count: 1, finished: 1 2024-11-21T17:51:47.171298Z node 2 :SYSTEM_VIEWS INFO: Scan finished, actor: [2:7439791774179464729:2368], owner: [2:7439791774179464725:2366], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2024-11-21T17:51:47.172786Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211507164, txId: 281474976715670] shutting down 2024-11-21T17:51:48.909319Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001999/r3tmp/tmpTj94Q2/pdisk_1.dat 2024-11-21T17:51:48.963359Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20870, node 7 2024-11-21T17:51:49.003306Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:49.003318Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:49.003320Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:49.003371Z node 7 
:NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:49.009771Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:49.009800Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:49.017272Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:49.020731Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:49.024180Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:49.025487Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:49.035419Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439791783939641580:2116];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:49.037637Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:49.046463Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:49.047001Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:49.047034Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:49.050526Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:49.050551Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:49.053843Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2024-11-21T17:51:49.054681Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:49.062459Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2024-11-21T17:51:49.063355Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:51.120397Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:51.130647Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439791791983825194:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:51.131865Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:51.133284Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2024-11-21T17:51:51.138059Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:51.139135Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:51.141535Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2024-11-21T17:51:51.144443Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:51.144547Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:51.144559Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:51.146162Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2024-11-21T17:51:51.148569Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:51.246295Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:51.281622Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791793204102454:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:51.281652Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:51.281722Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791793204102466:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:51.282564Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T17:51:51.289452Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439791793204102468:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T17:51:51.391602Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xm3zgdadh53tthkcd56b4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2EyN2E2MWMtZGQzMmEyYjItODVhYThlNzMtZmY1NTYxNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:51.407689Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:51.492590Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xm45r4msrw62edveqvtxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2EyN2E2MWMtZGQzMmEyYjItODVhYThlNzMtZmY1NTYxNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:51.501130Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T17:51:51.574229Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xm48c06f443kxvjmkwvts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2EyN2E2MWMtZGQzMmEyYjItODVhYThlNzMtZmY1NTYxNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Path not found 2024-11-21T17:51:51.584535Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T17:51:51.584687Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:51.584722Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T17:51:51.584789Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:51.584805Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T17:51:51.584827Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:51.584906Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2024-11-21T17:51:51.584960Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:51.586287Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791791483058442:2110], Type=268959746 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2024-11-21T17:50:57.906711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791557976977493:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:57.906929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:50:57.946140Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e24/r3tmp/tmpj2MFpf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28136, node 1 2024-11-21T17:50:57.977544Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:57.984522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e24/r3tmp/yandexvsIKP3.tmp 2024-11-21T17:50:57.984537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e24/r3tmp/yandexvsIKP3.tmp 2024-11-21T17:50:57.984583Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e24/r3tmp/yandexvsIKP3.tmp 2024-11-21T17:50:57.984629Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:57.988540Z INFO: TTestServer started on Port 29182 GrpcPort 28136 TClient is connected to server localhost:29182 PQClient connected to localhost:28136 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2024-11-21T17:50:58.008719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.008745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.009849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:58.024904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.036600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.046864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:50:58.048162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.119530Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.125269Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T17:50:58.251141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791562271945548:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.251167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791562271945557:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.251174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.251962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.253753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791562271945562:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:50:58.278907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.284784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.307463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791562271945893:2586] 2024-11-21T17:51:02.908340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791557976977493:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.908396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:03.596176Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.603755Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.604102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791583746782701:2760], Recipient [1:7439791557976977900:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.604113Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.604114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.604120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791583746782697:2757], Recipient [1:7439791557976977900:2196]: {TEvModifySchemeTransaction txid# 281474976710674 TabletId# 72057594046644480} 2024-11-21T17:51:03.604121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.609357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.609462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.609524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.609540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.609545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.609550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:03.609554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:03.609578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710674:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.609682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:03.609690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T17:51:03.609701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:03.609852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710674, response: Status: StatusAccepted TxId: 281474976710674 SchemeshardId: 72057594046644480 PathId: 13, at schemeshard: 72057594046644480 2024-11-21T17:51:03.609870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710674, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/test-topic 2024-11-21T17:51:03.609879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:51:03.609881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send fo ... 
Read offset: 5 2024-11-21T17:51:50.169994Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T17:51:50.169997Z node 6 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T17:51:50.170015Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T17:51:50.170089Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 734 endOffset 10 max time lag 0ms effective offset 5 2024-11-21T17:51:50.170106Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 5 2024-11-21T17:51:50.170173Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2024-11-21T17:51:50.170187Z node 6 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:51:50.170244Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2024-11-21T17:51:50.170845Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] Got ReadResponse, serverBytesSize = 429, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428371 2024-11-21T17:51:50.170887Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428371 2024-11-21T17:51:50.168977Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 4 EndOffset: 10 WriteTimestampMS: 1732211510061 CreateTimestampMS: 1732211510058 SizeLag: 612 WriteTimestampEstimateMS: 1732211510165 } Cookie: 18446744073709551615 } 2024-11-21T17:51:50.168998Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 4 committedOffset 4 2024-11-21T17:51:50.172291Z :DEBUG: [] Decompression task done. 
Partition/PartitionSessionId: 1 (5-9) 2024-11-21T17:51:50.169028Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 sending to client partition status 2024-11-21T17:51:50.169624Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2024-11-21T17:51:50.169674Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2024-11-21T17:51:50.169693Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 4 committedOffset 4 clientCommitOffset (empty maybe) clientReadOffset 5 2024-11-21T17:51:50.172315Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] Returning serverBytesSize = 429 to budget 2024-11-21T17:51:50.169700Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2024-11-21T17:51:50.172322Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] In ContinueReadingDataImpl, ReadSizeBudget = 429, ReadSizeServerDelta = 52428371 2024-11-21T17:51:50.169730Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1732211510061, sizeLag# 612 2024-11-21T17:51:50.169734Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1TEvPartitionReady. Aval parts: 1 2024-11-21T17:51:50.169750Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 performing read request: guid# 675fb18-ff4e12b1-308a0c90-2d002b20, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 734, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T17:51:50.169785Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 734 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 4 committedOffset 4 Guid 675fb18-ff4e12b1-308a0c90-2d002b20 2024-11-21T17:51:50.170488Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1732211510065 CreateTimestampMS: 1732211510064 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." 
SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1732211510065 CreateTimestampMS: 1732211510064 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1732211510065 CreateTimestampMS: 1732211510064 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1732211510065 CreateTimestampMS: 1732211510064 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1732211510065 CreateTimestampMS: 1732211510064 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551523 RealReadOffset: 9 WaitQuotaTimeMs: 0 } Cookie: 5 } 2024-11-21T17:51:50.170550Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2024-11-21T17:51:50.170559Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid 675fb18-ff4e12b1-308a0c90-2d002b20 has messages 1 2024-11-21T17:51:50.170603Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 read done: guid# 675fb18-ff4e12b1-308a0c90-2d002b20, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 429 2024-11-21T17:51:50.170614Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 response to read: guid# 675fb18-ff4e12b1-308a0c90-2d002b20 2024-11-21T17:51:50.170716Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 Process answer. Aval parts: 0 2024-11-21T17:51:50.172467Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T17:51:50.172538Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2024-11-21T17:51:50.172555Z :DEBUG: [] Take Data. Partition 0. Read: {0, 1} (6-6) 2024-11-21T17:51:50.172561Z :DEBUG: [] Take Data. Partition 0. Read: {0, 2} (7-7) 2024-11-21T17:51:50.172565Z :DEBUG: [] Take Data. Partition 0. Read: {0, 3} (8-8) 2024-11-21T17:51:50.172571Z :DEBUG: [] Take Data. Partition 0. Read: {0, 4} (9-9) 2024-11-21T17:51:50.172591Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] The application data is transferred to the client. Number of messages 5, size 115 bytes 2024-11-21T17:51:50.172601Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] Returning serverBytesSize = 0 to budget 2024-11-21T17:51:50.172629Z :INFO: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] Closing read session. 
Close timeout: 0.000000s 2024-11-21T17:51:50.172637Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:4 2024-11-21T17:51:50.172651Z :INFO: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:50.172629Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 grpc read done: success# 1, data# { read_request { bytes_size: 429 } } 2024-11-21T17:51:50.172681Z :NOTICE: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:51:50.172690Z :DEBUG: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] [] Abort session to cluster 2024-11-21T17:51:50.172696Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 got read request: guid# d5014dbe-ba1f8d96-58d1c20d-150f0c71 2024-11-21T17:51:50.172832Z :NOTICE: [] [] [b70bdb4d-1c9e3e6d-3b89d818-5ec72e69] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:50.173087Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 grpc read done: success# 0, data# { } 2024-11-21T17:51:50.173098Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 grpc read failed 2024-11-21T17:51:50.173103Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 grpc closed 2024-11-21T17:51:50.173135Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_10000571139536297442_v1 is DEAD 2024-11-21T17:51:50.173296Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:50.173311Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_5_2_10000571139536297442_v1 2024-11-21T17:51:50.173325Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791788440360523:2497] destroyed 2024-11-21T17:51:50.173356Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [5:7439791788440360520:2494] disconnected; active server actors: 1 2024-11-21T17:51:50.173342Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_5_2_10000571139536297442_v1 2024-11-21T17:51:50.173368Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [5:7439791788440360520:2494] client user disconnected session shared/user_5_2_10000571139536297442_v1 2024-11-21T17:51:50.176426Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|1074b1c3-c5116e13-26facbfe-59f7a37a_0] Write session: destroy >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> DataShardTxOrder::ZigZag_oo >> DataShardOutOfOrder::TestSnapshotReadPriority >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] >> SystemView::QueryStatsAllTables [GOOD] >> TestKinesisHttpProxy::ListShardsToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] Test command err: 2024-11-21T17:50:57.987579Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791559269820353:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:57.987736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e15/r3tmp/tmp2blJCa/pdisk_1.dat 2024-11-21T17:50:58.020437Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:58.046269Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63763, node 1 2024-11-21T17:50:58.065346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/6fwh/001e15/r3tmp/yandexjBTezt.tmp 2024-11-21T17:50:58.065362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e15/r3tmp/yandexjBTezt.tmp 2024-11-21T17:50:58.065424Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e15/r3tmp/yandexjBTezt.tmp 2024-11-21T17:50:58.065472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:58.068773Z INFO: TTestServer started on Port 6436 GrpcPort 63763 TClient is connected to server localhost:6436 PQClient connected to localhost:63763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:50:58.089212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.089244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.090773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:58.095726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.097629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.134349Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:50:58.135665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.207720Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.305520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563564788401:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.305548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563564788407:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.305554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.306282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.306580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563564788442:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.306610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.307859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791563564788415:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:50:58.336299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.342948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.362444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791563564788739:2585] 2024-11-21T17:51:02.988281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791559269820353:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.988347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:03.595332Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.603331Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.603970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791585039625547:2759], Recipient [1:7439791563564788039:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.603982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.603984Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.603991Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791585039625543:2756], Recipient [1:7439791563564788039:2189]: {TEvModifySchemeTransaction txid# 281474976710674 TabletId# 72057594046644480} 2024-11-21T17:51:03.603992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.612592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.612705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.612773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.612791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.612796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.612803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:03.612818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:03.612846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710674:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.612977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:03.612992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T17:51:03.613004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:03.613207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710674, response: Status: StatusAccepted TxId: 281474976710674 SchemeshardId: 72057594046644480 PathId: 13, at schemeshard: 72057594046644480 2024-11-21T17: ... 
essing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:53.057658Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:53.057661Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_427155447318262892_v1 2024-11-21T17:51:53.057667Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7439791794593922633:2836] destroyed 2024-11-21T17:51:53.057682Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_427155447318262892_v1 2024-11-21T17:51:53.057823Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439791794593922630:2833] disconnected; active server actors: 1 2024-11-21T17:51:53.057833Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439791794593922630:2833] client test-consumer disconnected session test-consumer_5_1_427155447318262892_v1 2024-11-21T17:51:53.057847Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer family 2 status Active partitions [2] destroyed. >>>>> Session-0 Received TSessionClosedEvent message SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2024-11-21T17:51:53.060387Z :DEBUG: [/Root] SessionId [producer-1|d45a0493-2d223f30-8da7095f-6831cd6_0] PartitionId [4] Generation [1] Write session: destroy 2024-11-21T17:51:53.112887Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791777414052657:2666], Partition 2, Sender [0:0:0], Recipient [5:7439791777414052732:2674], Cookie: 0 2024-11-21T17:51:53.112887Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791773119084440:2434], Partition 0, Sender [0:0:0], Recipient [5:7439791773119084502:2438], Cookie: 0 2024-11-21T17:51:53.112905Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791773119084502:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.112911Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.112915Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791777414052732:2674]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.112919Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.112928Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.112930Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.112957Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.112960Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.112966Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.112968Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.112970Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.112976Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.112983Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791777414052661:2667], Partition 1, Sender [0:0:0], Recipient [5:7439791777414052734:2676], Cookie: 0 2024-11-21T17:51:53.112987Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791777414052734:2676]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.112988Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.112992Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.112998Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.113002Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.113005Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.120059Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7439791747349279553:2141]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:53.120081Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:53.120085Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T17:51:53.120088Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T17:51:53.120108Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000614s, queue# 1 2024-11-21T17:51:53.121037Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7439791747349279553:2141]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:53.121048Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T17:51:53.121050Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T17:51:53.158009Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791790298954995:2758], Partition 4, Sender [0:0:0], Recipient [5:7439791790298955090:2768], Cookie: 0 2024-11-21T17:51:53.158039Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791790298955090:2768]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.158044Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.158060Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.158087Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.158089Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.158096Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.158110Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791790298954999:2759], Partition 3, Sender [0:0:0], Recipient [5:7439791790298955096:2772], Cookie: 0 2024-11-21T17:51:53.158115Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791790298955096:2772]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.158116Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.158120Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.158126Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.158130Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.158133Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.213227Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791777414052657:2666], Partition 2, Sender [0:0:0], Recipient [5:7439791777414052732:2674], Cookie: 0 2024-11-21T17:51:53.213227Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791777414052661:2667], Partition 1, Sender [0:0:0], Recipient [5:7439791777414052734:2676], Cookie: 0 2024-11-21T17:51:53.213245Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791777414052734:2676]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.213250Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.213255Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791777414052732:2674]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.213258Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.213271Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.213271Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.213299Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.213299Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.213302Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.213302Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.213308Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.213308Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.213322Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791773119084440:2434], Partition 0, Sender [0:0:0], Recipient [5:7439791773119084502:2438], Cookie: 0 2024-11-21T17:51:53.213326Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791773119084502:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.213328Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.213330Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.213334Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.213336Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.213339Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite >> DataShardTxOrder::ZigZag >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::QueryStatsAllTables [GOOD] Test command err: 2024-11-21T17:51:36.215122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791726599993526:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.215177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001955/r3tmp/tmpteguCK/pdisk_1.dat 2024-11-21T17:51:36.272675Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19988, node 1 2024-11-21T17:51:36.300429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:36.300441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:36.300442Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:36.300483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:36.313982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.314007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:6078 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:36.386557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:36.399475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.434217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.506979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.556932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726599994458:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.556965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726599994448:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.556991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.557775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T17:51:36.562711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726599994462:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T17:51:36.627521Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000056313FAFAAC8 2024-11-21T17:51:36.627544Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7439791726599994445:2306], queryUid: , queryText: "\n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n ", keepInCache: 0, split: 0{ TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2024-11-21T17:51:36.627580Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2024-11-21T17:51:36.627590Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7439791726599994445:2306], queueSize: 1 2024-11-21T17:51:36.627697Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7439791726599994445:2306], compileActor: [1:7439791726599994547:2316] 2024-11-21T17:51:36.684951Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7439791726599994445:2306], status: SUCCESS, compileActor: [1:7439791726599994547:2316] 2024-11-21T17:51:36.684992Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7439791726599994445:2306], queryUid: cae8de3-78c532be-b2fc9379-b3f296fa, status:SUCCESS 2024-11-21T17:51:36.685389Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439791726599994553:2306] TxId: 281474976710662. Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 2 2024-11-21T17:51:36.685414Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T17:51:36.685429Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2024-11-21T17:51:36.685577Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710662. Resolved key sets: 1 2024-11-21T17:51:36.685652Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710662. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: OkData Kind: KindUnknown PartitionsCount: 0 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: () IncFrom: 1 To: () IncTo: 0 } 2024-11-21T17:51:36.685684Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439791726599994553:2306] TxId: 281474976710662. 
Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (KqpTable '"Root/.sys/pg_tables" '"72057594046644480:1" '"pg_tables" '0)) (let $2 '('"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity" '"schemaname" '"tablename" '"tableowner" '"tablespace")) (let $3 (KqpWideReadTableRanges $1 (Void) $2 '() '())) (return (FromFlow (WideTop (WideFilter $3 (lambda '($4 $5 $6 $7 $8 $9 $10 $11) (block '( (let $12 (PgType 'name)) (let $13 (Bool 'false)) (return (Or (Coalesce (FromPg (PgResolvedOp '"=" '"93" $9 (PgConst '"Table0" $12))) $13) (Coalesce (FromPg (PgResolvedOp '"=" '"93" $9 (PgConst '"Table1" $12))) $13))) )))) (Uint64 '"1001") '('('5 (Bool 'true)))))) )))) ) 2024-11-21T17:51:36.685716Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439791726599994553:2306] TxId: 281474976710662. Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create sysview scan task: 1 2024-11-21T17:51:36.685725Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439791726599994553:2306] TxId: 281474976710662. Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,1] AST: ( (return (lambda '($1) (block '( (let $2 (WideTopSort (ToFlow $1) (Uint64 '"1001") '('('5 (Bool 'true))))) (return (FromFlow (NarrowMap $2 (lambda '($3 $4 $5 $6 $7 $8 $9 $10) (AsStruct '('"hasindexes" $3) '('"hasrules" $4) '('"hastriggers" $5) '('"rowsecurity" $6) '('"schemaname" $7) '('"tablename" $8) '('"tableowner" $9) '('"tablespace" $10)))))) )))) ) 2024-11-21T17:51:36.685745Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439791726599994553:2306] TxId: 281474976710662. Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,1] create compute task: 2 2024-11-21T17:51:36.685755Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710662. Stage [0,1] create channelId: 1 from task: 1 to task: 2 of type UnionAll/Map without spilling 2024-11-21T17:51:36.685761Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 2 from task: 2 with index: 0 2024-11-21T17:51:36.685781Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.685792Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710662. Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYxYTE2MGEtYjVhNTQ1ZDctNjhmYmI1NjYtYWFjMTY3YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T17:51:36.685958Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7439791726599994553:2306] TxId: 281474976710662. 
Ctx: { TraceId: 01jd7xknkc47wmpnddaysh8gf6, Database: , DatabaseId: /Root, SessionId: ydb://sess ... cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001955/r3tmp/tmpZurmQK/pdisk_1.dat 2024-11-21T17:51:51.023113Z node 46 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1309, node 46 2024-11-21T17:51:51.046959Z node 46 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:51.046993Z node 46 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:51.046996Z node 46 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:51.047052Z node 46 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:51.101044Z node 46 :HIVE WARN: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:51.101082Z node 46 :HIVE WARN: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:51.104708Z node 46 :HIVE WARN: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:51.105003Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:51.111471Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:51.323211Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7439791791732990777:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:51.323231Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:51.323346Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7439791791732990789:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:51.323914Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:51.328478Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7439791791732990791:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:51.432712Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xm40t93wpdzfhnbdn1fn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=MjAzM2FhMmEtZmVjZmIwNC0xNDVhMDQ2YS1hZjE1YTc3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:51.454172Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xm44d7hddh79aeee7jmvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=NWNlMDE4MjMtODZkNDUzNDgtODdkMjc2NTctYzY5ZTc1YjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:51.454687Z node 46 :SYSTEM_VIEWS INFO: Scan started, actor: [46:7439791791732990932:2323], owner: [46:7439791791732990928:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2024-11-21T17:51:51.454876Z node 46 :SYSTEM_VIEWS INFO: Scan prepared, actor: [46:7439791791732990932:2323], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:51.454992Z node 46 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [46:7439791791732990932:2323], row count: 1, finished: 1 2024-11-21T17:51:51.454997Z node 46 :SYSTEM_VIEWS INFO: Scan finished, actor: [46:7439791791732990932:2323], owner: [46:7439791791732990928:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2024-11-21T17:51:51.455605Z node 46 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211511453, txId: 281474976715662] shutting down test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001955/r3tmp/tmpkOeNDX/pdisk_1.dat 2024-11-21T17:51:52.479313Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:52.489458Z node 51 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23870, node 51 2024-11-21T17:51:52.512710Z node 51 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:52.512726Z node 51 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:52.512728Z node 51 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:52.512768Z node 51 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6520 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:52.574712Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:52.574764Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:52.580859Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:52.581230Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:52.586650Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.849308Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7439791796425673067:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:52.849340Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:52.849354Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7439791796425673077:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:52.850212Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:52.874654Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [51:7439791796425673081:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:52.989206Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xm5gg6b8jfctydmet13h1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=MjkxZDMwZTItNzgyYTc5ZDMtYjg4NjI0ZTYtMjA3MDIxODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:53.011979Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xm5n66fk1f0ad3xhhw657, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=NzIwNTJmMTMtNjVkMDk1ODYtZGQ4YWVhMzAtNGI3M2I4Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:53.012708Z node 51 :SYSTEM_VIEWS INFO: Scan started, actor: [51:7439791800720640521:2323], owner: [51:7439791800720640518:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2024-11-21T17:51:53.015485Z node 51 :SYSTEM_VIEWS INFO: Scan prepared, actor: [51:7439791800720640521:2323], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:53.015660Z node 51 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [51:7439791800720640521:2323], row count: 1, finished: 1 2024-11-21T17:51:53.015668Z node 51 :SYSTEM_VIEWS INFO: Scan finished, actor: [51:7439791800720640521:2323], owner: [51:7439791800720640518:2321], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2024-11-21T17:51:53.018879Z node 51 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211513010, txId: 281474976715662] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] Test command err: 2024-11-21T17:50:57.991056Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791558502042970:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:57.991107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e11/r3tmp/tmpjEsVhj/pdisk_1.dat 2024-11-21T17:50:58.043157Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:58.062528Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18513, node 1 2024-11-21T17:50:58.071766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e11/r3tmp/yandexGKcqKy.tmp 2024-11-21T17:50:58.071779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e11/r3tmp/yandexGKcqKy.tmp 2024-11-21T17:50:58.076815Z INFO: TTestServer started on Port 2055 GrpcPort 18513 2024-11-21T17:50:58.090845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.090874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:2055 2024-11-21T17:50:58.091885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2024-11-21T17:50:58.110099Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e11/r3tmp/yandexGKcqKy.tmp 2024-11-21T17:50:58.110182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration PQClient connected to localhost:18513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:58.137390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.147109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.216906Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.384133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791562797010881:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.384161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791562797010862:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.384263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.384833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.385384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791562797010920:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.385500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.386672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791562797010891:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:50:58.411869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.418152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.463493Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791562797011087:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:50:58.463592Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODJlMTg1YTctZTRiYTExZDItZDY2MjA4YzgtNjgxZmRiNmI=, ActorId: [1:7439791562797010859:2304], ActorState: ExecuteState, TraceId: 01jd7xjgaff28s3742yktb3f7m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:50:58.463991Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:50:58.478205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791562797011235:2596] 2024-11-21T17:51:02.989541Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791558502042970:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.989577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:51:03.635940Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.640090Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.640477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791584271848012:2761], Recipient [1:7439791562797010533:2195]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.640487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.640489Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.640495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791584271848008:2758], Recipient [1:7439791562797010533:2195]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T17:51:03.640497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.646762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.646865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.646941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.646959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.646973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.646983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[Owner ... 876Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:53.565882Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_2_12421648228891801401_v1 2024-11-21T17:51:53.565888Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791801520734628:2736] destroyed 2024-11-21T17:51:53.565902Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_2_12421648228891801401_v1 2024-11-21T17:51:53.575540Z :INFO: [/Root] SessionId [producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T17:51:53.575558Z :INFO: [/Root] SessionId [producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T17:51:53.575568Z :DEBUG: [/Root] SessionId [producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T17:51:53.575882Z :INFO: [/Root] SessionId [producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T17:51:53.575890Z :DEBUG: [/Root] SessionId [producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T17:51:53.575916Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2024-11-21T17:51:53.575935Z :DEBUG: [/Root] SessionId [producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2024-11-21T17:51:53.575958Z :DEBUG: [/Root] SessionId [producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:51:53.575945Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0 grpc read done: success: 0 data: 2024-11-21T17:51:53.575963Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0 grpc read failed 2024-11-21T17:51:53.575972Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0 grpc closed 2024-11-21T17:51:53.575976Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|12f74916-55046ce9-9b838bda-f0677c8f_0 is DEAD 2024-11-21T17:51:53.576279Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7439791792930799596:2639], Recipient [5:7439791792930799598:2639]: NActors::TEvents::TEvPoison 2024-11-21T17:51:53.576306Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:51:53.576640Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439791792930799627:3133], Recipient [5:7439791792930798859:2429]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:53.576658Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:53.576663Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:53.576678Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791792930799625:2639] destroyed 2024-11-21T17:51:53.576706Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 
(NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7439791792930798859:2429], Partition 0, Sender [5:7439791792930798859:2429], Recipient [5:7439791792930798918:2432], Cookie: 0 2024-11-21T17:51:53.576723Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7439791792930798859:2429], Recipient [5:7439791792930798918:2432]: NKikimr::TEvPQ::TEvPipeDisconnected 2024-11-21T17:51:53.576727Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2024-11-21T17:51:53.576740Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:51:53.576752Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2024-11-21T17:51:53.576762Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.576794Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.576803Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.576822Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.655538Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797225767047:2676], Partition 2, Sender [0:0:0], Recipient [5:7439791797225767131:2684], Cookie: 0 2024-11-21T17:51:53.655542Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797225767046:2675], Partition 1, Sender [0:0:0], Recipient [5:7439791797225767133:2686], Cookie: 0 2024-11-21T17:51:53.655558Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797225767133:2686]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.655562Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.655566Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797225767131:2684]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.655570Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.655576Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.655581Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.655602Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.655603Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.655605Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.655606Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 
2024-11-21T17:51:53.655611Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.655611Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.655625Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791792930798859:2429], Partition 0, Sender [0:0:0], Recipient [5:7439791792930798918:2432], Cookie: 0 2024-11-21T17:51:53.655628Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791792930798918:2432]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.655633Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.655640Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.655646Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.655648Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.655651Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.756077Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797225767046:2675], Partition 1, Sender [0:0:0], Recipient [5:7439791797225767133:2686], Cookie: 0 2024-11-21T17:51:53.756076Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797225767047:2676], Partition 2, Sender [0:0:0], Recipient [5:7439791797225767131:2684], Cookie: 0 2024-11-21T17:51:53.756107Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797225767131:2684]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.756108Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797225767133:2686]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.756113Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.756113Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.756131Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.756132Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.756165Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.756168Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.756173Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.756175Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T17:51:53.756175Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.756182Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:53.756192Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791792930798859:2429], Partition 0, Sender [0:0:0], Recipient [5:7439791792930798918:2432], Cookie: 0 2024-11-21T17:51:53.756196Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791792930798918:2432]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.756197Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:53.756201Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:53.756220Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:53.756222Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:53.756242Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> TPQTest::TestReadSubscription [GOOD] >> TPQTest::TestReadAndDeleteConsumer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2024-11-21T17:51:52.096561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:52.096582Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:52.096599Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:52.098958Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:52.099075Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:51:52.099133Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:52.099718Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:52.107113Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:52.107247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:52.107378Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:51:52.107397Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:51:52.107403Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:51:52.107437Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 
2024-11-21T17:51:52.110694Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:51:52.110786Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:52.110863Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:51:52.110871Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:51:52.110879Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:51:52.110887Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:52.111030Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.111043Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.111085Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:51:52.111111Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:51:52.111199Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:52.111210Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:52.111220Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:51:52.111226Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:52.111232Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:52.111240Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:51:52.111249Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:52.119270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.119292Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.119301Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:51:52.119716Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:51:52.119726Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:52.119744Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:51:52.119772Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:51:52.119781Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:51:52.119789Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:51:52.119796Z 
node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:52.119800Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:51:52.119804Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:51:52.119808Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:52.119865Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:52.119869Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:51:52.119873Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:51:52.119876Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:52.119885Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:51:52.119888Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:51:52.119891Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:51:52.119894Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:52.119900Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:51:52.141096Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:51:52.141120Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:52.141125Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:52.141133Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:51:52.141144Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:52.141240Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.141246Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.141252Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:51:52.141266Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:51:52.141269Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:52.141305Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:52.141312Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:52.141317Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:51:52.141321Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:51:52.141848Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { 
TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:51:52.141864Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:52.141920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.141925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.141933Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:52.141940Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:52.141943Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:52.141952Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:51:52.141956Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:51:52.141963Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:52.141967Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:52.141971Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:52.141974Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:51:52.142010Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:51:52.142013Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:52.142015Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:52.142017Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:51:52.142019Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:51:52.142029Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:52.142032Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:51:52.142036Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:52.142039Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:52.142051Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:51:52.142054Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:51:52.142056Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:51:52.142061Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:52.142064Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:52.142067Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
7184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T17:51:54.429829Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.429845Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.429849Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:131] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.429855Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 131] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:51:54.429860Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T17:51:54.429863Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.429887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.429891Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.429897Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:51:54.429902Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T17:51:54.429905Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.429922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.429925Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.429930Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:51:54.429935Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T17:51:54.429939Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.429954Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.429957Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.429963Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:51:54.429969Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T17:51:54.429972Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.429987Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.429991Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.429996Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:97:2132], 
exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:51:54.429999Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.430017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.430020Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.430027Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:51:54.430030Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.430046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.430049Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.430055Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:51:54.430058Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.430075Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:54.430079Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:51:54.430085Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:51:54.430089Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:54.430139Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T17:51:54.430144Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430149Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T17:51:54.430166Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T17:51:54.430169Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430172Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T17:51:54.430185Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T17:51:54.430188Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430191Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T17:51:54.430201Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 
Seqno# 47} 2024-11-21T17:51:54.430204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430207Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T17:51:54.430220Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T17:51:54.430223Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430226Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T17:51:54.430237Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T17:51:54.430240Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430243Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T17:51:54.430251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:51:54.430254Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430257Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T17:51:54.430268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:51:54.430271Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430274Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T17:51:54.430286Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T17:51:54.430291Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430293Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T17:51:54.430302Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T17:51:54.430304Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430307Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T17:51:54.430320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T17:51:54.430323Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430326Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T17:51:54.430337Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T17:51:54.430340Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430343Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T17:51:54.430352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T17:51:54.430354Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:54.430357Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2024-11-21T17:51:33.507939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791713693256809:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.507958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f71/r3tmp/tmpihjZ1q/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9071, node 1 2024-11-21T17:51:33.572525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:33.572541Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:33.575342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.575358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.575360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.575395Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4152 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.608125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.608149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.609247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:33.645265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.646137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.646151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.646292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.646331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.646340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:33.646397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.646402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:33.646481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.646726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.646834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493691, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.646844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:33.646889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:33.646986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.647016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.647037Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:33.647047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:33.647052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:33.647060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:33.647383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:33.647395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:33.647398Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.647406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:4152 2024-11-21T17:51:33.662616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.662695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.662705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.662835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.662858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.663131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493712, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.663150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732211493712, at schemeshard: 72057594046644480 2024-11-21T17:51:33.663208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:33.663236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:33.663251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T17:51:33.663329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.663386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.663424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.663537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:33.663566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:33.663570Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.663583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:51:33.664072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.664125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.664135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.664145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:51:33.664165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:51:33.664171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T17:51:33.664281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.664307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.664345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.664476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:51:33.664488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:51:33.664490Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.664500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T17:51:33.664749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.664783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.664934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 28147 ... ard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715694 2024-11-21T17:51:54.356620Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715694 2024-11-21T17:51:54.356622Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715694, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 3 2024-11-21T17:51:54.356630Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715694, subscribers: 1 2024-11-21T17:51:54.356683Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2024-11-21T17:51:54.356691Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2024-11-21T17:51:54.356699Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Registered with mediator time cast 2024-11-21T17:51:54.356706Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Registered with mediator time cast 2024-11-21T17:51:54.356706Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2024-11-21T17:51:54.356830Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [8c509b03-7e265820-bb26cf8e-d5098e4c] reply ok 2024-11-21T17:51:54.356928Z node 7 :HTTP DEBUG: (#44,[::1]:38564) <- (200 ) 2024-11-21T17:51:54.357024Z node 7 :HTTP DEBUG: (#44,[::1]:38564) connection closed Http output full {} 200 {} 2024-11-21T17:51:54.357397Z node 7 :HTTP DEBUG: (#44,[::1]:38578) incoming connection opened 2024-11-21T17:51:54.357427Z node 7 :HTTP DEBUG: (#44,[::1]:38578) -> (POST /Root) 2024-11-21T17:51:54.357477Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d814:dc7f:a517:0:c014:dc7f:a517:0] request [ListShards] url [/Root] database [/Root] requestId: f9f7edc9-966c5631-af5dc4fa-4324460d 2024-11-21T17:51:54.357605Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f9f7edc9-966c5631-af5dc4fa-4324460d] got new request from [d814:dc7f:a517:0:c014:dc7f:a517:0] database '/Root' stream 'teststream' 2024-11-21T17:51:54.357751Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [f9f7edc9-966c5631-af5dc4fa-4324460d] [auth] Authorized successfully 2024-11-21T17:51:54.357778Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f9f7edc9-966c5631-af5dc4fa-4324460d] sending 
grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1732211514.357807 786384 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T17:51:54.358303Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.358308Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.358315Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439791804993838288:2474], now have 1 active actors on pipe 2024-11-21T17:51:54.358316Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439791804993838289:2475], now have 1 active actors on pipe 2024-11-21T17:51:54.358441Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.358451Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439791804993838288:2474] destroyed 2024-11-21T17:51:54.358454Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.358457Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439791804993838289:2475] destroyed 2024-11-21T17:51:54.358474Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f9f7edc9-966c5631-af5dc4fa-4324460d] reply ok 2024-11-21T17:51:54.358518Z node 7 :HTTP DEBUG: (#44,[::1]:38578) <- (200 ) 2024-11-21T17:51:54.358581Z node 7 :HTTP DEBUG: (#44,[::1]:38578) connection closed Http output full {"NextToken":"CPa36P60MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CPa36P60MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T17:51:54.359048Z node 7 :HTTP DEBUG: (#44,[::1]:38592) incoming connection opened 2024-11-21T17:51:54.359074Z node 7 :HTTP DEBUG: (#44,[::1]:38592) -> (POST /Root) 2024-11-21T17:51:54.359106Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [5835:3d79:a517:0:4035:3d79:a517:0] request [ListShards] url [/Root] database [/Root] requestId: 86afe17d-c24b1e76-7af14742-8352cd10 2024-11-21T17:51:54.359183Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [86afe17d-c24b1e76-7af14742-8352cd10] got new request from [5835:3d79:a517:0:4035:3d79:a517:0] database '/Root' stream 'teststream' 2024-11-21T17:51:54.359320Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [86afe17d-c24b1e76-7af14742-8352cd10] [auth] Authorized successfully 2024-11-21T17:51:54.359342Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [86afe17d-c24b1e76-7af14742-8352cd10] sending grpc request to '' database: '/Root' iam token size: 0 E0000 
00:00:1732211514.359363 786383 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T17:51:54.359616Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.359626Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.359631Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439791804993838301:2480], now have 1 active actors on pipe 2024-11-21T17:51:54.359630Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439791804993838300:2479], now have 1 active actors on pipe 2024-11-21T17:51:54.359726Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [86afe17d-c24b1e76-7af14742-8352cd10] reply ok 2024-11-21T17:51:54.359727Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.359730Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439791804993838300:2479] destroyed 2024-11-21T17:51:54.359733Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.359737Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439791804993838301:2480] destroyed 2024-11-21T17:51:54.359763Z node 7 :HTTP DEBUG: (#44,[::1]:38592) <- (200 ) Http output full {"NextToken":"CPe36P60MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T17:51:54.359838Z node 7 :HTTP DEBUG: (#44,[::1]:38592) connection closed 200 {"NextToken":"CPe36P60MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T17:51:54.360175Z node 7 :HTTP DEBUG: (#44,[::1]:38602) incoming connection opened 2024-11-21T17:51:54.360191Z node 7 :HTTP DEBUG: (#44,[::1]:38602) -> (POST /Root) 2024-11-21T17:51:54.360210Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [5835:3d79:a517:0:4035:3d79:a517:0] request [ListShards] url [/Root] database [/Root] requestId: 3ec0fa38-25fcdac7-e0f3fc41-75e1bd0 2024-11-21T17:51:54.360303Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [3ec0fa38-25fcdac7-e0f3fc41-75e1bd0] got new request from [5835:3d79:a517:0:4035:3d79:a517:0] database '/Root' stream 'teststream' 2024-11-21T17:51:54.360393Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [3ec0fa38-25fcdac7-e0f3fc41-75e1bd0] [auth] Authorized successfully 2024-11-21T17:51:54.360406Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [3ec0fa38-25fcdac7-e0f3fc41-75e1bd0] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1732211514.360419 786383 message_lite.cc:131] Can't parse message of type 
"NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T17:51:54.360590Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.360597Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.360601Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439791804993838313:2485], now have 1 active actors on pipe 2024-11-21T17:51:54.360601Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439791804993838312:2484], now have 1 active actors on pipe 2024-11-21T17:51:54.360702Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [3ec0fa38-25fcdac7-e0f3fc41-75e1bd0] reply ok 2024-11-21T17:51:54.360709Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.360713Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439791804993838312:2484] destroyed 2024-11-21T17:51:54.360714Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.360717Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439791804993838313:2485] destroyed 2024-11-21T17:51:54.360735Z node 7 :HTTP DEBUG: (#44,[::1]:38602) <- (200 ) 2024-11-21T17:51:54.360775Z node 7 :HTTP DEBUG: (#44,[::1]:38602) connection closed Http output full {"NextToken":"CPi36P60MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CPi36P60MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] Test command err: 2024-11-21T17:51:33.523580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791713886781255:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.523753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f82/r3tmp/tmplerbMf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25157, node 1 
2024-11-21T17:51:33.599049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:33.599064Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:33.599518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.599528Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.599530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.599558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.616152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.616972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.616988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.617093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.617124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.617131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:51:33.617199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.617209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T17:51:33.617280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.617435Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.617523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493663, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.617533Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:33.617600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:33.617678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.617699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.617707Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:33.617712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:33.617717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:51:33.617726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:51:33.617941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:51:33.617955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:51:33.617958Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.617966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:51:33.625178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.625205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.626271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10531 2024-11-21T17:51:33.631511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.631567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.631578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.631682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: 
//Root 2024-11-21T17:51:33.631709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:33.631914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493677, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.631925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732211493677, at schemeshard: 72057594046644480 2024-11-21T17:51:33.631966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:51:33.631986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:51:33.631994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T17:51:33.632074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.632104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.632118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.632172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:33.632187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:33.632190Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.632196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T17:51:33.632592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.632621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.632628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.632634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T17:51:33.632640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T17:51:33.632645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T17:51:33.632697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.632722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.632758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.632827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T17:51:33.632835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T17:51:33.632838Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.632850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T17:51:33.633086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.633112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.633183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 28 ... 6715696:0 3 -> 128 2024-11-21T17:51:54.645783Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T17:51:54.648468Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211514698, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:54.648486Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715696:0 HandleReply TEvOperationPlan, step: 1732211514698, at tablet: 72057594046644480 2024-11-21T17:51:54.648549Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715696:0 128 -> 240 2024-11-21T17:51:54.648725Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:54.648801Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:54.648819Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715696:0 ProgressState 2024-11-21T17:51:54.648830Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715696:0 progress is 1/1 2024-11-21T17:51:54.648837Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715696:0 2024-11-21T17:51:54.648879Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715696, publications: 1, subscribers: 1 2024-11-21T17:51:54.649000Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715696 2024-11-21T17:51:54.649018Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715696 2024-11-21T17:51:54.649021Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715696, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2024-11-21T17:51:54.649033Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715696, subscribers: 1 2024-11-21T17:51:54.649223Z node 7 :HTTP_PROXY INFO: http request 
[RegisterStreamConsumer] requestId [97e489c0-be996667-c55e7efb-cdf6b10d] reply ok 2024-11-21T17:51:54.649277Z node 7 :HTTP DEBUG: (#44,[::1]:55912) <- (200 ) 2024-11-21T17:51:54.649365Z node 7 :HTTP DEBUG: (#44,[::1]:55912) connection closed Http output full {"Consumer":{"ConsumerCreationTimestamp":1732211.514,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} 200 {"Consumer":{"ConsumerCreationTimestamp":1732211.514,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} 2024-11-21T17:51:54.649791Z node 7 :HTTP DEBUG: (#44,[::1]:55928) incoming connection opened 2024-11-21T17:51:54.649817Z node 7 :HTTP DEBUG: (#44,[::1]:55928) -> (POST /Root) 2024-11-21T17:51:54.649854Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d810:d6fa:d714:0:c010:d6fa:d714:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 87077807-a7f53c53-abc9a873-cbdbf13a E0000 00:00:1732211514.649972 786752 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T17:51:54.650014Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [87077807-a7f53c53-abc9a873-cbdbf13a] got new request from [d810:d6fa:d714:0:c010:d6fa:d714:0] database '/Root' stream 'teststream' E0000 00:00:1732211514.650203 786752 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732211514.650210 786752 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T17:51:54.650201Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [87077807-a7f53c53-abc9a873-cbdbf13a] [auth] Authorized successfully 2024-11-21T17:51:54.650224Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [87077807-a7f53c53-abc9a873-cbdbf13a] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1732211514.650235 786752 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732211514.650245 786752 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T17:51:54.650531Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [87077807-a7f53c53-abc9a873-cbdbf13a] reply ok 2024-11-21T17:51:54.650571Z node 7 :HTTP DEBUG: (#44,[::1]:55928) <- (200 ) 2024-11-21T17:51:54.650629Z node 7 :HTTP DEBUG: (#44,[::1]:55928) connection closed Http output full {"NextToken":"CJq66P60MhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1732211515,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 200 {"NextToken":"CJq66P60MhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1732211515,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 2024-11-21T17:51:54.650977Z node 7 :HTTP DEBUG: (#44,[::1]:55942) incoming connection opened 2024-11-21T17:51:54.650997Z node 7 :HTTP DEBUG: (#44,[::1]:55942) -> (POST /Root) 2024-11-21T17:51:54.651027Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d823:79f7:d714:0:c023:79f7:d714:0] request [ListStreamConsumers] url [/Root] database 
[/Root] requestId: 4f02321e-bb24b9c7-82ea9225-dde7de75 2024-11-21T17:51:54.651269Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [4f02321e-bb24b9c7-82ea9225-dde7de75] got new request from [d823:79f7:d714:0:c023:79f7:d714:0] database '/Root' stream '' 2024-11-21T17:51:54.651372Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [4f02321e-bb24b9c7-82ea9225-dde7de75] [auth] Authorized successfully 2024-11-21T17:51:54.651441Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [4f02321e-bb24b9c7-82ea9225-dde7de75] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:54.651595Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [4f02321e-bb24b9c7-82ea9225-dde7de75] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2024-11-21T17:51:54.651614Z node 7 :HTTP DEBUG: (#44,[::1]:55942) <- (400 InvalidArgumentException) 2024-11-21T17:51:54.651624Z node 7 :HTTP DEBUG: (#44,[::1]:55942) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 61 { "NextToken":"CJq66P60MhABGAEiCnRlc3RzdHJlYW0=garbage", "StreamArn":"", "MaxResults":100 } 0 2024-11-21T17:51:54.651634Z node 7 :HTTP DEBUG: (#44,[::1]:55942) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 4f02321e-bb24b9c7-82ea9225-dde7de75 x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T17:51:54.651662Z node 7 :HTTP DEBUG: (#44,[::1]:55942) connection closed Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T17:51:54.651857Z node 7 :HTTP DEBUG: (#44,[::1]:55948) incoming connection opened 2024-11-21T17:51:54.651880Z node 7 :HTTP DEBUG: (#44,[::1]:55948) -> (POST /Root) 2024-11-21T17:51:54.651896Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [9825:79f7:d714:0:8025:79f7:d714:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 5a9c9143-68cf4764-82a2fa6a-e7814665 2024-11-21T17:51:54.651965Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [5a9c9143-68cf4764-82a2fa6a-e7814665] got new request from [9825:79f7:d714:0:8025:79f7:d714:0] database '/Root' stream '' 2024-11-21T17:51:54.652056Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [5a9c9143-68cf4764-82a2fa6a-e7814665] [auth] Authorized successfully 2024-11-21T17:51:54.652106Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [5a9c9143-68cf4764-82a2fa6a-e7814665] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:54.652209Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [5a9c9143-68cf4764-82a2fa6a-e7814665] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2024-11-21T17:51:54.652237Z node 7 :HTTP DEBUG: (#44,[::1]:55948) <- (400 InvalidArgumentException) 2024-11-21T17:51:54.652243Z node 7 :HTTP DEBUG: (#44,[::1]:55948) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "NextToken":"Y18AjrkckeND9kjnbEhjhkX$^[]?!", "StreamArn":"", "MaxResults":100 } 0 2024-11-21T17:51:54.652246Z node 7 :HTTP DEBUG: (#44,[::1]:55948) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 5a9c9143-68cf4764-82a2fa6a-e7814665 x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T17:51:54.652272Z node 7 :HTTP DEBUG: (#44,[::1]:55948) connection closed 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T17:51:54.652429Z node 7 :HTTP DEBUG: (#44,[::1]:55964) incoming connection opened 2024-11-21T17:51:54.652443Z node 7 :HTTP DEBUG: (#44,[::1]:55964) -> (POST /Root) 2024-11-21T17:51:54.652453Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [9842:79f7:d714:0:8042:79f7:d714:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: e79253f9-617fcb8f-d4503a2f-42594ccf 2024-11-21T17:51:54.652495Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [e79253f9-617fcb8f-d4503a2f-42594ccf] got new request from [9842:79f7:d714:0:8042:79f7:d714:0] database '/Root' stream 'teststream' 2024-11-21T17:51:54.652594Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [e79253f9-617fcb8f-d4503a2f-42594ccf] [auth] Authorized successfully 2024-11-21T17:51:54.652606Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [e79253f9-617fcb8f-d4503a2f-42594ccf] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:54.652788Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [e79253f9-617fcb8f-d4503a2f-42594ccf] reply ok 2024-11-21T17:51:54.652815Z node 7 :HTTP DEBUG: (#44,[::1]:55964) <- (200 ) Http output full {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1732211515,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]}2024-11-21T17:51:54.652848Z node 7 :HTTP DEBUG: (#44,[::1]:55964) connection closed 200 {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1732211515,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] Test command err: 2024-11-21T17:50:57.811130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791560245409788:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:57.811143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e1f/r3tmp/tmpJHsSLL/pdisk_1.dat 2024-11-21T17:50:57.844671Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:50:57.857128Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7850, node 1 2024-11-21T17:50:57.873113Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e1f/r3tmp/yandexUf8Zbd.tmp 2024-11-21T17:50:57.873130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e1f/r3tmp/yandexUf8Zbd.tmp 2024-11-21T17:50:57.873174Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e1f/r3tmp/yandexUf8Zbd.tmp 2024-11-21T17:50:57.873204Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:57.877278Z INFO: TTestServer started on Port 18769 GrpcPort 7850 TClient is connected to server localhost:18769 PQClient connected to localhost:7850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:57.905121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:57.908135Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:50:57.912786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:57.912814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:57.913714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:50:57.937164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.020691Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:50:58.028001Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T17:50:58.132332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791564540377846:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.132357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791564540377838:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.132384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.133171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.135446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791564540377852:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:50:58.166189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.172753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.186662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.192805Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791564540378107:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:50:58.192923Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjVmNzViNy1hYzdkNzdiNS0yNWEzOWFhZi0yYWE4NTk0Zg==, ActorId: [1:7439791564540377835:2304], ActorState: ExecuteState, TraceId: 01jd7xjg2k9v35w9sfwpez6ey5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:50:58.193392Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791564540378200:2596] 2024-11-21T17:51:02.811259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791560245409788:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:02.811296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:51:03.359486Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.362789Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.363102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791586015214982:2766], Recipient [1:7439791560245410187:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.363114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.363115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.363120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791586015214978:2763], Recipient [1:7439791560245410187:2191]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2024-11-21T17:51:03.363121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.368179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.368279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.368330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.368343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.368352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.368361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:03.368367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:03.368393Z node 1 :FLAT_T ... 705_v1 2024-11-21T17:51:54.563528Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][test-topic] pipe [5:7439791805619778414:2774] disconnected; active server actors: 1 2024-11-21T17:51:54.563538Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][test-topic] pipe [5:7439791805619778414:2774] client test-consumer disconnected session test-consumer_5_3_16632328940254785705_v1 2024-11-21T17:51:54.563552Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] consumer test-consumer rebalancing was scheduled 2024-11-21T17:51:54.563569Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] consumer test-consumer balancing. Sessions=1, Families=4, UnradableFamilies=3 [1 (0), 4 (3), 3 (2), ], RequireBalancing=0 [] 2024-11-21T17:51:54.563583Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] consumer test-consumer balancing of the family=4 (Status=Free, Partitions=[3]) failed because there are no suitable reading sessions. 2024-11-21T17:51:54.563585Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] consumer test-consumer balancing of the family=3 (Status=Free, Partitions=[2]) failed because there are no suitable reading sessions. 2024-11-21T17:51:54.563588Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] consumer test-consumer balancing of the family=1 (Status=Free, Partitions=[0]) failed because there are no suitable reading sessions. 2024-11-21T17:51:54.563591Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] consumer test-consumer balancing duration: 0.000013s 2024-11-21T17:51:54.563683Z :INFO: [/Root] [/Root] [5e1fb265-725266b8-d67ff2a-6f97c687] Closing read session. Close timeout: 0.000000s 2024-11-21T17:51:54.563704Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:1:1:0:0 2024-11-21T17:51:54.563712Z :INFO: [/Root] [/Root] [5e1fb265-725266b8-d67ff2a-6f97c687] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1023 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:51:54.563726Z :NOTICE: [/Root] [/Root] [5e1fb265-725266b8-d67ff2a-6f97c687] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:51:54.563733Z :DEBUG: [/Root] [/Root] [5e1fb265-725266b8-d67ff2a-6f97c687] [] Abort session to cluster >>>>> Session-1 Received TSessionClosedEvent message SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:51:54.563877Z :NOTICE: [/Root] [/Root] [5e1fb265-725266b8-d67ff2a-6f97c687] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:51:54.563955Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_4401849079421920906_v1 grpc read done: success# 0, data# { } 2024-11-21T17:51:54.563965Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_4401849079421920906_v1 grpc read failed 2024-11-21T17:51:54.563969Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_4401849079421920906_v1 grpc closed 2024-11-21T17:51:54.563981Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_4401849079421920906_v1 is DEAD 2024-11-21T17:51:54.564036Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7439791801324810998:3377], Recipient [5:7439791788439907921:2434]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.564049Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.564052Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.564055Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_4401849079421920906_v1 2024-11-21T17:51:54.564059Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791801324810997:2732] destroyed 2024-11-21T17:51:54.564068Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_4401849079421920906_v1 2024-11-21T17:51:54.564217Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][test-topic] pipe [5:7439791801324810994:2726] disconnected; active server actors: 1 2024-11-21T17:51:54.564226Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][test-topic] pipe [5:7439791801324810994:2726] client test-consumer disconnected session test-consumer_5_1_4401849079421920906_v1 2024-11-21T17:51:54.564568Z :DEBUG: [/Root] SessionId [producer-1|bb75d019-88bbfaaf-69fbca26-98025269_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T17:51:54.564671Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797029843423:2665], Partition 3, Sender [0:0:0], Recipient [5:7439791797029843510:2675], Cookie: 0 2024-11-21T17:51:54.564678Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797029843425:2666], Partition 2, Sender [0:0:0], Recipient [5:7439791797029843512:2677], Cookie: 0 2024-11-21T17:51:54.564685Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797029843510:2675]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.564685Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797029843512:2677]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.564688Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.564688Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.564697Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:54.564709Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:54.564709Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:54.564719Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:54.564720Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:54.564724Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:54.564723Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:54.564731Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:54.588364Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791788439907923:2435], Partition 0, Sender [0:0:0], Recipient [5:7439791788439908009:2440], Cookie: 0 2024-11-21T17:51:54.588365Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791788439907921:2434], Partition 1, Sender [0:0:0], Recipient [5:7439791788439908011:2442], Cookie: 0 2024-11-21T17:51:54.588380Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791788439908011:2442]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.588385Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.588398Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:54.588403Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791788439908009:2440]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.588407Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.588418Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:54.588422Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:54.588425Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:54.588431Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:54.588433Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:54.588436Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:54.588440Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T17:51:54.665035Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797029843423:2665], Partition 3, Sender [0:0:0], Recipient [5:7439791797029843510:2675], Cookie: 0 2024-11-21T17:51:54.665062Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797029843510:2675]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.665066Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.665078Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:54.665103Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:54.665106Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:54.665111Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:54.665124Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791797029843425:2666], Partition 2, Sender [0:0:0], Recipient [5:7439791797029843512:2677], Cookie: 0 2024-11-21T17:51:54.665133Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791797029843512:2677]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.665135Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:54.665138Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:54.665143Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:54.665144Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:54.665147Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2024-11-21T17:51:33.513165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791714787297878:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.513359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f6c/r3tmp/tmpI6f4TM/pdisk_1.dat 2024-11-21T17:51:33.555903Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2708, node 1 2024-11-21T17:51:33.567224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.567234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.567236Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.567265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:33.587045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.587871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.587885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.588000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.588036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.588045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:51:33.588117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.588126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2024-11-21T17:51:33.588196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.588328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.588453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493635, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.588462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:33.588531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:33.588618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.588642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.588648Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:33.588659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:33.588669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:51:33.588673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:51:33.588949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:51:33.588965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:51:33.588968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.588981Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:30194 2024-11-21T17:51:33.599967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.600028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.600140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:33.600301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493649, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732211493649, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:51:33.600397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:51:33.600406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T17:51:33.600443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:33.600594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:33.600596Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.600603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T17:51:33.600951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.600988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.600997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.601006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T17:51:33.601021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T17:51:33.601027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T17:51:33.601098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.601122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.601159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.601259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T17:51:33.601262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T17:51:33.601264Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.601273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T17:51:33.601422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.601450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.601535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS 2024-11-21T17:51:33.601560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.601584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.601600Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#28 ... 
ARD INFO: [72057594046644480] TDone opId# 281474976715685:0 ProgressState 2024-11-21T17:51:52.724956Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715685:0 progress is 1/1 2024-11-21T17:51:52.724968Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715685:0 2024-11-21T17:51:52.726035Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.726199Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2024-11-21T17:51:52.726424Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.726437Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.726616Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2024-11-21T17:51:52.726664Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:52.726718Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.726748Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:51:52.726881Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:52.726890Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:52.726894Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2024-11-21T17:51:52.726964Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:52.726968Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:52.726970Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 2024-11-21T17:51:52.727145Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2024-11-21T17:51:52.728008Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:51:52.728039Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2024-11-21T17:51:52.728157Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:51:52.737286Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts 
operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:51:52.737306Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:51:52.737342Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2024-11-21T17:51:52.737493Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:51:52.737821Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512787, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.737838Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211512787 2024-11-21T17:51:52.737881Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2024-11-21T17:51:52.738033Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.738140Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.738149Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:51:52.738596Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:52.738603Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:52.738606Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2024-11-21T17:51:52.738643Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T17:51:52.738644Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T17:51:52.738646Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2024-11-21T17:51:52.738903Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1732211512787 OrderId: 281474976715686 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 367 } } 2024-11-21T17:51:52.738968Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:51:52.738975Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.738981Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2024-11-21T17:51:52.739082Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 
281474976715686:0 ProgressState 2024-11-21T17:51:52.739097Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2024-11-21T17:51:52.739111Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2024-11-21T17:51:53.754471Z node 7 :HTTP INFO: Listening on http://[::]:10234 2024-11-21T17:51:53.755202Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439791799772684433:2401], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:53.755997Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439791799772684491:2413], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:53.756015Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439791799772684492:2414], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:53.756153Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791799772684490:2412], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:53.756176Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:53.778094Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439791799772684488:2411]: Pool not found 2024-11-21T17:51:53.820983Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439791799772684474:2410]: Pool not found 2024-11-21T17:51:53.822301Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439791799772684613:2431], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:53.822326Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439791799772684614:2432], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T17:51:53.822335Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:53.855169Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439791799772684611:2430]: Pool not found 2024-11-21T17:51:54.753616Z node 7 :HTTP DEBUG: (#44,[::1]:41678) incoming connection opened 2024-11-21T17:51:54.753662Z node 7 :HTTP DEBUG: (#44,[::1]:41678) -> (POST /Root) 2024-11-21T17:51:54.753720Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18ee:2efb:f14:0:ee:2efb:f14:0] request [CreateStream] url [/Root] database [/Root] requestId: b0d42a25-4b8fd085-83ff4a9-e3b7a02f 2024-11-21T17:51:54.753973Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [b0d42a25-4b8fd085-83ff4a9-e3b7a02f] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map 2024-11-21T17:51:54.754003Z node 7 :HTTP DEBUG: (#44,[::1]:41678) <- (400 MissingParameter) 2024-11-21T17:51:54.754024Z node 7 :HTTP DEBUG: (#44,[::1]:41678) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2024-11-21T17:51:54.754035Z node 7 :HTTP DEBUG: (#44,[::1]:41678) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: b0d42a25-4b8fd085-83ff4a9-e3b7a02f x-amz-crc32: 3671469627 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} 2024-11-21T17:51:54.754094Z node 7 :HTTP DEBUG: (#44,[::1]:41678) connection closed >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots2 >> SystemView::TopPartitionsFollowers [GOOD] >> SystemView::TabletsShards ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] Test command err: 2024-11-21T17:51:33.447315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791716077077000:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.447328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f96/r3tmp/tmpmeYUdC/pdisk_1.dat 2024-11-21T17:51:33.506777Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4431, node 1 2024-11-21T17:51:33.516418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.516429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.516430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.516454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23090 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.543195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.544245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.544282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.544473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.544531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.544543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:51:33.544624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.544632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:51:33.544724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.544955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.545091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493593, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.545107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:33.545174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:33.545287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.545327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.545340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:33.545349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:33.545360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:51:33.545374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:51:33.545697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:51:33.545714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:51:33.545717Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.545729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:51:33.548413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.548435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.549539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23090 2024-11-21T17:51:33.584590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.584662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.584668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.584786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.584817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.585047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493628, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.585062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732211493628, at schemeshard: 72057594046644480 2024-11-21T17:51:33.585107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:51:33.585131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:51:33.585143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T17:51:33.585210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.585261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.585297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.585391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:33.585405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:33.585408Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.585420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T17:51:33.585929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.585975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.585985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.585992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T17:51:33.586001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T17:51:33.586003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T17:51:33.586061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.586078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.586097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.586179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T17:51:33.586195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T17:51:33.586198Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.586209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T17:51:33.586469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.586508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.586617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CR ... reateQueue] requestId [1dc2bd4e-4d8dbe9f-146d404-f00706d0] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:54.898175Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [1dc2bd4e-4d8dbe9f-146d404-f00706d0] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:54.898570Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4, operationId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T17:51:54.898598Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715706:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, at schemeshard: 72057594046644480 2024-11-21T17:51:54.898807Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715706, database: /Root, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4 2024-11-21T17:51:54.910819Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0, operationId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T17:51:54.910877Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715711:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:54.911089Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715711, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0 2024-11-21T17:51:54.911124Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:54.911165Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:54.911178Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715711:0 ProgressState, at 
schemeshard: 72057594046644480 2024-11-21T17:51:54.911210Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715711, at schemeshard: 72057594046644480 2024-11-21T17:51:54.911387Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T17:51:54.911401Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T17:51:54.911422Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 7 2024-11-21T17:51:54.911482Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T17:51:54.911506Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T17:51:54.911513Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 3 2024-11-21T17:51:54.914880Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211514964, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:54.914901Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715711:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211514964, at schemeshard: 72057594046644480 2024-11-21T17:51:54.914959Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715711:0 128 -> 240 2024-11-21T17:51:54.915153Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:54.915213Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:54.915229Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715711:0 ProgressState 2024-11-21T17:51:54.915239Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715711:0 progress is 1/1 2024-11-21T17:51:54.915254Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715711:0 2024-11-21T17:51:54.915271Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715711, publications: 2, subscribers: 1 2024-11-21T17:51:54.915399Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T17:51:54.915417Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T17:51:54.915422Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2024-11-21T17:51:54.915468Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T17:51:54.915478Z node 7 :FLAT_TX_SCHEMESHARD 
INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T17:51:54.915480Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2024-11-21T17:51:54.915487Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715711, subscribers: 1 2024-11-21T17:51:54.916033Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T17:51:54.916081Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715712:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:54.916256Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715712, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2024-11-21T17:51:54.916292Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:54.916325Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:54.916332Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715712:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:54.916420Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T17:51:54.916435Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T17:51:54.916438Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2024-11-21T17:51:54.916475Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T17:51:54.916478Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T17:51:54.916480Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2024-11-21T17:51:54.916497Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715712, at schemeshard: 72057594046644480 2024-11-21T17:51:54.921549Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211514971, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:54.921587Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715712:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211514971, at schemeshard: 72057594046644480 2024-11-21T17:51:54.921631Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715712:0 128 -> 240 2024-11-21T17:51:54.921806Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:54.921854Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046644480 2024-11-21T17:51:54.921867Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715712:0 ProgressState 2024-11-21T17:51:54.921883Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715712:0 progress is 1/1 2024-11-21T17:51:54.921895Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715712:0 2024-11-21T17:51:54.921909Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715712, publications: 2, subscribers: 1 2024-11-21T17:51:54.922003Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T17:51:54.922016Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T17:51:54.922019Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2024-11-21T17:51:54.922047Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715712 2024-11-21T17:51:54.922054Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715712 2024-11-21T17:51:54.922056Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715712, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2024-11-21T17:51:54.922061Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715712, subscribers: 1 2024-11-21T17:51:54.939668Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [1dc2bd4e-4d8dbe9f-146d404-f00706d0] Got succesfult GRPC response. 
2024-11-21T17:51:54.939706Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [1dc2bd4e-4d8dbe9f-146d404-f00706d0] reply ok 2024-11-21T17:51:54.939764Z node 7 :HTTP DEBUG: (#44,[::1]:56470) <- (200 ) 2024-11-21T17:51:54.939868Z node 7 :HTTP DEBUG: (#44,[::1]:56470) connection closed Http output full {"QueueUrl":"http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2024-11-21T17:51:52.639666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:52.639685Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:52.639700Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:52.642234Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:52.642355Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:51:52.642401Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:52.643250Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:52.651730Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:52.651877Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:52.652007Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:51:52.652022Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:51:52.652029Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:51:52.652062Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:52.655234Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:51:52.655300Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:52.655349Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:51:52.655354Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:51:52.655358Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:51:52.655363Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:52.655449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.655454Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.655476Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:51:52.655493Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:51:52.655544Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:52.655551Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 
2024-11-21T17:51:52.655556Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:51:52.655560Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:52.655564Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:52.655568Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:51:52.655573Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:52.662783Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.662808Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.662817Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:51:52.663269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:51:52.663290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:52.663314Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:51:52.663344Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:51:52.663352Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:51:52.663359Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:51:52.663365Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:52.663368Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:51:52.663371Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:51:52.663374Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:52.663429Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:52.663431Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:51:52.663433Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:51:52.663435Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:52.663442Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:51:52.663444Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:51:52.663446Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:51:52.663448Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:52.663451Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit 
WaitForPlan 2024-11-21T17:51:52.684499Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:51:52.684524Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:52.684530Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:52.684542Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:51:52.684554Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:52.684652Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.684658Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.684664Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:51:52.684680Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T17:51:52.684684Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:52.684723Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:52.684730Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:51:52.684734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:51:52.684738Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:51:52.685428Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:51:52.685448Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:52.685503Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.685509Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.685517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:52.685523Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:52.685528Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:52.685535Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2024-11-21T17:51:52.685540Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T17:51:52.685547Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:51:52.685551Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:52.685555Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:52.685559Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 
2024-11-21T17:51:52.685601Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T17:51:52.685604Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:51:52.685608Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:52.685611Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:51:52.685615Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:51:52.685627Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:52.685631Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:51:52.685635Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:52.685638Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:52.685650Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2024-11-21T17:51:52.685654Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2024-11-21T17:51:52.685657Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2024-11-21T17:51:52.685663Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T17:51:52.685666Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:52.685672Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit MakeSnapshot 2024-11-21T17:51:52.685676Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit MakeSnapshot 2024-11-21T17:51:52.685680Z node 1 :TX_DATASHARD ... 
7186 2024-11-21T17:51:55.033004Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2024-11-21T17:51:55.033010Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:51:55.033015Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:55.033092Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T17:51:55.033100Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033107Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T17:51:55.033134Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T17:51:55.033140Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033143Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T17:51:55.033161Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T17:51:55.033165Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033168Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T17:51:55.033179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T17:51:55.033183Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033186Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T17:51:55.033196Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T17:51:55.033200Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033203Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T17:51:55.033216Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T17:51:55.033219Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033222Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T17:51:55.033231Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T17:51:55.033234Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033237Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T17:51:55.033252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:51:55.033255Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033258Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T17:51:55.033266Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:51:55.033270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033273Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T17:51:55.033286Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T17:51:55.033291Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033294Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T17:51:55.033306Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T17:51:55.033309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033312Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T17:51:55.033320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T17:51:55.033324Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033327Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T17:51:55.033339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T17:51:55.033343Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033346Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T17:51:55.033360Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:55.033366Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2024-11-21T17:51:55.033374Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:51:55.033380Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:51:55.033385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:55.033410Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:55.033414Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2024-11-21T17:51:55.033420Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:51:55.033425Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:51:55.033429Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:55.033456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:55.033460Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2024-11-21T17:51:55.033466Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:51:55.033471Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:51:55.033476Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:55.033493Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:55.033498Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2024-11-21T17:51:55.033505Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:51:55.033531Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:51:55.033535Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:55.033565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:51:55.033569Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033573Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T17:51:55.033593Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 151 
TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:51:55.033597Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033600Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T17:51:55.033612Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:51:55.033615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033619Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T17:51:55.033630Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:51:55.033633Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:55.033636Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2024-11-21T17:51:33.341023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791714537785859:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.341042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001faf/r3tmp/tmpFaoDa3/pdisk_1.dat 2024-11-21T17:51:33.385398Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19213, node 1 2024-11-21T17:51:33.396872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.396882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.396883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.396917Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17287 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.441593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.441617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.442746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:33.468986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.469889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.469906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.470029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.470083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.470091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:33.470151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.470157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:33.470219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.470489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.470510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493516, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.470520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:33.470573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:33.470667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.470697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.470708Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:33.470719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:33.470731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:33.470737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:33.471047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:33.471061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:33.471065Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.471083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:17287 2024-11-21T17:51:33.486604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.486676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.486687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.486826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.486863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.487088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493530, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.487100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732211493530, at schemeshard: 72057594046644480 2024-11-21T17:51:33.487145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:33.487167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:33.487178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T17:51:33.487241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.487277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.487309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.487430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:33.487443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:33.487447Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.487458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:51:33.487848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.487892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.487900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.487909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:51:33.487925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:51:33.487931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T17:51:33.488009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.488033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.488065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.488201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:51:33.488210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:51:33.488212Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.488221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T17:51:33.488421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.488453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.488548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: C ... 9791806943308022:2474] destroyed 2024-11-21T17:51:55.031476Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.031477Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.031477Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [7:7439791806943308024:2476] destroyed 2024-11-21T17:51:55.031479Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7439791806943308025:2477] destroyed 2024-11-21T17:51:55.031479Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.031481Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439791806943308026:2478] destroyed 2024-11-21T17:51:55.031811Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [90b48759-211c4ee0-cfca9ccd-b8e96062] reply ok 2024-11-21T17:51:55.031876Z node 7 :HTTP DEBUG: (#44,[::1]:49248) <- (200 ) 2024-11-21T17:51:55.031917Z node 7 :HTTP DEBUG: (#44,[::1]:49248) connection closed Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732211515,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732211515,"StorageLimitMb":0,"StreamName":"testtopic"}} 2024-11-21T17:51:55.032315Z node 7 :HTTP DEBUG: (#44,[::1]:49260) incoming connection opened 2024-11-21T17:51:55.032379Z node 7 :HTTP DEBUG: (#44,[::1]:49260) -> (POST /Root) 2024-11-21T17:51:55.032411Z 
node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d817:917d:4a57:0:c017:917d:4a57:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: 728719d3-3260fee2-7a71bb2e-84780e12 2024-11-21T17:51:55.032520Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [728719d3-3260fee2-7a71bb2e-84780e12] got new request from [d817:917d:4a57:0:c017:917d:4a57:0] database '/Root' stream 'testtopic' 2024-11-21T17:51:55.032650Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [728719d3-3260fee2-7a71bb2e-84780e12] [auth] Authorized successfully 2024-11-21T17:51:55.032659Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [728719d3-3260fee2-7a71bb2e-84780e12] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:55.032859Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [728719d3-3260fee2-7a71bb2e-84780e12] reply ok 2024-11-21T17:51:55.032890Z node 7 :HTTP DEBUG: (#44,[::1]:49260) <- (200 ) 2024-11-21T17:51:55.032926Z node 7 :HTTP DEBUG: (#44,[::1]:49260) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1732211.515,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1732211.515,"StreamName":"testtopic"}} 2024-11-21T17:51:55.033142Z node 7 :HTTP DEBUG: (#44,[::1]:49268) incoming connection opened 2024-11-21T17:51:55.033163Z node 7 :HTTP DEBUG: (#44,[::1]:49268) -> (POST /Root) 2024-11-21T17:51:55.033188Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d817:917d:4a57:0:c017:917d:4a57:0] request [DescribeStream] url [/Root] database [/Root] requestId: f9844bd1-43dda854-c005e0f5-8684638 2024-11-21T17:51:55.033241Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [f9844bd1-43dda854-c005e0f5-8684638] got new request from [d817:917d:4a57:0:c017:917d:4a57:0] database '/Root' stream 'testtopic' 2024-11-21T17:51:55.033302Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [f9844bd1-43dda854-c005e0f5-8684638] [auth] Authorized successfully 2024-11-21T17:51:55.033317Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [f9844bd1-43dda854-c005e0f5-8684638] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:55.033460Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:55.033464Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:55.033470Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [7:7439791806943308052:2489], now have 1 active actors on pipe 2024-11-21T17:51:55.033472Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [7:7439791806943308051:2488], now have 1 active actors on pipe 2024-11-21T17:51:55.033481Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:55.033483Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439791806943308053:2490], now have 1 active actors on pipe 2024-11-21T17:51:55.033486Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle 
TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:55.033488Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:55.033488Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439791806943308049:2486], now have 1 active actors on pipe 2024-11-21T17:51:55.033490Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [7:7439791806943308050:2487], now have 1 active actors on pipe 2024-11-21T17:51:55.033588Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.033597Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [7:7439791806943308050:2487] destroyed 2024-11-21T17:51:55.033598Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.033600Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439791806943308049:2486] destroyed 2024-11-21T17:51:55.033600Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.033602Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [7:7439791806943308051:2488] destroyed 2024-11-21T17:51:55.033603Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.033605Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7439791806943308052:2489] destroyed 2024-11-21T17:51:55.033605Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:55.033607Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439791806943308053:2490] destroyed 2024-11-21T17:51:55.033649Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [f9844bd1-43dda854-c005e0f5-8684638] reply ok 2024-11-21T17:51:55.033687Z node 7 :HTTP DEBUG: (#44,[::1]:49268) <- (200 ) 2024-11-21T17:51:55.033723Z node 7 :HTTP DEBUG: (#44,[::1]:49268) connection closed Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732211515,"StorageLimitMb":0,"StreamName":"testtopic"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2024-11-21T17:51:33.668703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791716186890571:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.668842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f67/r3tmp/tmpBjWbrF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4950, node 1 2024-11-21T17:51:33.725105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:33.725119Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:33.726246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.726259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.726262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.726293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1163 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.769428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.769453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.770491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:33.799941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.801131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.801154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.801308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.801353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.801363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:33.801442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.801456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2024-11-21T17:51:33.801536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.801710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.801847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493845, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.801867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:33.801925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:33.802029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.802070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.802082Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:33.802090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:33.802096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:33.802106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:33.802420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:33.802433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:33.802437Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.802447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:1163 2024-11-21T17:51:33.816734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.816800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.816814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.816938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.816983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.817247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493866, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.817259Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732211493866, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:33.817320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:33.817343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:33.817355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T17:51:33.817453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.817500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.817599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.817645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:33.817657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:33.817661Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.817675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:51:33.818156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.818207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.818220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.818230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:51:33.818267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:51:33.818277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T17:51:33.818383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.818413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.818457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.818558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:51:33.818578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, 
count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:51:33.818582Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.818596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T17:51:33.818777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.818815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.818929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 28147 ... 16c0-9b423a6] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. Http output full {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2024-11-21T17:51:55.285355Z node 7 :HTTP DEBUG: (#47,[::1]:34866) <- (400 AWS.SimpleQueueService.EmptyBatchRequest) 2024-11-21T17:51:55.285365Z node 7 :HTTP DEBUG: (#47,[::1]:34866) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ ] } 0 2024-11-21T17:51:55.285369Z node 7 :HTTP DEBUG: (#47,[::1]:34866) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: 9962dbf7-f9d622a0-ba8916c0-9b423a6 x-amz-crc32: 2331440613 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2024-11-21T17:51:55.285408Z node 7 :HTTP DEBUG: (#47,[::1]:34866) connection closed 2024-11-21T17:51:55.285635Z node 7 :HTTP DEBUG: (#44,[::1]:34876) incoming connection opened 2024-11-21T17:51:55.285662Z node 7 :HTTP DEBUG: (#44,[::1]:34876) -> (POST /Root) 2024-11-21T17:51:55.285702Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [5883:9c3b:dd46:0:4083:9c3b:dd46:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: abc100c7-592df6a0-ad342a4b-35ae95fc 2024-11-21T17:51:55.285775Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [abc100c7-592df6a0-ad342a4b-35ae95fc] got new request from [5883:9c3b:dd46:0:4083:9c3b:dd46:0] 2024-11-21T17:51:55.285817Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [abc100c7-592df6a0-ad342a4b-35ae95fc] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:55.285823Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [abc100c7-592df6a0-ad342a4b-35ae95fc] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:55.285956Z node 7 :SQS WARN: Request [abc100c7-592df6a0-ad342a4b-35ae95fc] Message with offset 2 was not found in infly 2024-11-21T17:51:55.289108Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [abc100c7-592df6a0-ad342a4b-35ae95fc] Got succesfult GRPC response. 
2024-11-21T17:51:55.289140Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [abc100c7-592df6a0-ad342a4b-35ae95fc] reply ok 2024-11-21T17:51:55.289182Z node 7 :HTTP DEBUG: (#44,[::1]:34876) <- (200 ) 2024-11-21T17:51:55.289234Z node 7 :HTTP DEBUG: (#44,[::1]:34876) connection closed Http output full {"Failed":[{"Message":"No such message.","Id":"Id-0","Code":"InvalidParameterValue","SenderFault":true}]} 2024-11-21T17:51:55.289702Z node 7 :HTTP DEBUG: (#47,[::1]:34880) incoming connection opened 2024-11-21T17:51:55.289723Z node 7 :HTTP DEBUG: (#47,[::1]:34880) -> (POST /Root) 2024-11-21T17:51:55.289745Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8fe:9e3b:dd46:0:c0fe:9e3b:dd46:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: b9bfb7d0-a86e5558-ba43e113-609c9713 2024-11-21T17:51:55.289876Z node 7 :HTTP_PROXY WARN: http request [ChangeMessageVisibilityBatch] requestId [b9bfb7d0-a86e5558-ba43e113-609c9713] got new request with incorrect json from [d8fe:9e3b:dd46:0:c0fe:9e3b:dd46:0] 2024-11-21T17:51:55.289887Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [b9bfb7d0-a86e5558-ba43e113-609c9713] reply with status: BAD_REQUEST message: [json.exception.type_error.302] type must be string, but is number 2024-11-21T17:51:55.289906Z node 7 :HTTP DEBUG: (#47,[::1]:34880) <- (400 InvalidArgumentException) 2024-11-21T17:51:55.289911Z node 7 :HTTP DEBUG: (#47,[::1]:34880) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked c3 { "QueueUrl":"http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ { "Id":"Id-0", "ReceiptHandle":0 } ] } 0 2024-11-21T17:51:55.289914Z node 7 :HTTP DEBUG: (#47,[::1]:34880) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: b9bfb7d0-a86e5558-ba43e113-609c9713 x-amz-crc32: 3176743181 Content-Type: application/x-amz-json-1.1 Content-Length: 116 {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2024-11-21T17:51:55.289949Z node 7 :HTTP DEBUG: (#47,[::1]:34880) connection closed Http output full {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2024-11-21T17:51:55.290238Z node 7 :HTTP DEBUG: (#44,[::1]:34884) incoming connection opened 2024-11-21T17:51:55.290254Z node 7 :HTTP DEBUG: (#44,[::1]:34884) -> (POST /Root) 2024-11-21T17:51:55.290269Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [5883:9c3b:dd46:0:4083:9c3b:dd46:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 188baa97-a014ce4f-97b79617-8ac8974f 2024-11-21T17:51:55.290325Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [188baa97-a014ce4f-97b79617-8ac8974f] got new request from [5883:9c3b:dd46:0:4083:9c3b:dd46:0] 2024-11-21T17:51:55.290379Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [188baa97-a014ce4f-97b79617-8ac8974f] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:55.290387Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [188baa97-a014ce4f-97b79617-8ac8974f] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:55.290582Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [188baa97-a014ce4f-97b79617-8ac8974f] Got succesfult GRPC response. 2024-11-21T17:51:55.290600Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [188baa97-a014ce4f-97b79617-8ac8974f] reply ok 2024-11-21T17:51:55.290620Z node 7 :HTTP DEBUG: (#44,[::1]:34884) <- (200 ) Http output full {"Failed":[{"Message":"VisibilityTimeout was not provided.","Id":"Id-0","Code":"MissingParameter","SenderFault":true}]} 2024-11-21T17:51:55.290646Z node 7 :HTTP DEBUG: (#44,[::1]:34884) connection closed 2024-11-21T17:51:55.290807Z node 7 :HTTP DEBUG: (#44,[::1]:34894) incoming connection opened 2024-11-21T17:51:55.290819Z node 7 :HTTP DEBUG: (#44,[::1]:34894) -> (POST /Root) 2024-11-21T17:51:55.290835Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [5883:9c3b:dd46:0:4083:9c3b:dd46:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: d3ac24db-bd2acdaa-a497b2dc-6e769d8c 2024-11-21T17:51:55.290900Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [d3ac24db-bd2acdaa-a497b2dc-6e769d8c] got new request from [5883:9c3b:dd46:0:4083:9c3b:dd46:0] 2024-11-21T17:51:55.290960Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [d3ac24db-bd2acdaa-a497b2dc-6e769d8c] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:55.290962Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [d3ac24db-bd2acdaa-a497b2dc-6e769d8c] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:55.291219Z node 7 :SQS WARN: Request [d3ac24db-bd2acdaa-a497b2dc-6e769d8c] Failed to process receipt handle : (yexception) ydb/core/ymq/base/helpers.cpp:147: Condition violated: `!decoded.empty()' Http output full {"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"Id-0","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2024-11-21T17:51:55.291332Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [d3ac24db-bd2acdaa-a497b2dc-6e769d8c] Got succesfult GRPC response. 
2024-11-21T17:51:55.291347Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [d3ac24db-bd2acdaa-a497b2dc-6e769d8c] reply ok 2024-11-21T17:51:55.291364Z node 7 :HTTP DEBUG: (#44,[::1]:34894) <- (200 ) 2024-11-21T17:51:55.291394Z node 7 :HTTP DEBUG: (#44,[::1]:34894) connection closed 2024-11-21T17:51:55.291535Z node 7 :HTTP DEBUG: (#44,[::1]:34896) incoming connection opened 2024-11-21T17:51:55.291545Z node 7 :HTTP DEBUG: (#44,[::1]:34896) -> (POST /Root) 2024-11-21T17:51:55.291568Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [98bd:9d3b:dd46:0:80bd:9d3b:dd46:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: bc3a14d5-4b6981a4-ef0b7bc0-5272725c 2024-11-21T17:51:55.291615Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [bc3a14d5-4b6981a4-ef0b7bc0-5272725c] got new request from [98bd:9d3b:dd46:0:80bd:9d3b:dd46:0] 2024-11-21T17:51:55.291654Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [bc3a14d5-4b6981a4-ef0b7bc0-5272725c] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:55.291656Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [bc3a14d5-4b6981a4-ef0b7bc0-5272725c] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:55.297047Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [bc3a14d5-4b6981a4-ef0b7bc0-5272725c] Got succesfult GRPC response. 2024-11-21T17:51:55.297081Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [bc3a14d5-4b6981a4-ef0b7bc0-5272725c] reply ok 2024-11-21T17:51:55.297127Z node 7 :HTTP DEBUG: (#44,[::1]:34896) <- (200 ) 2024-11-21T17:51:55.297177Z node 7 :HTTP DEBUG: (#44,[::1]:34896) connection closed Http output full {"Successful":[{"Id":"Id-0"}]} 2024-11-21T17:51:55.298141Z node 7 :HTTP DEBUG: (#44,[::1]:34908) incoming connection opened 2024-11-21T17:51:55.298175Z node 7 :HTTP DEBUG: (#44,[::1]:34908) -> (POST /Root) 2024-11-21T17:51:55.298219Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [98bd:9d3b:dd46:0:80bd:9d3b:dd46:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: e89556ec-5523b4fc-99a8ed23-746fafc3 2024-11-21T17:51:55.298330Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [e89556ec-5523b4fc-99a8ed23-746fafc3] got new request from [98bd:9d3b:dd46:0:80bd:9d3b:dd46:0] 2024-11-21T17:51:55.298400Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [e89556ec-5523b4fc-99a8ed23-746fafc3] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:55.298404Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [e89556ec-5523b4fc-99a8ed23-746fafc3] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2024-11-21T17:51:55.301049Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [e89556ec-5523b4fc-99a8ed23-746fafc3] Got succesfult GRPC response. 
2024-11-21T17:51:55.301081Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [e89556ec-5523b4fc-99a8ed23-746fafc3] reply ok 2024-11-21T17:51:55.301120Z node 7 :HTTP DEBUG: (#44,[::1]:34908) <- (200 ) 2024-11-21T17:51:55.301159Z node 7 :HTTP DEBUG: (#44,[::1]:34908) connection closed >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2024-11-21T17:51:51.528160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791790159170636:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:51.528321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:51:51.556183493 785883 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:51:51.556225203 785883 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:51:51.560486Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10743: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10743 } ] 2024-11-21T17:51:51.803479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:51:51.803847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791790159170810:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016da/r3tmp/tmpVzxKcW/pdisk_1.dat 2024-11-21T17:51:51.867065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:51.867099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:51.868291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10743, node 1 TClient is connected to server localhost:30706 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:51.899824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:51.899839Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:51.900065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:51.900077Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:51.900078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:51.900126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:52.197508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.198586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.198607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.199307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:52.199384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:52.199394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:52.199781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.199791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:52.200069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.201059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512248, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.201073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:52.201137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:52.201781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.201831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.201842Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:52.201852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:52.201860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:52.201888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:51:52.202255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:52.202269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:52.202272Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:52.202287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 
2024-11-21T17:51:52.204669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:52.562482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.562550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.563664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:51:52.563703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.563748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.563766Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.563939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.563951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.563954Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:52.563987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.563994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.563996Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:51:52.564880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512612, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.564896Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211512612, at schemeshard: 72057594046644480 2024-11-21T17:51:52.564919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:51:52.565336Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2024-11-21T17:51:52.565342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2024-11-21T17:51:52.565346Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2024-11-21T17:51:52.565346Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2024-11-21T17:51:52.565348Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2024-11-21T17:51:52.565348Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2024-11-21T17:51:52.565828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.565877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.565893Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:51:52.565910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:52.565919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:52.565932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T17:51:52.566141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.566157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txI ... effective maxinflight 1024 sorted 0 2024-11-21T17:51:55.290583Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. BEFORE: 1.0 2024-11-21T17:51:55.290591Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. Send EvRead to shardId: 72075186224037900, tablePath: Root/yq/connections, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1732211515223), lockTxId = 281474976715699, lockNodeId = 4 2024-11-21T17:51:55.290606Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. AFTER: 0.1 2024-11-21T17:51:55.290608Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T17:51:55.290617Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:55.290619Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T17:51:55.290622Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T17:51:55.290870Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. 
Recv TEvReadResult from ShardID=72075186224037900, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715699 DataShard: 72075186224037900 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 9, BrokenTxLocks= 2024-11-21T17:51:55.290883Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. Taken 1 locks 2024-11-21T17:51:55.290885Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. new data for read #0 seqno = 1 finished = 1 2024-11-21T17:51:55.290891Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T17:51:55.290895Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:55.290924Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:51:55.290934Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. enter pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:51:55.290941Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. exit pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 1 freeSpace: 8388557 2024-11-21T17:51:55.290945Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. returned 1 rows; processed 1 rows 2024-11-21T17:51:55.290963Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. dropping batch for read #0 2024-11-21T17:51:55.290971Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. effective maxinflight 1024 sorted 0 2024-11-21T17:51:55.290973Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:51:55.290975Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1, CA Id [4:7439791809028884649:2538]. returned async data processed rows 1 left freeSpace 8388557 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:51:55.291037Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:55.291046Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:55.291052Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:51:55.291059Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884650:2539], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2024-11-21T17:51:55.291074Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Finish input channelId: 1, from: [4:7439791809028884649:2538] 2024-11-21T17:51:55.291087Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884650:2539], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:51:55.291125Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884650:2539], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:51:55.291132Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884650:2539], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:51:55.291136Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:51:55.291138Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T17:51:55.291141Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2024-11-21T17:51:55.291145Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:55.291147Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. 
Consider finished 2024-11-21T17:51:55.291148Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1. Tasks execution finished 2024-11-21T17:51:55.291152Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884649:2538], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:55.291178Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 1. pass away 2024-11-21T17:51:55.291212Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715704;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:55.291277Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884650:2539], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:51:55.291286Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884650:2539], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:51:55.291289Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:51:55.291291Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. Tasks execution finished 2024-11-21T17:51:55.291292Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791809028884650:2539], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==. TraceId : 01jd7xm7w6e3cf2rnwr29xk6t5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:55.291299Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715704, task: 2. pass away 2024-11-21T17:51:55.291306Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715704;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:55.291637Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715705. Ctx: { TraceId: 01jd7xm7w6e3cf2rnwr29xk6t5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Nzc0ZWQ2Ni02ZTEwNmRkMC1lZjUwZjgxYS03ZTM3NDBjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> SystemView::TabletsShards [GOOD] >> IndexBuildTest::BaseCase >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> IndexBuildTest::RejectsCreate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T17:51:52.708904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:52.709536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:52.709588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b23/r3tmp/tmpkZ57Hf/pdisk_1.dat 2024-11-21T17:51:52.821711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.850678Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:52.894359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:52.894395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:52.905093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:53.012593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:53.241538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T17:51:53.517632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:53.517657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:53.517666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:53.518494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:53.696579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:53.766939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xm65d0zpf3ekjmp685ngp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE1MTNiODItMzQ4NzE2NzgtMTRlOWRhNWMtOTQ2YWI2MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:53.787694Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xm6dmac1f4rpa8apmkcyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0Nzk4Y2UtNmJjNDJhNDMtNWI5MGY0M2ItZGI5NjMxMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2024-11-21T17:51:53.843006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xm6e8738tx18at2m3zhgk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzljZDYxNGItNjA5ZjZlZS05M2QwMjUxYS0xY2NjYjcyNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T17:51:53.860003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xm6fq9yygwcbenkk94wc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzljZDYxNGItNjA5ZjZlZS05M2QwMjUxYS0xY2NjYjcyNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2024-11-21T17:51:54.170279Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xm6sa0j3v0ywwygsv1rt4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRiODMzN2MtZGFmOWViMmUtOGIyZTUwYTAtZThiMTJmZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2024-11-21T17:51:54.682821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:54.682850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:51:54.682867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b23/r3tmp/tmp6M7BAl/pdisk_1.dat 2024-11-21T17:51:54.758773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:54.773919Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:54.819685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:54.819723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:54.830316Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:54.941591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:55.151885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T17:51:55.407755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:55.407775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:55.407784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:55.408537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:55.609800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:55.663175Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xm80f8ah0xfzjvbw7bnyj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWZiZTU5NmQtZjg1YWJiM2YtZWNiZmJlMDQtZTc5OTNjZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 889 RawX2: 8589937235 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\ty\003\000\000\000\000\000\000\021S\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\014\n\010Database\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=NWZiZTU5NmQtZjg1YWJiM2YtZWNiZmJlMDQtZTc5OTNjZDM=\222\001\026\n\022CurrentExecutionId\022\000\222\001%\n\007TraceId\022\03201jd7xm80f8ah0xfzjvbw7bnyj\222\001\021\n\006PoolId\022\007default\230\001\000\"\n\010\231\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 89 } } 2024-11-21T17:51:55.686502Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xm88xeh8pn532qp7z8gxc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjE3NTViMjYtZjQzYmViN2YtZDdjOTgzNzMtODdiZmU3OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 911 RawX2: 8589937308 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\217\003\000\000\000\000\000\000\021\234\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\014\n\010Database\022\000\222\001%\n\007TraceId\022\03201jd7xm88xeh8pn532qp7z8gxc\222\001\026\n\022CustomerSuppliedId\022\000\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=YjE3NTViMjYtZjQzYmViN2YtZDdjOTgzNzMtODdiZmU3OTQ=\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 79 } } ===== Begin SELECT 2024-11-21T17:51:55.744773Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xm89j062dd7s87y5zcbjs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTBmODdjN2UtYTdkMmVkZDYtOWU0NGFlNWUtYTAxNzczYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T17:51:55.761934Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xm8b3dqz05hdmsq7d0mv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTBmODdjN2UtYTdkMmVkZDYtOWU0NGFlNWUtYTAxNzczYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\365\006\010\001\022\223\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_1\022\r\010\240\234\001\022\005\t\000\002\006\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CurrentExecutionId\022\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\014\n\010Database\022\000\222\001%\n\007TraceId\022\03201jd7xm8b3dqz05hdmsq7d0mv9\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ZTBmODdjN2UtYTdkMmVkZDYtOWU0NGFlNWUtYTAxNzczYzY=\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\367\006\010\001\022\225\006\010\002\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 
Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\010\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\004\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004H\001\200\001\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jd7xm8b3dqz05hdmsq7d0mv9\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ZTBmODdjN2UtYTdkMmVkZDYtOWU0NGFlNWUtYTAxNzczYzY=\222\001\014\n\010Database\022\000\222\001\026\n\022CurrentExecutionId\022\000\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037888 TxId: 281474976715664 MinStep: 2033 MaxStep: 32033 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 15 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 63 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037889 TxId: 281474976715664 MinStep: 2033 MaxStep: 32033 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 14 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 33 } } ... captured readset ... captured readset ===== restarting tablet EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 128 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 176 } } ===== Waiting for commit response ===== Last SELECT 2024-11-21T17:51:56.065426Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd7xm8mg8a6mdttnrn3ebvrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODQ4M2I2NGYtNzY5NTlmOGQtNDA5YzE0YzItNDVkY2YyZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsShards [GOOD] Test command err: 2024-11-21T17:51:35.659111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791722358391366:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:35.659351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019a9/r3tmp/tmppfHkq7/pdisk_1.dat 2024-11-21T17:51:35.701785Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31446, node 1 2024-11-21T17:51:35.719645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:35.719670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:35.719672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:35.719707Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:35.759080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.759117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.776446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:35.786192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:35.791341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:35.800029Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791723695836254:2212];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:35.800754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.800772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.801017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.801035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:51:35.801680Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T17:51:35.801690Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T17:51:35.801870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:35.801908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:35.804221Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:35.807125Z node 4 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2024-11-21T17:51:35.807140Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2024-11-21T17:51:35.808902Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [5:7439791722595892321:2056], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T17:51:35.808928Z node 5 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [5:7439791722595892321:2056], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2024-11-21T17:51:35.808989Z node 5 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [5:7439791722595892321:2056], database# /Root/Tenant1, no sysview processor 2024-11-21T17:51:35.810484Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T17:51:35.810502Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2024-11-21T17:51:35.810511Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2024-11-21T17:51:35.810582Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T17:51:35.810591Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2024-11-21T17:51:35.810595Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2024-11-21T17:51:35.810598Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T17:51:35.810601Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2024-11-21T17:51:35.810603Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2024-11-21T17:51:35.810604Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2024-11-21T17:51:35.810607Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, 
result count# 0 2024-11-21T17:51:35.810610Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2024-11-21T17:51:35.810613Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2024-11-21T17:51:35.810615Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2024-11-21T17:51:35.810618Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2024-11-21T17:51:35.810621Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2024-11-21T17:51:35.810624Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2024-11-21T17:51:35.810627Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2024-11-21T17:51:35.810630Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2024-11-21T17:51:35.810634Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2024-11-21T17:51:35.810637Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2024-11-21T17:51:35.810652Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2024-11-21T17:51:35.000000Z 2024-11-21T17:51:35.810689Z node 4 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [4:7439791723695836090:2056], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T17:51:35.810748Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:35.811125Z node 4 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [4:7439791723695836090:2056], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2024-11-21T17:51:35.811137Z node 4 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [4:7439791723695836090:2056], database# /Root/Tenant1, no sysview processor 2024-11-21T17:51:35.812067Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2024-11-21T17:51:35.812101Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2024-11-21T17:51:35.812112Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2024-11-21T17:51:35.000000Z, query count# 0 2024-11-21T17:51:35.812120Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T17:51:35.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.812122Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T17:51:35.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.812125Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T17:51:35.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.812127Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T17:51:35.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.812129Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.812136Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.812139Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] 
PersistQueryTopResults: table id# 13, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.812141Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:35.813545Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2024-11-21T17:51:35.854740Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2024-11-21T17:51:35.855610Z node 4 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2024-11-21T17:51:35.856361Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2024-11-21T17:51:35.859039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:35.862507Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791724059677512:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:35.863769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.863810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.864142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.864158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected ... , processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T17:51:54.796446Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T17:51:54.796447Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037891, from:72057594046644480 is reset 2024-11-21T17:51:54.796453Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439791762431216538:2574], Recipient [6:7439791758136248708:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796454Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796455Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2024-11-21T17:51:54.796461Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439791762431216540:2576], Recipient [6:7439791758136248708:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796462Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796462Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439791763715184715:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T17:51:54.796463Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037890, from:72057594046644480 is reset 2024-11-21T17:51:54.796468Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7439791762431216468:2514], Recipient [6:7439791758136248708:2204]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.796469Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.796470Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T17:51:54.796474Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439791762431216543:2579], Recipient [6:7439791758136248708:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796475Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796477Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037893, from:72057594046644480 is reset 2024-11-21T17:51:54.796481Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [6:7439791762431216542:2578], Recipient [6:7439791758136248708:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796483Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:54.796484Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037892, from:72057594046644480 is reset 2024-11-21T17:51:54.796534Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:51:54.796512Z node 9 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [9:7439791760969490476:2268], Recipient [9:7439791760969490490:2295]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:51:54.796537Z node 9 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2024-11-21T17:51:54.796594Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268829696, Sender [9:7439791760969490205:2111], Recipient [9:7439791760969490247:2272]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:51:54.796735Z node 9 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:54.797486Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439791760969489973:2058], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T17:51:54.797504Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7439791762431216602:2619], Recipient [6:7439791758136248708:2204]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.797516Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:54.797518Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T17:51:54.797564Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439791760969489973:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T17:51:54.797798Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439791760969489958:2098], Type=268959746 2024-11-21T17:51:55.005996Z node 7 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [7:7439791755934393306:2063] 2024-11-21T17:51:55.096644Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [9:7439791756674522606:2063] 2024-11-21T17:51:55.149865Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [10:7439791759420217361:2063] 2024-11-21T17:51:55.217449Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [9:7439791760969489973:2058] 2024-11-21T17:51:55.217981Z node 9 :SYSTEM_VIEWS DEBUG: Send counters: service id# [9:7439791760969489973:2058], 
processor id# 72075186224037893, database# /Root/Tenant1, generation# 7592233398023031254, node id# 9, is retrying# 0, is labeled# 0 2024-11-21T17:51:55.252525Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439791760969489973:2058], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T17:51:55.252605Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439791760969489973:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T17:51:55.291693Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [9:7439791756674522606:2063] 2024-11-21T17:51:55.766925Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439791807419059082:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:55.767115Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0019a9/r3tmp/tmpHChYPE/pdisk_1.dat 2024-11-21T17:51:55.795202Z node 11 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15484, node 11 2024-11-21T17:51:55.813491Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:55.813509Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:55.813511Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:55.813563Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:55.872644Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:55.872683Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:55.873171Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:55.873551Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:51:55.874107Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:55.876197Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:56.043638Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439791811714027131:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:56.043653Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439791811714027123:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:56.043666Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:56.044284Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:51:56.045803Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439791811714027137:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:51:56.134982Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xm8fpf88ajdgrny6dtsyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=NTJiYmJiMjctY2RjMGFkYzUtNmQ0MWYyYzUtZDIwMDdiYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:56.135435Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7439791811714027225:2318], owner: [11:7439791811714027221:2316], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:56.135551Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7439791811714027225:2318], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:56.135692Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7439791811714027225:2318], row count: 3, finished: 1 2024-11-21T17:51:56.135704Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7439791811714027225:2318], owner: [11:7439791811714027221:2316], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T17:51:56.136278Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211516134, txId: 281474976715661] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] Test command err: 2024-11-21T17:51:33.418611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791714605411801:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.418776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f80/r3tmp/tmp0a3mBJ/pdisk_1.dat 2024-11-21T17:51:33.471682Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29659, node 1 2024-11-21T17:51:33.477819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.477832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.477833Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.477865Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2002 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.505306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.506107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.506279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.506287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:51:33.506363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T17:51:33.506481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493551, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:33.506867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:33.506989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.507014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.507036Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:33.507049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:33.507060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:51:33.507071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:51:33.507355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:51:33.507366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:51:33.507369Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.507377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:2002 2024-11-21T17:51:33.519184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.519243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.519253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.519365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.519395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.519586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.519609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting waiting... 2024-11-21T17:51:33.519666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493565, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.519677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732211493565, at schemeshard: 72057594046644480 2024-11-21T17:51:33.519752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:51:33.519776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:51:33.519788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T17:51:33.519858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.519925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.519962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.520051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:51:33.520068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:51:33.520075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.520091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T17:51:33.520628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.520677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.520687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.520696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T17:51:33.520699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:33.520709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T17:51:33.520718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T17:51:33.520774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.520799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.520834Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.520915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T17:51:33.520926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T17:51:33.520929Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.520957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T17:51:33.521229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.521264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.521385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CRE ... 976715708:0 progress is 1/1 2024-11-21T17:51:56.167802Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715708:0 2024-11-21T17:51:56.167809Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715708, publications: 2, subscribers: 1 2024-11-21T17:51:56.167919Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715708 2024-11-21T17:51:56.167931Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715708 2024-11-21T17:51:56.167933Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715708, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2024-11-21T17:51:56.167960Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715708 2024-11-21T17:51:56.167965Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715708 2024-11-21T17:51:56.167966Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715708, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2024-11-21T17:51:56.167970Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715708, subscribers: 1 2024-11-21T17:51:56.168415Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T17:51:56.168465Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715709:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:56.168617Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715709, database: /Root, subject: , status: StatusAccepted, operation: 
CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2024-11-21T17:51:56.168651Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:56.168687Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:56.168699Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715709:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:56.168735Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715709, at schemeshard: 72057594046644480 2024-11-21T17:51:56.168818Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T17:51:56.168849Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T17:51:56.168858Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2024-11-21T17:51:56.168902Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T17:51:56.168909Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T17:51:56.168911Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2024-11-21T17:51:56.174802Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211516224, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:56.174825Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715709:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211516224, at schemeshard: 72057594046644480 2024-11-21T17:51:56.174879Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715709:0 128 -> 240 2024-11-21T17:51:56.175087Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:56.175151Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:56.175173Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715709:0 ProgressState 2024-11-21T17:51:56.175193Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715709:0 progress is 1/1 2024-11-21T17:51:56.175215Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715709:0 2024-11-21T17:51:56.175231Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715709, publications: 2, subscribers: 1 2024-11-21T17:51:56.175325Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T17:51:56.175337Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T17:51:56.175340Z node 7 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2024-11-21T17:51:56.175368Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715709 2024-11-21T17:51:56.175374Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715709 2024-11-21T17:51:56.175375Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2024-11-21T17:51:56.175380Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715709, subscribers: 1 2024-11-21T17:51:56.191848Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [8754ad91-ba5b3183-b46b2179-35233cd6] Got succesfult GRPC response. 2024-11-21T17:51:56.191884Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [8754ad91-ba5b3183-b46b2179-35233cd6] reply ok 2024-11-21T17:51:56.191941Z node 7 :HTTP DEBUG: (#44,[::1]:46808) <- (200 ) Http output full {"QueueUrl":"http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000301v0/DlqName"} 2024-11-21T17:51:56.192023Z node 7 :HTTP DEBUG: (#44,[::1]:46808) connection closed 2024-11-21T17:51:56.192374Z node 7 :HTTP DEBUG: (#44,[::1]:46812) incoming connection opened 2024-11-21T17:51:56.192405Z node 7 :HTTP DEBUG: (#44,[::1]:46812) -> (POST /Root) 2024-11-21T17:51:56.192447Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8d0:7ebb:f515:0:c0d0:7ebb:f515:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 32078a89-272fe8eb-b68ed4e9-df3d2139 2024-11-21T17:51:56.192562Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [32078a89-272fe8eb-b68ed4e9-df3d2139] got new request from [d8d0:7ebb:f515:0:c0d0:7ebb:f515:0] 2024-11-21T17:51:56.193756Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [32078a89-272fe8eb-b68ed4e9-df3d2139] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:56.193767Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [32078a89-272fe8eb-b68ed4e9-df3d2139] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:56.198053Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [32078a89-272fe8eb-b68ed4e9-df3d2139] Got succesfult GRPC response. 
2024-11-21T17:51:56.198095Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [32078a89-272fe8eb-b68ed4e9-df3d2139] reply ok 2024-11-21T17:51:56.198138Z node 7 :HTTP DEBUG: (#44,[::1]:46812) <- (200 ) 2024-11-21T17:51:56.198198Z node 7 :HTTP DEBUG: (#44,[::1]:46812) connection closed Http output full {"Attributes":{"QueueArn":"yrn:yc:ymq:ru-central1:folder4:DlqName"}} 2024-11-21T17:51:56.198571Z node 7 :HTTP DEBUG: (#44,[::1]:46828) incoming connection opened 2024-11-21T17:51:56.198592Z node 7 :HTTP DEBUG: (#44,[::1]:46828) -> (POST /Root) 2024-11-21T17:51:56.198627Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18a0:7fbb:f515:0:a0:7fbb:f515:0] request [SetQueueAttributes] url [/Root] database [/Root] requestId: f0603a57-24020a07-d87d5c00-22f6befb 2024-11-21T17:51:56.198742Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [f0603a57-24020a07-d87d5c00-22f6befb] got new request from [18a0:7fbb:f515:0:a0:7fbb:f515:0] 2024-11-21T17:51:56.198838Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [f0603a57-24020a07-d87d5c00-22f6befb] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:56.198846Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [f0603a57-24020a07-d87d5c00-22f6befb] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:56.210654Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [f0603a57-24020a07-d87d5c00-22f6befb] Got succesfult GRPC response. 2024-11-21T17:51:56.210676Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [f0603a57-24020a07-d87d5c00-22f6befb] reply ok 2024-11-21T17:51:56.210728Z node 7 :HTTP DEBUG: (#44,[::1]:46828) <- (200 ) 2024-11-21T17:51:56.210774Z node 7 :HTTP DEBUG: (#44,[::1]:46828) connection closed Http output full {} 2024-11-21T17:51:56.211101Z node 7 :HTTP DEBUG: (#44,[::1]:46842) incoming connection opened 2024-11-21T17:51:56.211134Z node 7 :HTTP DEBUG: (#44,[::1]:46842) -> (POST /Root) 2024-11-21T17:51:56.211172Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d877:7dbb:f515:0:c077:7dbb:f515:0] request [ListDeadLetterSourceQueues] url [/Root] database [/Root] requestId: b5efd069-b34af8ad-1b0a094e-ff6b9ab2 2024-11-21T17:51:56.211265Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [b5efd069-b34af8ad-1b0a094e-ff6b9ab2] got new request from [d877:7dbb:f515:0:c077:7dbb:f515:0] 2024-11-21T17:51:56.211337Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [b5efd069-b34af8ad-1b0a094e-ff6b9ab2] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:56.211345Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [b5efd069-b34af8ad-1b0a094e-ff6b9ab2] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:56.213486Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [b5efd069-b34af8ad-1b0a094e-ff6b9ab2] Got succesfult GRPC response. 
2024-11-21T17:51:56.213518Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [b5efd069-b34af8ad-1b0a094e-ff6b9ab2] reply ok 2024-11-21T17:51:56.213562Z node 7 :HTTP DEBUG: (#44,[::1]:46842) <- (200 ) 2024-11-21T17:51:56.213620Z node 7 :HTTP DEBUG: (#44,[::1]:46842) connection closed Http output full {"NextToken":"","QueueUrls":["http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName"]} >> IndexBuildTest::CheckLimitWithDroppedIndex >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> TPersQueueTest::BadTopic >> TPersQueueTest::ReadFromSeveralPartitionsMigrated >> PrivateApi::Nodes [GOOD] >> TPersQueueTest::WriteExisting ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:56.015016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:56.015041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.015046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:56.015051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:56.015069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:56.015074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:56.015082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.015160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:56.026848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:56.026871Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:56.029961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:56.030792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:56.030834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:56.032194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:56.032426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:56.032531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.032613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:56.033570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2024-11-21T17:51:56.033830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.033840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.033875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:56.033882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.033887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:56.033899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.034984Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:56.048781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:56.048843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.048890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:56.048926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:56.048931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.049542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.049558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:56.049600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.049606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:56.049609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:56.049612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:56.049919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.049929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:56.049932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:56.050176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.050181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.050185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.050189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.050666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:56.051251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:56.051309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:56.051496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.051526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:56.051545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.051613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:56.051621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.051650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.051662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:56.052142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.052150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.052193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.052198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:56.052308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.052317Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2024-11-21T17:51:56.052329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:56.052333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.052339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:56.052345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.052350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:56.052353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:56.052365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:56.052370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:56.052373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:56.052646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.052659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.052662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:56.052666Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:56.052669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.052680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
46744073709551615 PrepareArriveTime: 152000 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 96 } } 2024-11-21T17:51:56.573091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2024-11-21T17:51:56.573127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 152000 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 96 } } 2024-11-21T17:51:56.573144Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 152000 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 96 } } 2024-11-21T17:51:56.573152Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:51:56.573192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.573198Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128 2024-11-21T17:51:56.573861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.573923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.573929Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#109:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:56.573940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2024-11-21T17:51:56.573973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:56.574419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2024-11-21T17:51:56.574478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2024-11-21T17:51:56.574681Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T17:51:56.574707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:56.574715Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2024-11-21T17:51:56.574790Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129 2024-11-21T17:51:56.574840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:51:56.575968Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.575982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:51:56.576044Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.576049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 109, path id: 4 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2024-11-21T17:51:56.576205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.576214Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:51:56.576532Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2024-11-21T17:51:56.576549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2024-11-21T17:51:56.576555Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2024-11-21T17:51:56.576560Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T17:51:56.576566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:51:56.576586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 109 2024-11-21T17:51:56.577992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2024-11-21T17:51:56.578419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 316 } } 2024-11-21T17:51:56.578432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, 
TxId: 109, tablet: 72075186233409548, partId: 0 2024-11-21T17:51:56.578452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 316 } } 2024-11-21T17:51:56.578464Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 316 } } 2024-11-21T17:51:56.578619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 662 RawX2: 8589937212 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2024-11-21T17:51:56.578625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2024-11-21T17:51:56.578638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 662 RawX2: 8589937212 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2024-11-21T17:51:56.578644Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:51:56.578665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 662 RawX2: 8589937212 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2024-11-21T17:51:56.578680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.578684Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.578688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:51:56.578694Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240 2024-11-21T17:51:56.579371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.579485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.579552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.579561Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState 2024-11-21T17:51:56.579574Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2024-11-21T17:51:56.579578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2024-11-21T17:51:56.579585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2024-11-21T17:51:56.579600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:339:2314] message: TxId: 109 2024-11-21T17:51:56.579607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2024-11-21T17:51:56.579613Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2024-11-21T17:51:56.579617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0 2024-11-21T17:51:56.579646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:51:56.580046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2024-11-21T17:51:56.580055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:765:2714] TestWaitNotification: OK eventTxId 109 >> TPersQueueTest::SetupLockSession2 >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> TPersQueueTest::ReadFromSeveralPartitions >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::DropIndex >> TPersQueueTest::UpdatePartitionLocation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2024-11-21T17:51:51.490568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791789690050142:2080];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:51.491364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:51:51.531658592 785791 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:51:51.531705734 785791 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:51:51.539398Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28127: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:28127 2024-11-21T17:51:51.539505Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28127: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:28127 } ] 2024-11-21T17:51:51.832360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001814/r3tmp/tmpZPpIZW/pdisk_1.dat 2024-11-21T17:51:51.835598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791789690050421:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 28127, node 1 TClient is connected to server localhost:3562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:51.925163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:51.925179Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:51.925315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:51.925326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:51.925327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:51.925373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:52.174220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.175114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.175140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.176717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:52.176812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:52.176817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:52.177590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.177597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:52.179591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.180468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512227, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.180480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 
2024-11-21T17:51:52.180546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 waiting... 2024-11-21T17:51:52.184514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.184591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.184606Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:52.184624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:52.184637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:52.184662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:51:52.185421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:52.185436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:52.185441Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:52.185486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T17:51:52.185750Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:52.312015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:52.312048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:52.313710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:52.536652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.536722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.537410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:51:52.537454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.537512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.537532Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.537811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.537823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 
281474976715658 2024-11-21T17:51:52.537828Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:52.537865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.537871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.537873Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:51:52.538872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512584, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.538888Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211512584, at schemeshard: 72057594046644480 2024-11-21T17:51:52.538913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:51:52.539283Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2024-11-21T17:51:52.539294Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T17:51:52.539296Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T17:51:52.539773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:52.539990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.540064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.540087Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:51:52.540102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:52.540114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:52.540133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:51:52.540392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 7205759404 ... 0 2024-11-21T17:51:56.385583Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. BEFORE: 1.0 2024-11-21T17:51:56.385597Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. Send EvRead to shardId: 72075186224037897, tablePath: Root/yq/nodes, ranges: [(String : Tenant) ; (String : Tenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1732211516378), lockTxId = 281474976715677, lockNodeId = 7 2024-11-21T17:51:56.385616Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. 
AFTER: 0.1 2024-11-21T17:51:56.385623Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T17:51:56.385644Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:51:56.385652Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T17:51:56.385657Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T17:51:56.385933Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= LockId: 281474976715677 DataShard: 72075186224037897 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 11, BrokenTxLocks= 2024-11-21T17:51:56.385943Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. Taken 1 locks 2024-11-21T17:51:56.385945Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. new data for read #0 seqno = 1 finished = 1 2024-11-21T17:51:56.385950Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2024-11-21T17:51:56.385955Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:51:56.385958Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:51:56.385961Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:51:56.385963Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:51:56.385965Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. returned 0 rows; processed 0 rows 2024-11-21T17:51:56.385981Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. dropping batch for read #0 2024-11-21T17:51:56.385982Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. 
effective maxinflight 1024 sorted 0 2024-11-21T17:51:56.385984Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:51:56.385986Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1, CA Id [7:7439791812989069258:2435]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:51:56.386013Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:51:56.386016Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:51:56.386022Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:51:56.386026Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069260:2436], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2024-11-21T17:51:56.386042Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 2. Finish input channelId: 1, from: [7:7439791812989069258:2435] 2024-11-21T17:51:56.386051Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2024-11-21T17:51:56.386055Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069260:2436], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.386058Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:51:56.386059Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. 
Consider finished 2024-11-21T17:51:56.386061Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1. Tasks execution finished 2024-11-21T17:51:56.386063Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069258:2435], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:56.386093Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069260:2436], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.386094Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 1. pass away 2024-11-21T17:51:56.386095Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069260:2436], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:56.386100Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:51:56.386103Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T17:51:56.386126Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715677;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:56.386142Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069260:2436], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.386145Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069260:2436], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:56.386147Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:51:56.386148Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 2. Tasks execution finished 2024-11-21T17:51:56.386150Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7439791812989069260:2436], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jd7xm8tscvg03p0mzdgcy6cq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:56.386168Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715677, task: 2. pass away 2024-11-21T17:51:56.386214Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715677;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:56.399132Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd7xm8z3b2zfqjmxecbkx3vz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmUzYjk3YWItYzJmODEwZDctNDIyOGJmZTMtNjQ5NDExMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsDropIndex >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:41.194505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:41.194535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:41.194539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:41.194543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:41.194557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:41.194561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:41.194570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:41.194655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:41.202476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:41.202496Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:41.207324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:41.208146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:41.208200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:41.209741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:41.209925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:41.210038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:41.210128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:41.211110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:41.211376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T17:51:41.211389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:41.211429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:41.211436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:41.211442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:41.211459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.212944Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:41.228873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:41.228984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.229055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:41.229113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:41.229122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.231471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:41.231504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:41.231552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.231562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:41.231565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:41.231569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:41.232082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.232090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:41.232093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:41.232378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.232388Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.232393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:41.232399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:41.232836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:41.233096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:41.233136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:41.233266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:41.233285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:41.233317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:41.233362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:41.233367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:41.233395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:41.233406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:41.233677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:41.233682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:41.233717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:41.233720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:41.233776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:41.233780Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:41.233787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:41.233790Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:41.233794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:41.233797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:41.233800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:41.233802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:41.233809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:41.233813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:41.233815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:41.234061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:41.234075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:41.234080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:41.234084Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:41.234088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:41.234101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
RN: Table profiles were not loaded 2024-11-21T17:51:57.016598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:57.016681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:51:57.016708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:51:57.016834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:51:57.016895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.016998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 
72057594046678944 2024-11-21T17:51:57.017120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.017221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T17:51:57.020032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:57.020504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:1748:3675], Recipient [1:1748:3675]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:57.020519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T17:51:57.020994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:57.021011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:57.021098Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1748:3675], Recipient [1:1748:3675]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:57.021106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T17:51:57.021243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:57.021256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:57.021264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:57.021268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T17:51:57.021970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1785:3675], Recipient [1:1748:3675]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:57.021981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T17:51:57.021986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1748:3675] sender: [1:1805:2058] recipient: [1:15:2062] 2024-11-21T17:51:57.047642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1804:3721], Recipient [1:1748:3675]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T17:51:57.047662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2024-11-21T17:51:57.047692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:57.047791Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 88us result status StatusSuccess 2024-11-21T17:51:57.047995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 25856 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 3882 Memory: 156376 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 25856 DataSize: 25856 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2024-11-21T17:51:51.782508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791793727230475:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:51.782659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:51:51.833603761 786413 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:51:51.833653777 786413 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:51:51.838330Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64482: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64482 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016d0/r3tmp/tmpTxBglK/pdisk_1.dat 2024-11-21T17:51:52.195665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791798022198253:2276], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:52.195705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 64482, node 1 TClient is connected to server localhost:12862 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:52.228449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:52.228466Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:52.228617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:52.228620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:52.228622Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:52.228671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T17:51:52.446023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.447097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.447109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.447740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:52.447804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:52.447808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:52.448297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.448302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:52.448696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.450720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512493, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.450727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:52.450804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:52.451463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.451508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.451518Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:52.451529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:52.451539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:52.451550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:51:52.452212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:52.452220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:52.452224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:52.452252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T17:51:52.452283Z node 1 
:FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:52.552517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:52.552542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:52.557818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:52.852596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.852660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.854900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:51:52.854979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.855044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.855058Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.855514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.855523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.855528Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:52.855573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.855575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.855577Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:51:52.860358Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2024-11-21T17:51:52.860373Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2024-11-21T17:51:52.860375Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2024-11-21T17:51:52.861500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:52.863461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/yq/mappings, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.863594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 1 -> 2 2024-11-21T17:51:52.863775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.863779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.864380Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2024-11-21T17:51:52.864390Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T17:51:52.864392Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T17:51:52.864934Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2024-11-21T17:51:52.864951Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T17:51:52.864952Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T17:51:52.865166Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2024-11-21T17:51:52.865176Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T17:51:52.865177Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T17:51:52.865303Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2024-11-21T17:51:52.865304Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call crea ... 
ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596060Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596081Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596108Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596134Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596158Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596182Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596206Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596241Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596273Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596300Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596331Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596354Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596379Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596398Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596416Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596435Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596456Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596475Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596495Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596514Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596531Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596548Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596567Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596589Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596610Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596633Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596653Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596672Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596690Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596710Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596730Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596756Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596783Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596807Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596831Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596855Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596878Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596903Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596925Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596948Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.596976Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597001Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597027Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597050Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597075Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597089Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597103Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597124Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597144Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597166Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597184Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597205Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597223Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597247Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597265Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597289Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597311Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597336Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597351Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597372Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597394Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597414Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597434Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597453Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597477Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597504Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597527Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597548Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597572Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597598Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597621Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597645Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597664Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597680Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597699Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597721Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597739Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597762Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597801Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597833Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597855Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597876Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597898Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597920Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597943Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597966Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.597990Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598015Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598042Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598064Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598088Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598123Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598143Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598162Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598179Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598202Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598222Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598242Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598261Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598282Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598302Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598324Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598345Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598365Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598386Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598410Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598431Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598454Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598473Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598496Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598517Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598537Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598557Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598576Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598593Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598613Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598633Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598654Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598669Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598690Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598704Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598726Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598750Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598774Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598801Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598825Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598849Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598873Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598905Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:51:56.598934Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2024-11-21T17:51:52.008740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791797489448816:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:52.008953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:51:52.105588256 787046 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:51:52.105644257 787046 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:51:52.108552Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24152: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24152 2024-11-21T17:51:52.110195Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24152: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24152 } ] 2024-11-21T17:51:52.403234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791797489449118:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:52.403260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ca/r3tmp/tmpPo1H3J/pdisk_1.dat 2024-11-21T17:51:52.440634Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24152, node 1 2024-11-21T17:51:52.447625Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:51:52.447639Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:51:52.449276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:52.449287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:52.449288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:52.449327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:52.452724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.452936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.452941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.452951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:51:52.452968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:61397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:52.754766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:52.754817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:52.756683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:52.778424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.779562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.779586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:52.780170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:52.780275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:52.780282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:52.780803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.780808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:52.780859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:52.781219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.782201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512829, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.782213Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:52.782314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:52.782678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.782717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.782730Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:52.782743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:52.782755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:52.782769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:52.783190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:51:52.783211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:52.783215Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:52.783226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:51:53.109202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:53.109253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:53.110029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:51:53.110074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:53.110120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:53.110135Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:53.110414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:53.110428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:53.110433Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:53.110468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:53.110475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:53.110476Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:51:53.111325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211513158, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:53.111339Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211513158, at schemeshard: 72057594046644480 2024-11-21T17:51:53.111361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:51:53.111669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:53.111703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:53.111716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 
2024-11-21T17:51:53.111729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:53.111741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715 ... 5Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. Shards State: TShardState{ TabletId: 72075186224037894, Last Key , Ranges: [#0: [(String : yandexcloud://WTF, String : ) ; (String : yandexcloud://WTF)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 } 2024-11-21T17:51:56.444568Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. effective maxinflight 1 sorted 1 2024-11-21T17:51:56.444569Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. BEFORE: 1.0 2024-11-21T17:51:56.444586Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. Send EvRead to shardId: 72075186224037894, tablePath: Root/yq/connections, ranges: [(String : yandexcloud://WTF, String : ) ; (String : yandexcloud://WTF)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2024-11-21T17:51:56.444602Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. AFTER: 0.1 2024-11-21T17:51:56.444607Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T17:51:56.444626Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.444632Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T17:51:56.444634Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T17:51:56.444780Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. Recv TEvReadResult from ShardID=72075186224037894, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2024-11-21T17:51:56.444790Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. Taken 0 locks 2024-11-21T17:51:56.444793Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. new data for read #0 seqno = 1 finished = 1 2024-11-21T17:51:56.444803Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T17:51:56.444813Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.444821Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:51:56.444829Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. enter pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:51:56.444832Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. exit pack cells method shardId: 72075186224037894 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:51:56.444839Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. returned 0 rows; processed 0 rows 2024-11-21T17:51:56.444855Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. dropping batch for read #0 2024-11-21T17:51:56.444862Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. effective maxinflight 1 sorted 1 2024-11-21T17:51:56.444863Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:51:56.444867Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1, CA Id [4:7439791812934871534:2469]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:51:56.444892Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871536:2470], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2024-11-21T17:51:56.444893Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.444895Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:56.444902Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:51:56.444906Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Finish input channelId: 1, from: [4:7439791812934871534:2469] 2024-11-21T17:51:56.444912Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. 
TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2024-11-21T17:51:56.444913Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871536:2470], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.444918Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.444919Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:56.444921Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1. Tasks execution finished 2024-11-21T17:51:56.444924Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871534:2469], TxId: 281474976715685, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:56.444928Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871536:2470], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.444929Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871536:2470], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:56.444933Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:51:56.444934Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T17:51:56.444942Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871536:2470], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:56.444944Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871536:2470], TxId: 281474976715685, task: 2. 
Ctx: { TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:56.444945Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:51:56.444946Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. Tasks execution finished 2024-11-21T17:51:56.444948Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791812934871536:2470], TxId: 281474976715685, task: 2. Ctx: { TraceId : 01jd7xm9014htsh96xq5fhgg79. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDdmYzY5NjItMzg0N2ZlY2UtODgwOTUwYTgtMWQxN2MxYjc=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:56.444956Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 1. pass away 2024-11-21T17:51:56.444960Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715685, task: 2. pass away 2024-11-21T17:51:56.444979Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715685;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:56.444984Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715685;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2024-11-21T17:51:54.631899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:54.631918Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:54.631935Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:54.633793Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:54.633888Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:51:54.633926Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:54.634474Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:54.640448Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:54.640563Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:54.640674Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:51:54.640687Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:51:54.640692Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:51:54.640720Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:54.643295Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:51:54.643354Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:54.643406Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:51:54.643412Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:51:54.643417Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:51:54.643423Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:54.643510Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.643518Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.643545Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:51:54.643567Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:51:54.643620Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:54.643627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:54.643634Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:51:54.643640Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:54.643644Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:54.643650Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 
2024-11-21T17:51:54.643656Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:54.649602Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.649623Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.649631Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:51:54.650043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:51:54.650059Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:54.650082Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:51:54.650116Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:51:54.650130Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:51:54.650138Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:51:54.650146Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:54.650150Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:51:54.650155Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:51:54.650159Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:54.650223Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:54.650229Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:51:54.650232Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:51:54.650235Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:54.650246Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:51:54.650249Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:51:54.650252Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:51:54.650255Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:54.650261Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:51:54.671257Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:51:54.671283Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:54.671289Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:54.671302Z node 1 :TX_DATASHARD 
TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:51:54.671315Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:54.671418Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.671423Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.671429Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:51:54.671442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:51:54.671445Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:54.671479Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:54.671486Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.671489Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:51:54.671491Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:51:54.672127Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:51:54.672141Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:54.672188Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.672193Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.672198Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:54.672205Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:54.672209Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:54.672216Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:51:54.672220Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:51:54.672243Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.672248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:54.672251Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:54.672255Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:51:54.672296Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:51:54.672299Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.672302Z node 1 :TX_DATASHARD TRACE: Advance execution plan for 
[1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:54.672306Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:51:54.672309Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:51:54.672319Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:54.672322Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:51:54.672325Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:54.672329Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:54.672339Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:51:54.672342Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:51:54.672345Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:51:54.672350Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.672353Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:54.672356Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... d event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:51:56.990644Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:56.990647Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T17:51:56.990672Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:51:56.990679Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 98 Flags# 0} 2024-11-21T17:51:56.990687Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:51:56.990692Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 99 Flags# 0} 2024-11-21T17:51:56.990697Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:51:56.990701Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 100 Flags# 0} 2024-11-21T17:51:56.990706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:56.990711Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T17:51:56.990723Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T17:51:56.990730Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 
2024-11-21T17:51:56.990735Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:56.990848Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:56.990858Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T17:51:56.990870Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:51:56.990877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:56.990932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:56.990938Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2024-11-21T17:51:56.990948Z node 1 :TX_DATASHARD DEBUG: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2024-11-21T17:51:56.990960Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:56.990964Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T17:51:56.990972Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:51:56.990977Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:56.991004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:56.991008Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T17:51:56.991015Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:51:56.991019Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:56.991103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:51:56.991110Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:56.991115Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T17:51:56.991131Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:51:56.991134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:56.991137Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T17:51:56.991146Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:227:2222], Recipient [1:433:2383]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T17:51:56.991151Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:51:56.991155Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 
source 9437184 dest 9437186 producer 9437184 txId 152 2024-11-21T17:51:56.991168Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T17:51:56.991180Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2024-11-21T17:51:56.991197Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T17:51:56.991212Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:51:56.991215Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:56.991219Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2024-11-21T17:51:56.991231Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:433:2383], Recipient [1:433:2383]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:56.991234Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:56.991242Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T17:51:56.991247Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:51:56.991253Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2024-11-21T17:51:56.991258Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2024-11-21T17:51:56.991266Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T17:51:56.991270Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T17:51:56.991274Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2024-11-21T17:51:56.991278Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2024-11-21T17:51:56.991424Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2024-11-21T17:51:56.991436Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:51:56.991447Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2024-11-21T17:51:56.991451Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2024-11-21T17:51:56.991455Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2024-11-21T17:51:56.991459Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:51:56.991529Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2024-11-21T17:51:56.991534Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 
executing on unit CompleteOperation 2024-11-21T17:51:56.991538Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2024-11-21T17:51:56.991542Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2024-11-21T17:51:56.991548Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T17:51:56.991551Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2024-11-21T17:51:56.991555Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2024-11-21T17:51:56.991559Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:56.991562Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T17:51:56.991566Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T17:51:56.991569Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2024-11-21T17:51:56.991639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:51:56.991645Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:56.991649Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T17:51:57.003737Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:51:57.003764Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:51:57.003802Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:51:57.003819Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:51:57.003838Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:51:57.003918Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:51:57.003924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:51:57.003931Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::DropIndex [GOOD] >> TPersQueueTest::DirectReadPreCached >> DemoTx::Scenario_1 >> TopicService::OneConsumer_TheRangesDoNotOverlap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit [GOOD] Test command err: 2024-11-21T17:51:53.107978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:53.108548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:53.108575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000b1b/r3tmp/tmpbGqLsY/pdisk_1.dat 2024-11-21T17:51:53.215415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:53.236731Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:53.279489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:53.279526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:53.290187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:53.399427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:53.414868Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:53.415112Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:53.415204Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:51:53.415252Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:53.429081Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:53.429260Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:53.429285Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:53.429456Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:53.429480Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:53.429487Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:53.429528Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:53.433086Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:53.433181Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:53.433246Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:51:53.433251Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:53.433255Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:51:53.433260Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:53.433421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.433427Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.433571Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:53.433593Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:51:53.433607Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.433611Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.433617Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:51:53.433624Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:53.433631Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:53.433637Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:51:53.433642Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:51:53.433645Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:51:53.433650Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:53.433655Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:53.433675Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:51:53.433679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:53.433699Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:53.433741Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:51:53.433751Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:53.433766Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:53.433773Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:51:53.433776Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:51:53.433780Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:51:53.433784Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:51:53.433824Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:53.433828Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:51:53.433830Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:51:53.433833Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:51:53.433844Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:51:53.433847Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:51:53.433850Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:51:53.433868Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:51:53.433873Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:51:53.434129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:51:53.434139Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:53.444528Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:53.444556Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:51:53.444562Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:51:53.444574Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:51:53.444586Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:53.623414Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.623434Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.623442Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:51:53.623459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:51:53.623463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:53.623501Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:51:53.623509Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:51:53.623513Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:51:53.623518Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:51:53.624291Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:51:53.624313Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:53.624450Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.624457Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.624463Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:53.624470Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:53.624474Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:51:53.624482Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... ASHARD INFO: Change sender killed: at tablet: 72075186224037892 2024-11-21T17:51:56.689252Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [2:1138:2897] 2024-11-21T17:51:56.689256Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2024-11-21T17:51:56.689262Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2024-11-21T17:51:56.689266Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T17:51:56.689317Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:988:2787], Recipient [2:988:2787]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:56.689322Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:56.689360Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:988:2787], Recipient [2:718:2598]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2024-11-21T17:51:56.689368Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2024-11-21T17:51:56.689427Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1133:2892], Recipient [2:718:2598]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:56.689432Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:56.689450Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:988:2787]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2024-11-21T17:51:56.689454Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T17:51:56.689458Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2024-11-21T17:51:56.689462Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2024-11-21T17:51:56.689487Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2024-11-21T17:51:56.689492Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037892 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:56.689497Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2024-11-21T17:51:56.689502Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2024-11-21T17:51:56.689505Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2024-11-21T17:51:56.689510Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2024-11-21T17:51:56.689515Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2024-11-21T17:51:56.689569Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1135:2894], Recipient [2:988:2787]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:56.689574Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:56.689580Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1133:2892], serverId# [2:1135:2894], sessionId# [0:0:0] 2024-11-21T17:51:56.689672Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:988:2787]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2024-11-21T17:51:56.689677Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T17:51:56.689682Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2024-11-21T17:51:56.689688Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2024-11-21T17:51:56.689694Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2024-11-21T17:51:56.699983Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2024-11-21T17:51:56.700009Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2024-11-21T17:51:56.700033Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T17:51:56.700058Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2024-11-21T17:51:56.700069Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1142:2901] 2024-11-21T17:51:56.700073Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2024-11-21T17:51:56.700079Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2024-11-21T17:51:56.700084Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2024-11-21T17:51:56.700153Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:990:2789], Recipient [2:718:2598]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2024-11-21T17:51:56.700171Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2024-11-21T17:51:56.700240Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:990:2789], Recipient 
[2:990:2789]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:56.700245Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:56.700282Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1134:2893], Recipient [2:718:2598]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:56.700286Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:51:56.700319Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:990:2789]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2024-11-21T17:51:56.700323Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T17:51:56.700327Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2024-11-21T17:51:56.700331Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2024-11-21T17:51:56.700353Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2024-11-21T17:51:56.700356Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:56.700360Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2024-11-21T17:51:56.700363Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2024-11-21T17:51:56.700368Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2024-11-21T17:51:56.700372Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2024-11-21T17:51:56.700376Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2024-11-21T17:51:56.700426Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1136:2895], Recipient [2:990:2789]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:56.700431Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:56.700437Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1134:2893], serverId# [2:1136:2895], sessionId# [0:0:0] 2024-11-21T17:51:56.700530Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:990:2789]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2024-11-21T17:51:56.700534Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T17:51:56.700537Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2024-11-21T17:51:56.700541Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2024-11-21T17:51:56.700546Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2024-11-21T17:51:56.710787Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2024-11-21T17:51:56.711536Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:379:2374], Recipient [2:725:2602] 
2024-11-21T17:51:56.711569Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2024-11-21T17:51:56.711876Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2024-11-21T17:51:56.711890Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T17:51:56.711989Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:710:2593], Recipient [2:718:2598]: NKikimr::TEvTablet::TEvFollowerGcApplied 2024-11-21T17:51:57.270221Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:938:2644], Recipient [2:630:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 938 RawX2: 8589937236 } TxBody: " \0008\000`\200\200\200\005j\322\006\010\001\022\223\006\010\001\022\024\n\022\t\252\003\000\000\000\000\000\000\021T\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\00 2024-11-21T17:51:57.270257Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:57.270293Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2024-11-21T17:51:57.270474Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2024-11-21T17:51:57.270575Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:56.662360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:56.662386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.662391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:56.662397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:56.662412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:56.662416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:56.662424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.662515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:56.671539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:56.671556Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:56.673616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:56.674324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:56.674372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:56.675778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:56.675923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:56.675992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.676043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:56.676882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.677159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.677168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.677207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:56.677213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.677219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:56.677230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.678405Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:56.691784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:56.691839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.691889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:56.691926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:56.691931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.692475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.692492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:56.692528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.692534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:56.692537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:56.692541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:56.692871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.692880Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:56.692884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:56.693271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.693284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.693287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.693291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.693716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:56.694115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:56.694169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:56.694345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.694368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 
AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:56.694376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.694424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:56.694431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.694454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.694464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:56.694944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.694952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.694978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.694982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:56.695046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.695051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:56.695073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:56.695077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.695084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:56.695089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.695093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:56.695097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:56.695108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:56.695112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:56.695115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:56.695373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.695387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.695391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, 
at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:56.695395Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:56.695399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.695413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... to tablet: 72057594046316545 cookie: 0:107 msg type: 269090816 2024-11-21T17:51:57.693644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 107 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004 2024-11-21T17:51:57.693834Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:57.693858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:57.693868Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:51:57.693883Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136 2024-11-21T17:51:57.694607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.694623Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:51:57.694632Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation 2024-11-21T17:51:57.694636Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 107 2024-11-21T17:51:57.695048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 148 } } 2024-11-21T17:51:57.695054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2024-11-21T17:51:57.695065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 148 } } 2024-11-21T17:51:57.695073Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# 
TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 148 } } 2024-11-21T17:51:57.695337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 8589936898 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2024-11-21T17:51:57.695343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2024-11-21T17:51:57.695351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 8589936898 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2024-11-21T17:51:57.695355Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:51:57.695401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.695406Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:51:57.695411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:51:57.695414Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2024-11-21T17:51:57.695424Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:51:57.695446Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2024-11-21T17:51:57.695461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:57.695469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:51:57.696259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.696733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.696807Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:57.696817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:57.696860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:51:57.696889Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:57.696893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 
72057594046678944, txId: 107, path id: 1 2024-11-21T17:51:57.696897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2024-11-21T17:51:57.696996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.697004Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:51:57.697022Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.697026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:51:57.697032Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T17:51:57.697232Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:51:57.697247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:51:57.697251Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T17:51:57.697256Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:51:57.697263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:57.697470Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:51:57.697487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:51:57.697493Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T17:51:57.697498Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:51:57.697506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:51:57.697526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T17:51:57.698118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.698127Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:57.698199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, 
LocalPathId: 2] was 4 2024-11-21T17:51:57.698220Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T17:51:57.698223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:51:57.698227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T17:51:57.698237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:380:2345] message: TxId: 107 2024-11-21T17:51:57.698239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:51:57.698243Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T17:51:57.698245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T17:51:57.698260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:51:57.698363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:51:57.698888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:51:57.699023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:51:57.699032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:567:2530] TestWaitNotification: OK eventTxId 107 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:56.711867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:56.711886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.711889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:56.711893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:56.711906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:56.711908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:56.711914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.711985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:56.719272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:56.719302Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2024-11-21T17:51:56.722034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:56.722608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:56.722640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:56.724080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:56.724315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:56.724384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.724446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:56.725896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.726176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.726185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.726213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:56.726219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.726224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:56.726239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.727293Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:56.738894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:56.738973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.739029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:56.739075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:56.739080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.739760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.739778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:56.739820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.739827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:56.739830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:56.739834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:56.740114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.740120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:56.740122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:56.740411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.740419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.740424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.740429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.740884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:56.741232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:56.741274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:56.741413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.741440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:56.741445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.741499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:56.741504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.741534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.741549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:56.741929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.741936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.741970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.741974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:56.742037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.742042Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:56.742050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:56.742052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.742057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:56.742060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.742063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:56.742065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:56.742073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:56.742077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:56.742080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:56.742314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.742323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.742326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:56.742329Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:56.742332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.742342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
lator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 9 2024-11-21T17:51:57.749585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.749591Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:51:57.749604Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.749607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T17:51:57.749611Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2024-11-21T17:51:57.749691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2024-11-21T17:51:57.749696Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2024-11-21T17:51:57.749705Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2024-11-21T17:51:57.749709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2024-11-21T17:51:57.749718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2024-11-21T17:51:57.749775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2024-11-21T17:51:57.749779Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2024-11-21T17:51:57.749786Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2024-11-21T17:51:57.749790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:51:57.749795Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2024-11-21T17:51:57.749837Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.749844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.749847Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:51:57.749850Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T17:51:57.749853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2024-11-21T17:51:57.749907Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.749913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.749915Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:51:57.749918Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 18446744073709551615 2024-11-21T17:51:57.749920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2024-11-21T17:51:57.750056Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750067Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:51:57.750070Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T17:51:57.750074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:57.750340Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750358Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:51:57.750408Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750418Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:51:57.750448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:51:57.750453Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:57.750517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T17:51:57.750543Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 
2024-11-21T17:51:57.750546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2024-11-21T17:51:57.750550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2024-11-21T17:51:57.750646Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:51:57.750657Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:51:57.750661Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2024-11-21T17:51:57.750666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2024-11-21T17:51:57.750676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2024-11-21T17:51:57.751076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2024-11-21T17:51:57.751084Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:57.751117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2024-11-21T17:51:57.751131Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2024-11-21T17:51:57.751133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2024-11-21T17:51:57.751136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2024-11-21T17:51:57.751150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:421:2376] message: TxId: 105 2024-11-21T17:51:57.751155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2024-11-21T17:51:57.751159Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:51:57.751163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:51:57.751181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T17:51:57.751186Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2024-11-21T17:51:57.751189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2024-11-21T17:51:57.751192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2024-11-21T17:51:57.751194Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2024-11-21T17:51:57.751195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2024-11-21T17:51:57.751199Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2024-11-21T17:51:57.751305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:51:57.751384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:51:57.751870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:51:57.751884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:51:57.751892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:51:57.751917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:51:57.752263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:51:57.752271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:923:2853] TestWaitNotification: OK eventTxId 105 >> DataShardTxOrder::ZigZag [GOOD] >> TPartitionWriterCacheActorTests::WriteReplyOrder >> IndexBuildTest::Lock ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2024-11-21T17:51:39.739592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791739024870425:2103];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:39.739659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f3f/r3tmp/tmpBB1MmV/pdisk_1.dat 2024-11-21T17:51:39.799298Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23175, node 1 2024-11-21T17:51:39.810788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:39.810805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:39.810806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:39.810844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13741 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:39.838499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:39.838523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:39.839573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:39.874058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.875228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:39.875241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.875366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:39.875401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:39.875405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:39.875504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:39.875506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:39.875536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.875858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211499921, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:39.875863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:39.875930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:39.876015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:39.876045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:39.876052Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:39.876059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:39.876065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:39.876071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:51:39.876594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:39.876603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:39.876607Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:39.876622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T17:51:39.886708Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:13741 waiting... 2024-11-21T17:51:39.924147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.924219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:39.924224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.924357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:39.924378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.924635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211499970, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:39.924640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732211499970, at schemeshard: 72057594046644480 2024-11-21T17:51:39.924708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:39.924725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:39.924732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T17:51:39.924814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:39.924844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:39.924973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 
281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:39.925103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:39.925108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:39.925111Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:39.925120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:51:39.925482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.925526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:39.925530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.925540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:51:39.925551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:51:39.925554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T17:51:39.925621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:39.925635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:39.925661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:39.925841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:51:39.925845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:51:39.925847Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:39.925855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T17:51:39.926453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:39.926497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:39.926620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
... ORY, path: /Root/SQS/cloud4/000000000000000101v0 2024-11-21T17:51:57.713085Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:57.713133Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:57.713153Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:57.713195Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715700, at schemeshard: 72057594046644480 2024-11-21T17:51:57.713294Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T17:51:57.713302Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T17:51:57.713306Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2024-11-21T17:51:57.713383Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T17:51:57.713389Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T17:51:57.713391Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 3 2024-11-21T17:51:57.714587Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211517764, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:57.714604Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211517764, at schemeshard: 72057594046644480 2024-11-21T17:51:57.714639Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715700:0 128 -> 240 2024-11-21T17:51:57.714789Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:57.714840Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:57.714859Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715700:0 ProgressState 2024-11-21T17:51:57.714876Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715700:0 progress is 1/1 2024-11-21T17:51:57.714921Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715700:0 2024-11-21T17:51:57.714942Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715700, publications: 2, subscribers: 1 2024-11-21T17:51:57.715027Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T17:51:57.715039Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T17:51:57.715042Z node 6 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 6 2024-11-21T17:51:57.715075Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T17:51:57.715081Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T17:51:57.715083Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 4 2024-11-21T17:51:57.715089Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715700, subscribers: 1 2024-11-21T17:51:57.715654Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000101v0/v2, operationId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.715711Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715701:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:57.715860Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715701, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000101v0/v2 2024-11-21T17:51:57.715889Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:57.715928Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:57.715944Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715701:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:57.716025Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T17:51:57.716036Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T17:51:57.716038Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 5 2024-11-21T17:51:57.716059Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T17:51:57.716065Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T17:51:57.716066Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 3 2024-11-21T17:51:57.716121Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715701, at schemeshard: 72057594046644480 2024-11-21T17:51:57.721852Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211517771, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:57.721872Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715701:0 
HandleReply TEvPrivate::TEvOperationPlan, step: 1732211517771, at schemeshard: 72057594046644480 2024-11-21T17:51:57.721908Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715701:0 128 -> 240 2024-11-21T17:51:57.722093Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:57.722136Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:57.722147Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715701:0 ProgressState 2024-11-21T17:51:57.722160Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715701:0 progress is 1/1 2024-11-21T17:51:57.722170Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715701:0 2024-11-21T17:51:57.722184Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715701, publications: 2, subscribers: 1 2024-11-21T17:51:57.722287Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T17:51:57.722302Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T17:51:57.722306Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 6 2024-11-21T17:51:57.722336Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715701 2024-11-21T17:51:57.722344Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715701 2024-11-21T17:51:57.722345Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715701, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 4 2024-11-21T17:51:57.722351Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715701, subscribers: 1 2024-11-21T17:51:57.739426Z node 6 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [fd94e257-36f95daf-270e6980-c477a260] Got succesfult GRPC response. 
2024-11-21T17:51:57.739460Z node 6 :HTTP_PROXY INFO: http request [CreateQueue] requestId [fd94e257-36f95daf-270e6980-c477a260] reply ok 2024-11-21T17:51:57.739518Z node 6 :HTTP DEBUG: (#44,[::1]:47640) <- (200 ) 2024-11-21T17:51:57.739594Z node 6 :HTTP DEBUG: (#44,[::1]:47640) connection closed Http output full {"QueueUrl":"http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName.fifo"} 2024-11-21T17:51:57.739955Z node 6 :HTTP DEBUG: (#44,[::1]:47650) incoming connection opened 2024-11-21T17:51:57.739998Z node 6 :HTTP DEBUG: (#44,[::1]:47650) -> (POST /Root) 2024-11-21T17:51:57.740038Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [18c3:ddbf:5d55:0:c3:ddbf:5d55:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 8da30963-2941ed1c-9a984e67-c3109ba5 2024-11-21T17:51:57.740200Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [8da30963-2941ed1c-9a984e67-c3109ba5] got new request from [18c3:ddbf:5d55:0:c3:ddbf:5d55:0] 2024-11-21T17:51:57.741201Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [8da30963-2941ed1c-9a984e67-c3109ba5] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:51:57.741209Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [8da30963-2941ed1c-9a984e67-c3109ba5] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:51:57.753895Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [8da30963-2941ed1c-9a984e67-c3109ba5] Got succesfult GRPC response. 2024-11-21T17:51:57.753942Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [8da30963-2941ed1c-9a984e67-c3109ba5] reply ok 2024-11-21T17:51:57.753993Z node 6 :HTTP DEBUG: (#44,[::1]:47650) <- (200 ) 2024-11-21T17:51:57.754061Z node 6 :HTTP DEBUG: (#44,[::1]:47650) connection closed Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"397a0387-1464ae72-8ef7eb1c-3e35cdd0"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"301418a4-b16342b6-2d3e2885-8206d4f5"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> VectorIndexBuildTest::BaseCase >> IndexBuildTest::CancellationNotEnoughRetries >> IndexBuildTest::WithFollowers >> TPQTest::TestReadAndDeleteConsumer [FAIL] >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2024-11-21T17:51:54.519636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:54.519662Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:54.519686Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:54.522858Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:54.523007Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:51:54.523064Z node 1 :TX_DATASHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T17:51:54.523941Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:54.532665Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:54.532821Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:54.532955Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:51:54.532970Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:51:54.532977Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:51:54.533015Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:54.536593Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:51:54.536667Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:54.536721Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:51:54.536726Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:51:54.536731Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:51:54.536736Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:54.536831Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.536838Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.536862Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:51:54.536885Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:51:54.536934Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:54.536941Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:54.536947Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:51:54.536953Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:54.536957Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:54.536963Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:51:54.536968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:54.545365Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.545391Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.545402Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:51:54.545871Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 
\"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:51:54.545887Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:54.545909Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:51:54.545938Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:51:54.545946Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:51:54.545953Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:51:54.545960Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:54.545963Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:51:54.545967Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:51:54.545969Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:54.546025Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:54.546027Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:51:54.546031Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:51:54.546035Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:54.546046Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:51:54.546049Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:51:54.546052Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:51:54.546055Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:54.546060Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:51:54.567136Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:51:54.567162Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:54.567183Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:54.567194Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:51:54.567209Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:54.567313Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.567320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.567328Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:51:54.567348Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: 
{TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:51:54.567352Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:54.567398Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:54.567408Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.567412Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:51:54.567417Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:51:54.568074Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:51:54.568088Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:54.568145Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.568151Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.568159Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:54.568166Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:54.568171Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:54.568179Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:51:54.568184Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:51:54.568192Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.568196Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:54.568201Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:54.568204Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:51:54.568268Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:51:54.568275Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.568277Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:54.568281Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:51:54.568284Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:51:54.568296Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:54.568299Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:51:54.568302Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:54.568306Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:54.568318Z node 1 
:TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:51:54.568322Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:51:54.568325Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:51:54.568331Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:54.568334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:54.568337Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... aitInRS 2024-11-21T17:51:58.289803Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:51:58.289806Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T17:51:58.289808Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T17:51:58.289812Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T17:51:58.289881Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T17:51:58.289890Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:51:58.289899Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:51:58.289902Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T17:51:58.289904Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T17:51:58.289906Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T17:51:58.289939Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T17:51:58.289941Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T17:51:58.289943Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T17:51:58.289945Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T17:51:58.289948Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:51:58.289950Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T17:51:58.289952Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T17:51:58.289954Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:58.289956Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T17:51:58.289958Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T17:51:58.289962Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T17:51:58.289997Z node 2 :TX_DATASHARD TRACE: StateWork, 
received event# 2146435072, Sender [2:231:2226], Recipient [2:231:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:58.290003Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:58.290024Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:58.290028Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:58.290031Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:58.290035Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2024-11-21T17:51:58.290038Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2024-11-21T17:51:58.290041Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290043Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:58.290045Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:58.290047Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2024-11-21T17:51:58.290112Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T17:51:58.290114Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290116Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:58.290118Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T17:51:58.290120Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T17:51:58.290123Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290124Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T17:51:58.290126Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:58.290128Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:58.290132Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2024-11-21T17:51:58.290134Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2024-11-21T17:51:58.290136Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2024-11-21T17:51:58.290139Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290141Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:58.290143Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T17:51:58.290145Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2024-11-21T17:51:58.290149Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290150Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit 
BuildDataTxOutRS 2024-11-21T17:51:58.290152Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T17:51:58.290154Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2024-11-21T17:51:58.290156Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290158Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2024-11-21T17:51:58.290161Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T17:51:58.290163Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2024-11-21T17:51:58.290166Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290167Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T17:51:58.290169Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T17:51:58.290171Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2024-11-21T17:51:58.290173Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290175Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T17:51:58.290177Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:51:58.290179Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T17:51:58.290208Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T17:51:58.290212Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:51:58.290216Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290217Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:51:58.290219Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T17:51:58.290222Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T17:51:58.290248Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T17:51:58.290250Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T17:51:58.290251Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2024-11-21T17:51:58.290253Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T17:51:58.290256Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:51:58.290258Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T17:51:58.290259Z node 2 :TX_DATASHARD TRACE: Execution plan 
for [1000016:45] at 9437184 has finished 2024-11-21T17:51:58.290261Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:58.290263Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:58.290265Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:58.290266Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:58.301264Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T17:51:58.301287Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T17:51:58.301299Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:58.301307Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T17:51:58.301330Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:51:58.301339Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:58.301441Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T17:51:58.301446Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T17:51:58.301452Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T17:51:58.301456Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T17:51:58.301463Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:51:58.301467Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted |83.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |83.4%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> IndexBuildTest::WithFollowers [GOOD] >> SystemView::TopPartitionsTables [GOOD] >> SystemView::TopPartitionsRanges |83.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest 
>> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:58.709916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:58.709934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.709939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:58.709944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:58.709957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:58.709960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:58.709968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.710038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:58.720553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:58.720574Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:58.723310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:58.724084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:58.724128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:58.725785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:58.726043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:58.726141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.726228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:58.727336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.727590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.727601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.727635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:58.727641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.727648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:58.727660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.729091Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:58.746399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:58.746472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.746529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:58.746580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:58.746590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.747239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.747264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:58.747312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.747322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:58.747327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:58.747332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:58.747731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.747743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:58.747748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:58.748128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.748141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.748147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.748154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.748602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2024-11-21T17:51:58.749037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:58.749093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:58.749279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.749366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:58.749374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.749400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.749414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:58.749844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.749894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:58.749981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749988Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:58.750000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:58.750005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.750011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:58.750017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.750021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:58.750025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:58.750036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:58.750042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:58.750047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:58.750355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.750371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.750376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:58.750381Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:58.750386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.750398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 04, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:51:58.992611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.992615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T17:51:58.992620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T17:51:58.992624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T17:51:58.992627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 4 2024-11-21T17:51:58.992708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T17:51:58.992716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:2 ProgressState at tablet: 72057594046678944 2024-11-21T17:51:58.992731Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T17:51:58.992736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:2, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T17:51:58.992740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:2 129 -> 240 2024-11-21T17:51:58.992799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.992806Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:1 ProgressState 2024-11-21T17:51:58.992814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2024-11-21T17:51:58.992818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 
2024-11-21T17:51:58.992823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2024-11-21T17:51:58.992884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.992895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.992900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:51:58.992904Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:51:58.992909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:51:58.993029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.993040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.993043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:51:58.993047Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T17:51:58.993051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:51:58.993367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.993382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.993386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:51:58.993451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.993459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:51:58.993464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:51:58.993468Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:51:58.993473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:51:58.993487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T17:51:58.993625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T17:51:58.993632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:58.993723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:51:58.993760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2024-11-21T17:51:58.993765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T17:51:58.993770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T17:51:58.993779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 104 2024-11-21T17:51:58.993784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T17:51:58.993789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:51:58.993795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:51:58.993812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:51:58.993816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T17:51:58.993819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2024-11-21T17:51:58.993823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:51:58.993826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T17:51:58.993829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T17:51:58.993835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:58.994350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:51:58.994744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:51:58.994759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:51:58.994790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:51:58.995080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:51:58.995087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:685:2646] 
TestWaitNotification: OK eventTxId 104 2024-11-21T17:51:58.995176Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:58.995224Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 42us result status StatusSuccess 2024-11-21T17:51:58.995318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "valueFloat" Type: "Float" TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 
2024-11-21T17:51:58.571119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:58.571144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.571148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:58.571151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:58.571162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:58.571165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:58.571170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.571229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:58.578499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:58.578517Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:58.581060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:58.581680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:58.581715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:58.583037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:58.583207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:58.583300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.583370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:58.584182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.584437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.584447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.584473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:58.584478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.584483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:58.584493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.585742Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:58.599683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:58.599755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.599826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:58.599868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:58.599874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.600510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.600534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:58.600582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.600590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:58.600594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:58.600599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:58.601060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.601070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:58.601075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:58.601402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.601410Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.601415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.601422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.602008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:58.602371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:58.602414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:58.602580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.602601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:58.602611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.602662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:58.602667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.602693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.602704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:58.603116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.603123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.603164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.603168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:58.603246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.603253Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:58.603263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:58.603268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.603275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:58.603280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.603284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:58.603287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:58.603298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:58.603303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:58.603307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T17:51:58.603595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.603608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.603613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:58.603617Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:58.603622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.603636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... hanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:59.275771Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:59.275801Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 32us result status StatusSuccess 2024-11-21T17:51:59.275908Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:59.275959Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:51:59.275994Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 36us result status StatusSuccess 2024-11-21T17:51:59.276108Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBackupCollectionWithRebootsTests::CreateDroppedAndDropWithReboots >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:55.580400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:55.580429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:55.580437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:55.580442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:55.580457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:55.580461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:55.580471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:55.580568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:55.592848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:55.592877Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:55.597645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:55.597734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:55.597769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:55.599153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:55.599213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:55.599329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:55.599394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:55.599766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:55.600029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:55.600039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:55.600071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:55.600077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:55.600083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:55.600099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.602174Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:51:55.620574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:55.620668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:51:55.620737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:55.620784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:55.620792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.621552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:55.621594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:55.621651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.621661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:55.621665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:55.621670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:55.622156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.622174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:55.622179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:55.622518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.622526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.622532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:55.622538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:55.623146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:55.624257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:55.624316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:55.624491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:55.624518Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:55.624535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:55.624597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:55.624604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:55.624635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:55.624647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:55.625058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:55.625066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:55.625105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:55.625110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:55.625181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.625188Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:55.625199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:55.625203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:55.625209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:55.625213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:55.625218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:55.625222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:55.625232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:55.625238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:55.625242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:55.625541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:55.625554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046678944, cookie: 1 2024-11-21T17:51:55.625558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:55.625562Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:55.625566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:55.625583Z node 1 ... 2024-11-21T17:51:59.383002Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.383007Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2024-11-21T17:51:59.383010Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:59.383250Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.383265Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.383269Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.383273Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T17:51:59.383277Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:51:59.383288Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T17:51:59.383606Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T17:51:59.383626Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T17:51:59.383697Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:59.383714Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:59.383723Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1004:0, HandleReply TEvOperationPlan: step# 5000005 2024-11-21T17:51:59.383736Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId 
[OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:51:59.383749Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 1 -> 240 2024-11-21T17:51:59.383770Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:59.383778Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:51:59.383939Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.384136Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:51:59.384455Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:59.384462Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:51:59.384480Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:51:59.384499Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:59.384504Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:51:59.384508Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2024-11-21T17:51:59.384549Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.384556Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:51:59.384566Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:51:59.384570Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:51:59.384575Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:51:59.384579Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:51:59.384582Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:51:59.384586Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:51:59.384595Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:51:59.384600Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T17:51:59.384603Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2024-11-21T17:51:59.384607Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:51:59.384665Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.384677Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.384681Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.384685Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:51:59.384689Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:51:59.384724Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:59.384729Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:51:59.384736Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:59.384764Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.384771Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.384774Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.384778Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:51:59.384781Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:59.384789Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:51:59.385463Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.385487Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:59.385498Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:51:59.385539Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:51:59.385546Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:51:59.385599Z node 16 :FLAT_TX_SCHEMESHARD WARN: 
NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:51:59.385615Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:51:59.385619Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [16:382:2374] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:51:59.385678Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:59.385706Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 35us result status StatusPathDoesNotExist 2024-11-21T17:51:59.385739Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2024-11-21T17:50:58.159173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791563423350131:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:50:58.159258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001e08/r3tmp/tmpJ3mmG5/pdisk_1.dat 2024-11-21T17:50:58.189689Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created TServer::EnableGrpc on GrpcPort 7218, node 1 2024-11-21T17:50:58.217692Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:50:58.227651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001e08/r3tmp/yandexUB8a5f.tmp 2024-11-21T17:50:58.227664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001e08/r3tmp/yandexUB8a5f.tmp 2024-11-21T17:50:58.227715Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001e08/r3tmp/yandexUB8a5f.tmp 2024-11-21T17:50:58.227754Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:50:58.233463Z INFO: TTestServer started on Port 12409 GrpcPort 7218 TClient is connected to server localhost:12409 PQClient connected to localhost:7218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:50:58.259057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:50:58.259078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:50:58.260147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:50:58.286394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:50:58.293009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:50:58.489236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563423350745:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.489273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563423350740:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.489342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.489967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:50:58.490941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791563423350767:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.491072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:58.491683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791563423350754:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:50:58.514001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.520755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:58.537504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439791563423351096:2596] 2024-11-21T17:51:03.158294Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791563423350131:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:03.158331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:03.743404Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T17:51:03.752598Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T17:51:03.753011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439791584898187889:2762], Recipient [1:7439791563423350435:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.753016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:03.753018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:51:03.753026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439791584898187885:2759], Recipient [1:7439791563423350435:2214]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2024-11-21T17:51:03.753028Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:51:03.760814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:03.760938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:51:03.761021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T17:51:03.761042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T17:51:03.761048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T17:51:03.761055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T17:51:03.761069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T17:51:03.761102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:03.761243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:51:03.761258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2024-11-21T17:51:03.761270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2024-11-21T17:51:03.761498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusAccepted TxId: 281474976715674 SchemeshardId: 72057594046644480 PathId: 13, at schemeshard: 72057594046644480 2024-11-21T17:51:03.761529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/test-topic 2024-11-21T17:51:03.761543Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:51:03.761552Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715674:0 2024-11-21T17:51:03.761616Z node 1 :FLAT_TX_SCHEMESHARD TR ... 
[0:0:0], Recipient [5:7439791783295621358:2767], Cookie: 0 2024-11-21T17:51:59.150100Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [5:7439791783295621358:2767]: NActors::TEvents::TEvWakeup 2024-11-21T17:51:59.150102Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [5:7439791783295621355:2765]: NActors::TEvents::TEvWakeup 2024-11-21T17:51:59.150141Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [5:7439791783295621355:2765], Recipient [5:7439791783295621277:2757]: NKikimr::TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.150159Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.150161Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [5:7439791783295621358:2767], Recipient [5:7439791783295621284:2758]: NKikimr::TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.150167Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.150167Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvPartitionCounters PartitionId 2 2024-11-21T17:51:59.150171Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Handle TEvPQ::TEvPartitionCounters PartitionId 1 2024-11-21T17:51:59.150207Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439791783295621358:2767], Recipient [5:7439791783295621284:2758]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T17:51:59.150222Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2024-11-21T17:51:59.155211Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 65538, Sender [0:0:0], Recipient [5:7439791804770458484:2929]: NActors::TEvents::TEvWakeup 2024-11-21T17:51:59.155211Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 65538, Sender [0:0:0], Recipient [5:7439791804770458478:2928]: NActors::TEvents::TEvWakeup 2024-11-21T17:51:59.156187Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7439791783295621284:2758], Partition 1, Sender [5:7439791783295621366:2770], Recipient [5:7439791783295621358:2767], Cookie: 0 2024-11-21T17:51:59.156187Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7439791783295621277:2757], Partition 2, Sender [5:7439791783295621364:2769], Recipient [5:7439791783295621355:2765], Cookie: 0 2024-11-21T17:51:59.156195Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7439791783295621364:2769], Recipient [5:7439791783295621355:2765]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.156199Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.156205Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7439791783295621366:2770], Recipient [5:7439791783295621358:2767]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.156207Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.158239Z node 5 :PERSQUEUE TRACE: StateIdle event# 65538 (NActors::TEvents::TEvWakeup), Tablet [5:7439791804770458478:2928], Partition 4, Sender [0:0:0], Recipient [5:7439791804770458575:2940], Cookie: 0 2024-11-21T17:51:59.158242Z node 5 :PERSQUEUE TRACE: StateIdle event# 65538 (NActors::TEvents::TEvWakeup), Tablet 
[5:7439791804770458484:2929], Partition 3, Sender [0:0:0], Recipient [5:7439791804770458578:2942], Cookie: 0 2024-11-21T17:51:59.158250Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [5:7439791804770458578:2942]: NActors::TEvents::TEvWakeup 2024-11-21T17:51:59.158257Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [5:7439791804770458575:2940]: NActors::TEvents::TEvWakeup 2024-11-21T17:51:59.158302Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [5:7439791804770458578:2942], Recipient [5:7439791804770458484:2929]: NKikimr::TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.158310Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [5:7439791804770458575:2940], Recipient [5:7439791804770458478:2928]: NKikimr::TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.158312Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.158313Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2024-11-21T17:51:59.158316Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvPQ::TEvPartitionCounters PartitionId 4 2024-11-21T17:51:59.158317Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Handle TEvPQ::TEvPartitionCounters PartitionId 3 2024-11-21T17:51:59.160814Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7439791804770458484:2929], Partition 3, Sender [5:7439791804770458581:2944], Recipient [5:7439791804770458578:2942], Cookie: 0 2024-11-21T17:51:59.160818Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7439791804770458478:2928], Partition 4, Sender [5:7439791804770458589:2948], Recipient [5:7439791804770458575:2940], Cookie: 0 2024-11-21T17:51:59.160830Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7439791804770458581:2944], Recipient [5:7439791804770458578:2942]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.160830Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7439791804770458589:2948], Recipient [5:7439791804770458575:2940]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.160833Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.160833Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2024-11-21T17:51:59.179765Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791804770458478:2928], Partition 4, Sender [0:0:0], Recipient [5:7439791804770458575:2940], Cookie: 0 2024-11-21T17:51:59.179787Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791804770458575:2940]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.179790Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.179799Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:59.179816Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:59.179818Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:59.179821Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:59.179828Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791804770458484:2929], Partition 3, Sender [0:0:0], Recipient [5:7439791804770458578:2942], Cookie: 0 2024-11-21T17:51:59.179830Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791804770458578:2942]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.179831Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.179833Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:59.179837Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:59.179838Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:59.179839Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:59.190024Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791757525816246:2434], Partition 0, Sender [0:0:0], Recipient [5:7439791757525816305:2438], Cookie: 0 2024-11-21T17:51:59.190059Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791757525816305:2438]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.190062Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.190072Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:59.190094Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:59.190097Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:59.190102Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T17:51:59.205348Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791783295621284:2758], Partition 1, Sender [0:0:0], Recipient [5:7439791783295621358:2767], Cookie: 0 2024-11-21T17:51:59.205359Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439791783295621277:2757], Partition 2, Sender [0:0:0], Recipient [5:7439791783295621355:2765], Cookie: 0 2024-11-21T17:51:59.205370Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791783295621358:2767]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.205370Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439791783295621355:2765]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.205373Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.205373Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T17:51:59.205380Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:59.205380Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T17:51:59.205397Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:59.205397Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T17:51:59.205399Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:59.205399Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T17:51:59.205403Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T17:51:59.205404Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::SimpleDropWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:55.846798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:55.846819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:55.846822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:55.846826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:55.846836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:55.846838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:55.846844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:55.846922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:55.855040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:55.855057Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:55.856958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:55.857007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:55.857030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:55.858056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:55.858116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:55.858246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:55.858320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:55.858749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:55.858996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:55.859009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:55.859034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:55.859041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:55.859047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:55.859063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.860143Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:51:55.874983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:55.875064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.875120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:55.875156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:55.875161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.875925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:55.875951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:55.876004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.876014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:55.876018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:55.876023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:55.876464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.876475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:55.876480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:55.876812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.876820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.876826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:55.876832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:55.877391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:55.877777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:55.877820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:55.877976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:55.877997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:55.878015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:55.878065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:55.878070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:55.878094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:55.878117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:55.878465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:55.878471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:55.878510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:55.878515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:55.878579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:55.878584Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:55.878594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:55.878598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:55.878603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:55.878607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:55.878611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 1:0 2024-11-21T17:51:55.878614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:55.878624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:55.878629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:55.878633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:55.878879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:55.878890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:55.878894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:55.878909Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:55.878914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:55.878924Z node 1 ... 2024-11-21T17:51:59.646390Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.646393Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2024-11-21T17:51:59.646395Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:59.646585Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.646592Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.646595Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.646597Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T17:51:59.646599Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:51:59.646605Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T17:51:59.646859Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2024-11-21T17:51:59.646876Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 
4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2024-11-21T17:51:59.646948Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:59.646962Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:59.646969Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1004:0, HandleReply TEvOperationPlan: step# 5000005 2024-11-21T17:51:59.646980Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:51:59.646991Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 1 -> 240 2024-11-21T17:51:59.647010Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:59.647015Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:51:59.647131Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.647267Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:51:59.647447Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:59.647451Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:51:59.647465Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:51:59.647479Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:59.647482Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:51:59.647484Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2024-11-21T17:51:59.647514Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.647518Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2024-11-21T17:51:59.647526Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:51:59.647528Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:51:59.647532Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:51:59.647535Z node 16 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:51:59.647538Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:51:59.647540Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:51:59.647547Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:51:59.647550Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T17:51:59.647553Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2024-11-21T17:51:59.647555Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:51:59.647592Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.647600Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.647603Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.647606Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:51:59.647608Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:51:59.647630Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:51:59.647632Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:51:59.647648Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:51:59.647670Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.647675Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.647677Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:51:59.647679Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:51:59.647680Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:51:59.647685Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:51:59.648262Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:51:59.648292Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:51:59.648300Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:51:59.648338Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:51:59.648343Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:51:59.648394Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:51:59.648407Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:51:59.648410Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [16:382:2374] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:51:59.648457Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:51:59.648479Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 30us result status StatusPathDoesNotExist 2024-11-21T17:51:59.648506Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2024-11-21T17:51:53.942754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:53.943280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:53.943306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000af7/r3tmp/tmpdDoDs6/pdisk_1.dat 2024-11-21T17:51:54.053131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:54.058189Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T17:51:54.058217Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T17:51:54.073873Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:54.119773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:54.119824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:54.130271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:54.234244Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T17:51:54.234271Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T17:51:54.234324Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T17:51:54.234422Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T17:51:54.234432Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T17:51:54.234891Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T17:51:54.234922Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T17:51:54.234926Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T17:51:54.234931Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T17:51:54.235943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:54.250266Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:54.250475Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:54.250562Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: 
tablet 72075186224037888 actor [1:631:2537] 2024-11-21T17:51:54.250609Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:54.258281Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:622:2531], Recipient [1:631:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:54.258470Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:54.258498Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:54.258653Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:51:54.258674Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:51:54.258681Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:51:54.258725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:54.262441Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:51:54.262522Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:54.262560Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:647:2546] 2024-11-21T17:51:54.262565Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:51:54.262570Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:51:54.262574Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:51:54.262715Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:631:2537], Recipient [1:631:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.262722Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:54.262845Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:51:54.262863Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:51:54.262881Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2539], Recipient [1:631:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.262885Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:54.262891Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:627:2534], serverId# [1:636:2539], sessionId# [0:0:0] 2024-11-21T17:51:54.262897Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:51:54.262902Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:54.262907Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:51:54.262928Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:51:54.262931Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:51:54.262935Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:51:54.262940Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:51:54.262959Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:636:2539] 2024-11-21T17:51:54.262962Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:54.262982Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:51:54.263028Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:51:54.263037Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:51:54.263052Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:51:54.263058Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:51:54.263062Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:51:54.263066Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:51:54.263070Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:51:54.263111Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:54.263115Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:51:54.263118Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:51:54.263121Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:51:54.263132Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:51:54.263135Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:51:54.263138Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:51:54.263141Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:51:54.263145Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:51:54.263318Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:51:54.263328Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:51:54.263332Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:51:54.263340Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:51:54.263348Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:54.263775Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0 2024-11-21T17:51:54.263790Z node 1 
:TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 501 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T17:51:54.263844Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:648:2547], Recipient [1:631:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:51:54.263850Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:51:54.437092Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned 2024-11-21T17:51:54.437119Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T17:51:54.437126Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72075186224037888 2024-11-21T17:51:54.437193Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1500 in 0.500000s at 1.450000s 2024-11-21T17:51:54.437273Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1000, txid# 281474976715 ... :51:59.566561Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1636:2420], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:51:59.566580Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1636:2420], 0} finished in read 2024-11-21T17:51:59.566588Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T17:51:59.566591Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:51:59.566595Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:51:59.566598Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:51:59.566606Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T17:51:59.566607Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:51:59.566610Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2024-11-21T17:51:59.566613Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:51:59.566626Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:51:59.566788Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1636:2420], Recipient [3:1215:2358]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:51:59.566794Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T17:51:59.577388Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmc2f3btt351jq014851g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWFjZmVmNjgtYzFlZDMxN2UtNjdlNGQxOGItYzFkMzJlODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:59.577913Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1654:2421], Recipient [3:1215:2358]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:51:59.577943Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:51:59.577956Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T17:51:59.577970Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:59.577973Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:51:59.577977Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:59.577979Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:51:59.577990Z node 3 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T17:51:59.577993Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:59.577995Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:59.577997Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:51:59.577999Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:51:59.578011Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T17:51:59.578061Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:51:59.578067Z node 3 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2024-11-21T17:51:59.578070Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1654:2421], 0} after executionsCount# 1 2024-11-21T17:51:59.578075Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1654:2421], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:51:59.578087Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1654:2421], 0} finished in read 2024-11-21T17:51:59.578095Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:59.578097Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:51:59.578099Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:51:59.578101Z node 3 :TX_DATASHARD 
TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:51:59.578108Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:51:59.578110Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:51:59.578112Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T17:51:59.578115Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:51:59.578129Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:51:59.578272Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1654:2421], Recipient [3:1215:2358]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:51:59.578277Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2024-11-21T17:51:59.600848Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmc2vb1r1nxm2z0af6pdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTgwMTRhYmItYjFjNTMzMzAtNjIyNGZjNzMtOTYzZTdiNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:59.601524Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1678:2422], Recipient [3:1215:2358]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:51:59.601570Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:51:59.601589Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2024-11-21T17:51:59.601612Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:51:59.601618Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:51:59.601624Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:59.601628Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:51:59.601645Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2024-11-21T17:51:59.601650Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:51:59.601653Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:59.601657Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:51:59.601660Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:51:59.601677Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 
MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T17:51:59.601754Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:51:59.601761Z node 3 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T17:51:59.601765Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1678:2422], 0} after executionsCount# 1 2024-11-21T17:51:59.601772Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1678:2422], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:51:59.601786Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1678:2422], 0} finished in read 2024-11-21T17:51:59.601795Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:51:59.601798Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:51:59.601800Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:51:59.601802Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:51:59.601809Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T17:51:59.601811Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:51:59.601817Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T17:51:59.601820Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:51:59.601839Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:51:59.602018Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1678:2422], Recipient [3:1215:2358]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:51:59.602024Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T17:51:59.602153Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [3:195:2110], Recipient [3:1215:2358]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 2 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |83.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict >> Yq_1::CreateQuery_Without_Connection [GOOD] |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] >> TBackupCollectionWithRebootsTests::ParallelCreateDrop |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:58.905596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:58.905616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.905620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:58.905623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:58.905633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:58.905636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:58.905642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.905698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:58.914554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:58.914571Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:58.916777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:58.917354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:58.917387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:58.918690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:58.918869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:58.918962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.919016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2024-11-21T17:51:58.919918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.920152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.920162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.920197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:58.920203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.920209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:58.920221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.921456Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:58.934568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:58.934633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.934680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:58.934729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:58.934738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.935263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.935283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:58.935320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.935328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:58.935332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:58.935337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:58.935655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.935664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:58.935668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 
2024-11-21T17:51:58.936029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.936044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.936049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.936055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.936594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:58.936957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:58.936999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:58.937145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.937165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:58.937171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.937214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:58.937219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.937245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.937253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:58.937637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.937645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.937675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.937678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:58.937741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:51:58.937748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:58.937758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:58.937761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.937767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:58.937785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.937789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:58.937792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:58.937804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:58.937809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:58.937812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:58.938094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.938113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.938118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:58.938122Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:58.938127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.938140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
r: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:00.549836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding/indexImplLevelTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:52:00.549873Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding/indexImplLevelTable" took 38us result status StatusSuccess 2024-11-21T17:52:00.549976Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { 
GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 12345 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 54321 } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\00090\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409556 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0001\324\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409557 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409558 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:00.550032Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding/indexImplPostingTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:52:00.550049Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding/indexImplPostingTable" took 19us result status StatusSuccess 2024-11-21T17:52:00.550112Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "covered" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 12345 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 54321 } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\00090\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\004\000\000\0001\324\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBackupCollectionWithRebootsTests::CreateDroppedWithReboots >> TBackupCollectionWithRebootsTests::CreateWithReboots >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> TBackupCollectionWithRebootsTests::DropWithReboots >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2024-11-21T17:51:51.736318Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791792126798906:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:51.771557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:51:51.773062302 786281 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:51:51.773106921 786281 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:51:51.775347Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:62919: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:62919 } ] 2024-11-21T17:51:52.068937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791796421766349:2276], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:52.068965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016d6/r3tmp/tmpjrAvYd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62919, node 1 TClient is connected to server localhost:62165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:52.240651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:52.240658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:52.240662Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:52.240666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:52.240668Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:52.240714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:52.386353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.387566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.387602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.388358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:52.388438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:52.388447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:51:52.388892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.388903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:52.389002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:52.389273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.390202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512437, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.390213Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:52.390317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:52.390767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.390832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.390845Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:52.390857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:52.390866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:52.390880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:52.391409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:52.391425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:52.391430Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:52.391444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:51:52.485392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:52.485423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:52.488971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:52.778966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.779027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.780054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
DIRECTORY, path: /Root/yq 2024-11-21T17:51:52.780116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.780183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.780200Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.780398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.780413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.780418Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:52.780480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.780483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.780485Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:51:52.781686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:52.785261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512829, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.785288Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211512829, at schemeshard: 72057594046644480 2024-11-21T17:51:52.785325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:51:52.785704Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2024-11-21T17:51:52.785708Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T17:51:52.785710Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T17:51:52.785835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.785907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.785928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:51:52.785941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:52.785951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:52.785966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:51:52.786311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:52.786317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:52.786320Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057 ... , TxId: 281474976715745, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xmcb5bf7fqy1p9csppbym. SessionId : ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T17:51:59.848476Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539389:2353], TxId: 281474976715745, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xmcb5bf7fqy1p9csppbym. SessionId : ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:59.848479Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:51:59.848482Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:51:59.848488Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 1 freeSpace: 8388576 2024-11-21T17:51:59.848490Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. returned 1 rows; processed 1 rows 2024-11-21T17:51:59.848499Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. dropping batch for read #0 2024-11-21T17:51:59.848500Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. 
effective maxinflight 1024 sorted 0 2024-11-21T17:51:59.848502Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:51:59.848504Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539391:2353], TxId: 281474976715745, task: 1, CA Id [1:7439791825642539389:2353]. returned async data processed rows 1 left freeSpace 8388576 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:51:59.848552Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539389:2353], TxId: 281474976715745, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xmcb5bf7fqy1p9csppbym. SessionId : ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:59.848558Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539389:2353], TxId: 281474976715745, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xmcb5bf7fqy1p9csppbym. SessionId : ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:59.848561Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715745, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:51:59.848572Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539389:2353], TxId: 281474976715745, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xmcb5bf7fqy1p9csppbym. SessionId : ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:59.848575Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539389:2353], TxId: 281474976715745, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xmcb5bf7fqy1p9csppbym. SessionId : ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:59.848576Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715745, task: 1. Tasks execution finished 2024-11-21T17:51:59.848577Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539389:2353], TxId: 281474976715745, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xmcb5bf7fqy1p9csppbym. SessionId : ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:59.848588Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715745, task: 1. pass away 2024-11-21T17:51:59.848606Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715745;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:59.848620Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715746 DataShard: 72075186224037893 Generation: 1 Counter: 11 SchemeShard: 72057594046644480 PathId: 8, BrokenTxLocks= 2024-11-21T17:51:59.848629Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. Taken 1 locks 2024-11-21T17:51:59.848631Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. new data for read #0 seqno = 1 finished = 1 2024-11-21T17:51:59.848635Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539393:2600], TxId: 281474976715746, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=. CustomerSuppliedId : . TraceId : 01jd7xmcb52gpzdb39t40fhmbk. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T17:51:59.848639Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539393:2600], TxId: 281474976715746, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=. CustomerSuppliedId : . TraceId : 01jd7xmcb52gpzdb39t40fhmbk. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:59.848643Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:51:59.848645Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:51:59.848650Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 1 freeSpace: 8388576 2024-11-21T17:51:59.848652Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. returned 1 rows; processed 1 rows 2024-11-21T17:51:59.848658Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. dropping batch for read #0 2024-11-21T17:51:59.848660Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. effective maxinflight 1024 sorted 0 2024-11-21T17:51:59.848663Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:51:59.848666Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539395:2600], TxId: 281474976715746, task: 1, CA Id [1:7439791825642539393:2600]. 
returned async data processed rows 1 left freeSpace 8388576 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:51:59.848692Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539393:2600], TxId: 281474976715746, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=. CustomerSuppliedId : . TraceId : 01jd7xmcb52gpzdb39t40fhmbk. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:59.848701Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539393:2600], TxId: 281474976715746, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=. CustomerSuppliedId : . TraceId : 01jd7xmcb52gpzdb39t40fhmbk. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:59.848705Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715746, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:51:59.848720Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539393:2600], TxId: 281474976715746, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=. CustomerSuppliedId : . TraceId : 01jd7xmcb52gpzdb39t40fhmbk. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:51:59.848722Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539393:2600], TxId: 281474976715746, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=. CustomerSuppliedId : . TraceId : 01jd7xmcb52gpzdb39t40fhmbk. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:51:59.848723Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715746, task: 1. Tasks execution finished 2024-11-21T17:51:59.848725Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439791825642539393:2600], TxId: 281474976715746, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=. CustomerSuppliedId : . TraceId : 01jd7xmcb52gpzdb39t40fhmbk. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:51:59.848739Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715746, task: 1. pass away 2024-11-21T17:51:59.848758Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715746;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:51:59.849720Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jd7xmcb94nvjsmdvycd5q1g2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk1MWExNDYtNTRlOWU4YjgtYmFiMGNkOWQtMzMzMGU4MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:59.849722Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jd7xmcb955sty6ynmvpwtahm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjllNzAxZGEtNzIyMGFlZmQtZTkwNWEzZDgtNTg0NGY0ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root [good] Yq_1::CreateQuery_Without_Connection >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> DataShardTxOrder::RandomPointsAndRanges >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::CancelBuild >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier+StreamLookup >> DataShardScan::ScanFollowedByUpdate >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet >> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite |83.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> Secret::ValidationQueryService [GOOD] |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset >> Yq_1::Basic_EmptyDict [GOOD] >> TPersQueueTest::BadTopic [GOOD] >> TBackupCollectionWithRebootsTests::CreateDroppedAndDropWithReboots [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable >> TopicService::OneConsumer_TheRangesOverlap >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardTxOrder::DelayData ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: 2024-11-21T17:49:36.017646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:36.018238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:36.018272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c99/r3tmp/tmpUOuWve/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12415, node 1 TClient is connected to server localhost:14018 2024-11-21T17:49:36.152787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:36.172270Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:36.173052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:36.173066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:36.173068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:36.173129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:36.216741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:36.216789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:36.228880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T17:49:48.928795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:718:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.928826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:730:2605], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.928836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:48.929711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:49:48.931727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:733:2608], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:49:48.954586Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:794:2649], status: GENERIC_ERROR, issues:
:1:20: Error: Unexpected token '-' : syntax error... 2024-11-21T17:49:48.954848Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJjNmE1NGQtYWFkYWQ2M2QtYmZjMDE0MGMtYzRkNGFhZGE=, ActorId: [1:715:2596], ActorState: ExecuteState, TraceId: 01jd7xgcfy24sr22z8qg8svxyy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: Unexpected token '-' : syntax error... ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2024-11-21T17:49:59.220387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2024-11-21T17:49:59.354151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:49:59.428821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2024-11-21T17:49:59.722005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.109096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.219922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:50:00.468513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:50:01.040606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:50:01.716752Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzQ4MmNkZWUtNzU2MWY5OTAtMzFmNjU3YmYtMTk1NjY0MGU=, ActorId: [1:820:2667], ActorState: ExecuteState, TraceId: 01jd7xgpgwc8m12vqctekhhq3c, Create QueryResponse for error on request, msg: 2024-11-21T17:50:01.717441Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jd7xgpgwc8m12vqctekhhq3c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ4MmNkZWUtNzU2MWY5OTAtMzFmNjU3YmYtMTk1NjY0MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2024-11-21T17:50:01.925294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:50:01.925319Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:50:33.812107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T17:50:34.270468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2024-11-21T17:50:34.816164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715733:0, at schemeshard: 72057594046644480 2024-11-21T17:50:34.974436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715738:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2024-11-21T17:50:45.844759Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzBjNmYxNC1lZWY3Y2FkMi1lMTExMDNmYy1hNjNjNjJhMA==, ActorId: [1:3234:4513], ActorState: ExecuteState, TraceId: 01jd7xj3z96knnf0k2vtm4h7wv, Create QueryResponse for error on request, msg: 2024-11-21T17:50:45.845030Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jd7xj3z96knnf0k2vtm4h7wv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBjNmYxNC1lZWY3Y2FkMi1lMTExMDNmYy1hNjNjNjJhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:50:56.732768Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3635:4808], TxId: 281474976715768, task: 1. Ctx: { TraceId : 01jd7xjemwd4pr5pq9qgtb1fnp. SessionId : ydb://session/3?node_id=1&id=Nzg0ZGZlLTJlODQ2M2E0LTI3Y2ZjNjcxLTZjN2JkODg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T17:50:56.734884Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3636:4809], TxId: 281474976715768, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xjemwd4pr5pq9qgtb1fnp. SessionId : ydb://session/3?node_id=1&id=Nzg0ZGZlLTJlODQ2M2E0LTI3Y2ZjNjcxLTZjN2JkODg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:3632:4738], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:50:56.735312Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Nzg0ZGZlLTJlODQ2M2E0LTI3Y2ZjNjcxLTZjN2JkODg=, ActorId: [1:3537:4738], ActorState: ExecuteState, TraceId: 01jd7xjemwd4pr5pq9qgtb1fnp, Create QueryResponse for error on request, msg: 2024-11-21T17:50:56.748203Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jd7xjejz1e78qq42jkptmd2g" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=Nzg0ZGZlLTJlODQ2M2E0LTI3Y2ZjNjcxLTZjN2JkODg=" tx_control { tx_id: "01jd7xjejz1e78qq42jkptmd2g" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2024-11-21T17:51:07.601350Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmNkOGNkOGEtYmUzMmU1MDYtMjNlZTI4MjktMmQ5ZjA4YmM=, ActorId: [1:3841:4958], ActorState: ExecuteState, TraceId: 01jd7xjs582mtvcjf2e5wb89mm, Create QueryResponse for error on request, msg: 2024-11-21T17:51:07.601659Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715781. Ctx: { TraceId: 01jd7xjs582mtvcjf2e5wb89mm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNkOGNkOGEtYmUzMmU1MDYtMjNlZTI4MjktMmQ5ZjA4YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2024-11-21T17:51:18.422550Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4156:5195], for# root@builtin, access# DescribeSchema 2024-11-21T17:51:18.422574Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4156:5195], for# root@builtin, access# DescribeSchema 2024-11-21T17:51:18.422821Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4153:5192], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:18.423042Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTZlMDBjYzQtMzc3MmQzNmQtMThiMmQ5ZjctZjY4OThkNmQ=, ActorId: [1:4147:5187], ActorState: ExecuteState, TraceId: 01jd7xk3wm87he3vxmse1xeg8g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T17:51:29.141274Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator 2024-11-21T17:51:29.232836Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2ViNGNhMjQtMTFjYWMwYmUtMzFiNWJiYmYtNDFiZGI1MTM=, ActorId: [1:4406:5380], ActorState: ExecuteState, TraceId: 01jd7xkebn1q2g3vh2w9azv4cb, Create QueryResponse for error on request, msg: 2024-11-21T17:51:29.232996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715805. Ctx: { TraceId: 01jd7xkebn1q2g3vh2w9azv4cb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ViNGNhMjQtMTFjYWMwYmUtMzFiNWJiYmYtNDFiZGI1MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T17:51:40.223408Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmU0YjhjNGYtNGFjZjJiNWEtNTRmZWZmZWEtY2YzMmQyNjg=, ActorId: [1:4793:5668], ActorState: ExecuteState, TraceId: 01jd7xkrxf3qckcyqg4sfskwe2, Create QueryResponse for error on request, msg: 2024-11-21T17:51:40.223675Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715821. Ctx: { TraceId: 01jd7xkrxf3qckcyqg4sfskwe2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU0YjhjNGYtNGFjZjJiNWEtNTRmZWZmZWEtY2YzMmQyNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2024-11-21T17:52:02.331934Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715853. Ctx: { TraceId: 01jd7xmeqr4kwhsma4y5gee3db, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcyNTYxZjItNzAzNzBmN2UtODY4OGUzYjAtMTA2MjgwNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TPersQueueTest::WriteExisting [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> IndexBuildTest::CancelBuild [GOOD] >> TPersQueueTest::WriteExistingBigValue >> TPersQueueTest::SetupLockSession2 [GOOD] >> TBackupCollectionWithRebootsTests::ParallelCreateDrop [GOOD] >> DemoTx::Scenario_1 [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite [GOOD] >> TPersQueueTest::DirectReadPreCached [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> TDataShardLocksTest::UseLocksCache [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier+StreamLookup [GOOD] >> TBackupCollectionWithRebootsTests::CreateDroppedWithReboots [GOOD] >> TBackupCollectionWithRebootsTests::CreateWithReboots [GOOD] >> TPersQueueTest::SetupLockSession >> DemoTx::Scenario_2 >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite >> TPersQueueTest::DirectReadNotCached >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite |83.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:56.366668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:56.366684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.366688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:56.366691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:56.366703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:56.366705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:56.366712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.366770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:56.374141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:56.374159Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:56.376791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:56.377573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:56.377616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:56.379092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:56.379325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:56.379415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.379478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:56.380569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.380853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.380864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.380901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:56.380909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.380915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:56.380927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.382226Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:56.394259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:56.394326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.394380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:56.394421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:56.394426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.395117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.395135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:56.395185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.395192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:56.395195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:56.395198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:56.395504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.395511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:56.395514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:56.395753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.395758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.395762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.395766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.396118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:56.396450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:56.396489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:56.396606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.396622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:56.396628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.396664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:56.396669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.396692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.396702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:56.396993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.396998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.397028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.397031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:56.397098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.397102Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:56.397109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:56.397111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.397115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:56.397118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.397121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T17:51:56.397123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:56.397131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:56.397134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:56.397137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:56.397324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.397333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.397336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:56.397338Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:56.397341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.397349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... hed: true 2024-11-21T17:52:04.380377Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T17:52:04.380450Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:04.380473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:04.380480Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T17:52:04.380485Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T17:52:04.380963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T17:52:04.380976Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T17:52:04.380987Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T17:52:04.380991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T17:52:04.380997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T17:52:04.381008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify 
send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710760 2024-11-21T17:52:04.381014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T17:52:04.381019Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T17:52:04.381022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T17:52:04.381035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T17:52:04.383755Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T17:52:04.383784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T17:52:04.383799Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T17:52:04.383820Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:04.384313Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:52:04.384332Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:04.384343Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 
2024-11-21T17:52:04.384820Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:52:04.384842Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:04.384847Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T17:52:04.384872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:52:04.384878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1234:3088] TestWaitNotification: OK eventTxId 102 2024-11-21T17:52:04.385247Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T17:52:04.385339Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } 2024-11-21T17:52:04.385515Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:04.385573Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 63us result status StatusSuccess 2024-11-21T17:52:04.385684Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:04.385843Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:52:04.385866Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 25us result status StatusPathDoesNotExist 2024-11-21T17:52:04.385890Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::CreateDroppedAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 
2024-11-21T17:51:59.807912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:59.807939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:59.807945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:59.807949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:59.807965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:59.807969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:59.807978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:59.808056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:59.819655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:59.819679Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:59.822930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:59.823011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:59.823048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:59.824604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:59.824670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:59.824811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:59.824888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:59.825496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:59.825897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:59.825912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:59.825951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:59.825959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:59.825965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:59.825989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.827660Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:51:59.845595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:59.845694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.845765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:59.845817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:59.845825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.846710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:59.846754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:59.846817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.846827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:59.846832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:59.846837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:59.847259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.847270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:59.847275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:59.847623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.847632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.847638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:59.847644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:59.848269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:59.848661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:59.848714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:59.848899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:59.848922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:59.848940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:59.848996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:59.849002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:59.849035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:59.849048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:59.849451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:59.849459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:59.849503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:59.849507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:59.849581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:59.849588Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:59.849598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:59.849602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:59.849607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:59.849612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:59.849616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:59.849620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:59.849630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:59.849635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:59.849639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T17:51:59.849902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:59.849916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:59.849920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:59.849925Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:59.849930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:59.849943Z node 1 ... 11-21T17:52:03.578058Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:03.578063Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 10 2024-11-21T17:52:03.578066Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:03.578170Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.578178Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.578182Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:03.578185Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T17:52:03.578188Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:52:03.578196Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T17:52:03.578651Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T17:52:03.578677Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T17:52:03.578976Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:03.579005Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 
Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:03.579014Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, HandleReply TEvOperationPlan: step# 5000007 2024-11-21T17:52:03.579032Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:52:03.579048Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 1 -> 240 2024-11-21T17:52:03.579091Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:03.579098Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:52:03.579195Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.579264Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T17:52:03.579643Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:03.579650Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:52:03.579675Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T17:52:03.579694Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:03.579698Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T17:52:03.579701Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T17:52:03.579744Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:52:03.579749Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T17:52:03.579760Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T17:52:03.579763Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:52:03.579768Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T17:52:03.579773Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:52:03.579777Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T17:52:03.579780Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T17:52:03.579790Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:52:03.579796Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, 
tx: 1006, publications: 2, subscribers: 0 2024-11-21T17:52:03.579799Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 4], 11 2024-11-21T17:52:03.579802Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2024-11-21T17:52:03.579859Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.579867Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.579871Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:03.579875Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T17:52:03.579878Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:52:03.579913Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:03.579917Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:52:03.579924Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:03.579948Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.579956Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.579962Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:03.579965Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T17:52:03.579968Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:03.579974Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T17:52:03.580755Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:52:03.580779Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:03.580786Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T17:52:03.580852Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:52:03.580860Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:52:03.580944Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:52:03.580961Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:52:03.580965Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [16:437:2429] TestWaitNotification: OK eventTxId 1006 2024-11-21T17:52:03.581024Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:03.581053Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 36us result status StatusPathDoesNotExist 2024-11-21T17:52:03.581085Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2024-11-21T17:51:51.894735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791789677769426:2104];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:51.894805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:51:51.936525698 786804 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:51:51.936572079 786804 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:51:51.940970Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23256: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23256 } ] test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016ce/r3tmp/tmp2nY7kX/pdisk_1.dat 2024-11-21T17:51:52.278094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 23256, node 1 TClient is connected to server localhost:1110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:52.325024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:51:52.325036Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:52.325220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:52.325228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:52.325229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:52.325268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:52.601875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.602970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.603005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.603617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:52.603718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:52.603729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:51:52.604072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:52.604083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:52.604363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:52.604867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.606070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211512654, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:52.606082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:52.606141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:52.606555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:52.606596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:52.606610Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:52.606622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:52.606633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:52.606648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:52.607023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:52.607040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:52.607043Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:52.607054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:51:52.661800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:52.661832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:52.663422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:52.943213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.943304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.946488Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". 
Create session OK 2024-11-21T17:51:52.946505Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2024-11-21T17:51:52.946507Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2024-11-21T17:51:52.946569Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2024-11-21T17:51:52.946577Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2024-11-21T17:51:52.946579Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2024-11-21T17:51:52.947865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/yq/result_sets, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.947873Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2024-11-21T17:51:52.947878Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T17:51:52.947879Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T17:51:52.948022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 2 2024-11-21T17:51:52.948047Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2024-11-21T17:51:52.948058Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T17:51:52.948059Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2024-11-21T17:51:52.948183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.948195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.948253Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2024-11-21T17:51:52.948262Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T17:51:52.948263Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2024-11-21T17:51:52.948361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/yq/compute_databases, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.948384Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2024-11-21T17:51:52.948397Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2024-11-21T17:51:52.948398Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2024-11-21T17:51:52.948423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 1 -> 2 2024-11-21T17:51:52.948487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:52.948490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:52.948578Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2024-11-21T17:51:52.948587Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2024-11-21T17:51:52.948587Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2024-11-21T17:51:52.948633Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2024-11-21T17:51:52.948643Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T17:51:52.948645Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T17:51:52.948694Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2024-11-21T17:51:52.948704Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2024-11-21T17:51:52.948705Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_sm ... ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007620Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007632Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007647Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007651Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007662Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007666Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007671Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007682Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007689Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007698Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007707Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007715Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007725Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007732Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007742Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007755Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007760Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007772Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007776Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007782Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007791Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007800Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007809Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007821Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007826Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007837Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007841Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007846Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.007859Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished 
with error: 2024-11-21T17:52:03.008638Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with
error: 2024-11-21T17:52:03.008648Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008653Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008665Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008670Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008680Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008686Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008697Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008705Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008716Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008721Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008727Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008735Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008746Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008757Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008763Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008771Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008781Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008788Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008798Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008809Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008814Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008837Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:03.008853Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2024-11-21T17:52:01.743492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:01.744007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:01.744039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016a8/r3tmp/tmpKwXbhO/pdisk_1.dat 2024-11-21T17:52:01.851043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:01.869061Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:01.911475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:01.911503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:01.922013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:02.025608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:02.039561Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:02.039771Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:02.039846Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T17:52:02.039890Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:02.046745Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:02.046769Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:02.047049Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:02.047126Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:640:2542] 2024-11-21T17:52:02.047157Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:02.048005Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:02.048085Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:02.048118Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:02.048311Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:02.048328Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:02.048335Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 
2024-11-21T17:52:02.048374Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:02.051822Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:02.051875Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:02.051894Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2559] 2024-11-21T17:52:02.051898Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:02.051901Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:52:02.051905Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:02.051984Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.051989Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.052070Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:02.052085Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:02.052111Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:02.052116Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:02.052121Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:02.052125Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:02.052128Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:02.052133Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:02.052137Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:02.052217Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.052221Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.052269Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:631:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T17:52:02.052292Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T17:52:02.052298Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:02.052317Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:02.052355Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:02.052365Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:02.052378Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:02.052390Z node 1 
:TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:02.052392Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:02.052396Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:02.052399Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:02.052430Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:02.052433Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:02.052436Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:02.052438Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:02.052446Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:02.052448Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:02.052451Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:02.052453Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:02.052456Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:02.052479Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:02.052493Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:02.052593Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T17:52:02.052600Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T17:52:02.052604Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T17:52:02.052629Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:02.052633Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T17:52:02.052642Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:02.052652Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:672:2560] 2024-11-21T17:52:02.052655Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T17:52:02.052657Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T17:52:02.052660Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T17:52:02.052753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:640:2542], Recipient [1:640:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.052758Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.052818Z node 1 
:TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T17:52:02.052828Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T17:52:02.052853Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:661:2555], Recipient [1:640:2542]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.052857Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.052862Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:632:2537], serverId# [1:661:2555], sessionId# [0:0:0] 2024-11-21T17:52:02.052877Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T17:52:02.052882Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:02.052886Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2024-11-21T17:52:02.052891Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T17:52:02.052894Z node 1 :TX_DATASHARD TRACE: Uni ... 8 is DelayComplete 2024-11-21T17:52:04.714657Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:04.714659Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:04.714663Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:52:04.714682Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2024-11-21T17:52:04.714684Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:04.714687Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2024-11-21T17:52:04.724935Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:04.724979Z node 2 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715662] at 72075186224037888 on unit CompleteOperation 2024-11-21T17:52:04.725000Z node 2 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [2:898:2691], exec latency: 9 ms, propose latency: 9 ms 2024-11-21T17:52:04.725017Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T17:52:04.725023Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:04.725030Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:04.725034Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:04.725042Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:52:04.725062Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2024-11-21T17:52:04.725180Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:938:2748], Recipient [2:639:2541]: {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T17:52:04.725190Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:04.725196Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2024-11-21T17:52:04.725553Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:639:2541]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T17:52:04.746151Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmh3newa3tk2hs8qv66qv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWNlNjY1ZDMtZDFhN2E3NjctZDU0MmRlZS0zMTE5MWFhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:04.746801Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:979:2774], Recipient [2:938:2748]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:52:04.746838Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:52:04.746854Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2024-11-21T17:52:04.746886Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:04.746890Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:52:04.746895Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:04.746899Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:04.746911Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2024-11-21T17:52:04.746916Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:04.746919Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:04.746923Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:52:04.746926Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:52:04.746942Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T17:52:04.746983Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T17:52:04.746990Z node 2 :TX_DATASHARD TRACE: 
72075186224037888 Complete read# {[2:979:2774], 0} after executionsCount# 1 2024-11-21T17:52:04.746996Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:979:2774], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:52:04.747009Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:979:2774], 0} finished in read 2024-11-21T17:52:04.747018Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:04.747021Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:52:04.747024Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:04.747028Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:52:04.747036Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:04.747039Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:04.747043Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2024-11-21T17:52:04.747047Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:52:04.747068Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:52:04.747242Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:979:2774], Recipient [2:938:2748]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:52:04.747249Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T17:52:04.747301Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:979:2774], Recipient [2:639:2541]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2024-11-21T17:52:04.747312Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T17:52:04.747319Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2024-11-21T17:52:04.747325Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T17:52:04.747328Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2024-11-21T17:52:04.747331Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:04.747334Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T17:52:04.747341Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2024-11-21T17:52:04.747345Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T17:52:04.747348Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:04.747351Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 
2024-11-21T17:52:04.747354Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2024-11-21T17:52:04.747364Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2024-11-21T17:52:04.747382Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T17:52:04.747387Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:979:2774], 1} after executionsCount# 1 2024-11-21T17:52:04.747391Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:979:2774], 1} sends rowCount# 2, bytes# 48, quota rows left# 997, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:52:04.747399Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:979:2774], 1} finished in read 2024-11-21T17:52:04.747406Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T17:52:04.747409Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T17:52:04.747412Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:52:04.747415Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:52:04.747420Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T17:52:04.747423Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:52:04.747428Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2024-11-21T17:52:04.747432Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T17:52:04.747441Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T17:52:04.747522Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:979:2774], Recipient [2:639:2541]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2024-11-21T17:52:04.747528Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2024-11-21T17:51:33.355769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791714128163243:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:33.355910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001f9b/r3tmp/tmpPjFKlj/pdisk_1.dat 2024-11-21T17:51:33.408046Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63072, node 1 2024-11-21T17:51:33.416749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:33.416763Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:33.416764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:33.416795Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:33.456849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:33.456870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:33.457926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:33.488868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.489921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.489938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.490070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:33.490125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:33.490134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:33.490234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:33.490246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2024-11-21T17:51:33.490329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.490549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:33.490645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493537, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.490658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:33.490725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:33.490855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.490895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.490908Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:51:33.490923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:33.490935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:33.490948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:33.491290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:33.491307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:33.491311Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:33.491327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:63831 2024-11-21T17:51:33.505221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.505293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.505299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.505435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T17:51:33.505478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:33.505720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211493551, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:33.505734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715658:0, stepId:1732211493551, at schemeshard: 72057594046644480 2024-11-21T17:51:33.505794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:51:33.505822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:51:33.505834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T17:51:33.505918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:51:33.505963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:51:33.506127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:51:33.506131Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:51:33.506140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:51:33.506596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.506647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:51:33.506667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:51:33.506674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T17:51:33.506735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T17:51:33.506753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:33.506895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:51:33.506914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:51:33.506917Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:51:33.506928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T17:51:33.507105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:51:33.507138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:33.507238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: C ... Y WARN: http request [DeleteMessageBatch] requestId [e7cbb8a2-b26fc26e-6bccf394-b949d52] got new request with incorrect json from [58a1:e53a:3946:0:40a1:e53a:3946:0] 2024-11-21T17:52:04.377940Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [e7cbb8a2-b26fc26e-6bccf394-b949d52] reply with status: BAD_REQUEST message: LogMessageFatal exception 2024-11-21T17:52:04.377966Z node 7 :HTTP DEBUG: (#47,[::1]:57890) <- (400 InvalidArgumentException) 2024-11-21T17:52:04.377973Z node 7 :HTTP DEBUG: (#47,[::1]:57890) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": { } } 0 2024-11-21T17:52:04.377975Z node 7 :HTTP DEBUG: (#47,[::1]:57890) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: e7cbb8a2-b26fc26e-6bccf394-b949d52 x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T17:52:04.377999Z node 7 :HTTP DEBUG: (#47,[::1]:57890) connection closed Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T17:52:04.378158Z node 7 :HTTP DEBUG: (#44,[::1]:57906) incoming connection opened 2024-11-21T17:52:04.378185Z node 7 :HTTP DEBUG: (#44,[::1]:57906) -> (POST /Root) 2024-11-21T17:52:04.378210Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [188d:6f3e:3946:0:8d:6f3e:3946:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 9e923fa1-a93c0e29-be4b6b9a-f378ca71 F0000 00:00:1732211524.378256 797157 generated_message_reflection.cc:181] F0000 00:00:1732211524.378256 797157 generated_message_reflection.cc:181] 2024-11-21T17:52:04.378321Z node 7 :HTTP_PROXY WARN: http request [DeleteMessageBatch] requestId [9e923fa1-a93c0e29-be4b6b9a-f378ca71] got new request with incorrect json from [188d:6f3e:3946:0:8d:6f3e:3946:0] 2024-11-21T17:52:04.378328Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [9e923fa1-a93c0e29-be4b6b9a-f378ca71] reply with status: BAD_REQUEST message: LogMessageFatal exception 2024-11-21T17:52:04.378356Z node 7 
:HTTP DEBUG: (#44,[::1]:57906) <- (400 InvalidArgumentException) 2024-11-21T17:52:04.378365Z node 7 :HTTP DEBUG: (#44,[::1]:57906) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 7a { "QueueUrl":"http://ghrun-2rqap2spfm.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries":"" } 0 2024-11-21T17:52:04.378373Z node 7 :HTTP DEBUG: (#44,[::1]:57906) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 9e923fa1-a93c0e29-be4b6b9a-f378ca71 x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T17:52:04.378410Z node 7 :HTTP DEBUG: (#44,[::1]:57906) connection closed 2024-11-21T17:52:04.378536Z node 7 :HTTP DEBUG: (#44,[::1]:57910) incoming connection opened 2024-11-21T17:52:04.378558Z node 7 :HTTP DEBUG: (#44,[::1]:57910) -> (POST /Root) 2024-11-21T17:52:04.378579Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [188d:6f3e:3946:0:8d:6f3e:3946:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: c0ce2a3c-ca2e5d9-a7c36ebf-94257ac4 2024-11-21T17:52:04.378680Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [c0ce2a3c-ca2e5d9-a7c36ebf-94257ac4] got new request from [188d:6f3e:3946:0:8d:6f3e:3946:0] 2024-11-21T17:52:04.378756Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [c0ce2a3c-ca2e5d9-a7c36ebf-94257ac4] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:52:04.378763Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [c0ce2a3c-ca2e5d9-a7c36ebf-94257ac4] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:52:04.388084Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [c0ce2a3c-ca2e5d9-a7c36ebf-94257ac4] Got succesfult GRPC response. 2024-11-21T17:52:04.388140Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [c0ce2a3c-ca2e5d9-a7c36ebf-94257ac4] reply ok 2024-11-21T17:52:04.388199Z node 7 :HTTP DEBUG: (#44,[::1]:57910) <- (200 ) Http output full {"Successful":[{"SequenceNumber":"0","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"3b570c10-4472fa5d-f18dbadc-48b6dcdf"},{"SequenceNumber":"0","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"5178a3a5-b6a855b5-a54b0326-6a4e0d54"}]} 2024-11-21T17:52:04.388291Z node 7 :HTTP DEBUG: (#44,[::1]:57910) connection closed 2024-11-21T17:52:04.388667Z node 7 :HTTP DEBUG: (#44,[::1]:57912) incoming connection opened 2024-11-21T17:52:04.388690Z node 7 :HTTP DEBUG: (#44,[::1]:57912) -> (POST /Root) 2024-11-21T17:52:04.388723Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [188d:6f3e:3946:0:8d:6f3e:3946:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 7b74b3cc-17bd8164-a461559b-9607d9a3 2024-11-21T17:52:04.388816Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [7b74b3cc-17bd8164-a461559b-9607d9a3] got new request from [188d:6f3e:3946:0:8d:6f3e:3946:0] 2024-11-21T17:52:04.388880Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [7b74b3cc-17bd8164-a461559b-9607d9a3] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:52:04.388883Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [7b74b3cc-17bd8164-a461559b-9607d9a3] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:52:04.409002Z node 7 :SQS WARN: Mark infly [cloud4/000000000000000101v0/1] for reloading. Reason: MessageDeleted 2024-11-21T17:52:04.409213Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [7b74b3cc-17bd8164-a461559b-9607d9a3] Got succesfult GRPC response. 2024-11-21T17:52:04.409281Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [7b74b3cc-17bd8164-a461559b-9607d9a3] reply ok 2024-11-21T17:52:04.409338Z node 7 :HTTP DEBUG: (#44,[::1]:57912) <- (200 ) 2024-11-21T17:52:04.409421Z node 7 :HTTP DEBUG: (#44,[::1]:57912) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1732211524379","ApproximateFirstReceiveTimestamp":"1732211524401","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAEgsYbp_rQyKAE","Body":"MessageBody-0","MessageId":"3b570c10-4472fa5d-f18dbadc-48b6dcdf"}]} 2024-11-21T17:52:04.409768Z node 7 :HTTP DEBUG: (#44,[::1]:57920) incoming connection opened 2024-11-21T17:52:04.409795Z node 7 :HTTP DEBUG: (#44,[::1]:57920) -> (POST /Root) 2024-11-21T17:52:04.409835Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8c1:e73a:3946:0:c0c1:e73a:3946:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: d2fedce0-e5d1c002-525834e3-a81c5ca0 2024-11-21T17:52:04.409917Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [d2fedce0-e5d1c002-525834e3-a81c5ca0] got new request from [d8c1:e73a:3946:0:c0c1:e73a:3946:0] 2024-11-21T17:52:04.409975Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [d2fedce0-e5d1c002-525834e3-a81c5ca0] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:52:04.409985Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [d2fedce0-e5d1c002-525834e3-a81c5ca0] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:52:04.415752Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [d2fedce0-e5d1c002-525834e3-a81c5ca0] Got succesfult GRPC response. 
2024-11-21T17:52:04.415804Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [d2fedce0-e5d1c002-525834e3-a81c5ca0] reply ok 2024-11-21T17:52:04.415860Z node 7 :HTTP DEBUG: (#44,[::1]:57920) <- (200 ) 2024-11-21T17:52:04.415906Z node 7 :HTTP DEBUG: (#44,[::1]:57920) connection closed Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1732211524379","ApproximateFirstReceiveTimestamp":"1732211524410","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAIguobp_rQyKAE","Body":"MessageBody-1","MessageId":"5178a3a5-b6a855b5-a54b0326-6a4e0d54"}]} 2024-11-21T17:52:04.416180Z node 7 :HTTP DEBUG: (#44,[::1]:57932) incoming connection opened 2024-11-21T17:52:04.416206Z node 7 :HTTP DEBUG: (#44,[::1]:57932) -> (POST /Root) 2024-11-21T17:52:04.416260Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [983a:6c3e:3946:0:803a:6c3e:3946:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 8cb4daea-e8b79bad-28c04ca5-b0725b3d 2024-11-21T17:52:04.416351Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [8cb4daea-e8b79bad-28c04ca5-b0725b3d] got new request from [983a:6c3e:3946:0:803a:6c3e:3946:0] 2024-11-21T17:52:04.416417Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [8cb4daea-e8b79bad-28c04ca5-b0725b3d] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:52:04.416425Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [8cb4daea-e8b79bad-28c04ca5-b0725b3d] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:52:04.422833Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [8cb4daea-e8b79bad-28c04ca5-b0725b3d] Got succesfult GRPC response. 2024-11-21T17:52:04.422880Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [8cb4daea-e8b79bad-28c04ca5-b0725b3d] reply ok 2024-11-21T17:52:04.422936Z node 7 :HTTP DEBUG: (#44,[::1]:57932) <- (200 ) 2024-11-21T17:52:04.422980Z node 7 :HTTP DEBUG: (#44,[::1]:57932) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2024-11-21T17:52:04.423269Z node 7 :HTTP DEBUG: (#44,[::1]:57942) incoming connection opened 2024-11-21T17:52:04.423287Z node 7 :HTTP DEBUG: (#44,[::1]:57942) -> (POST /Root) 2024-11-21T17:52:04.423323Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [58e2:6c3e:3946:0:40e2:6c3e:3946:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: e8328476-8c6e7b76-6953665c-d39f11ee 2024-11-21T17:52:04.423407Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [e8328476-8c6e7b76-6953665c-d39f11ee] got new request from [58e2:6c3e:3946:0:40e2:6c3e:3946:0] 2024-11-21T17:52:04.423465Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [e8328476-8c6e7b76-6953665c-d39f11ee] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T17:52:04.423472Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [e8328476-8c6e7b76-6953665c-d39f11ee] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T17:52:04.423668Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [e8328476-8c6e7b76-6953665c-d39f11ee] Got succesfult GRPC response. 
2024-11-21T17:52:04.423680Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [e8328476-8c6e7b76-6953665c-d39f11ee] reply ok 2024-11-21T17:52:04.423699Z node 7 :HTTP DEBUG: (#44,[::1]:57942) <- (200 ) Http output full {} 2024-11-21T17:52:04.423727Z node 7 :HTTP DEBUG: (#44,[::1]:57942) connection closed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:00.955999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:00.956028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:00.956033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:00.956038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:00.956050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:00.956053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:00.956059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:00.956120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:00.964394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:00.964412Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:00.966155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:00.966194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:00.966214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:00.967185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:00.967219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:00.967297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:00.967341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:00.967625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:00.967803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:00.967809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T17:52:00.967832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:00.967837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:00.967840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:00.967852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.968934Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:52:00.980565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:00.980646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.980706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:00.980752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:00.980758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.981396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:00.981428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:00.981469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.981477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:00.981479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:00.981483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:00.981897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.981912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:00.981916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:00.982266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.982274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.982278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:00.982283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:00.982725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:00.983057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:00.983100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:00.983241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:00.983260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:00.983273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:00.983309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:00.983313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:00.983338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:00.983347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:00.983749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:00.983759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:00.983805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:00.983810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:00.983863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:00.983869Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:00.983877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:00.983880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:00.983883Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:00.983887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:00.983890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:00.983893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:00.983903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:00.983907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:00.983909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:00.984149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:00.984159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:00.984162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:00.984166Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:00.984168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:00.984177Z node 1 ... 
2024-11-21T17:52:04.731052Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:52:04.731056Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2024-11-21T17:52:04.731060Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:04.731174Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.731183Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.731186Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:52:04.731190Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T17:52:04.731193Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:52:04.731201Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:52:04.731915Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2024-11-21T17:52:04.731942Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000005 2024-11-21T17:52:04.732032Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:04.732051Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:04.732058Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1005:0, HandleReply TEvOperationPlan: step# 5000005 2024-11-21T17:52:04.732072Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:52:04.732086Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 1 -> 240 2024-11-21T17:52:04.732111Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:04.732119Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:52:04.732200Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.732497Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T17:52:04.732747Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:04.732753Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:52:04.732773Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:52:04.732794Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:04.732799Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T17:52:04.732803Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2024-11-21T17:52:04.732839Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:52:04.732845Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T17:52:04.732859Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:52:04.732862Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:52:04.732867Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2024-11-21T17:52:04.732872Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:52:04.732877Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:52:04.732880Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:52:04.732890Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:52:04.732895Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2024-11-21T17:52:04.732899Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2024-11-21T17:52:04.732905Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:52:04.732963Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.732972Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.732976Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
1005 2024-11-21T17:52:04.732981Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:52:04.732984Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:52:04.733016Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:04.733021Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:52:04.733029Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:04.733053Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.733060Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.733064Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:52:04.733067Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2024-11-21T17:52:04.733070Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:04.733077Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2024-11-21T17:52:04.733513Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:52:04.733735Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:04.733746Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2024-11-21T17:52:04.733794Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:52:04.733801Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T17:52:04.733918Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:52:04.733933Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:52:04.733940Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [16:388:2380] TestWaitNotification: OK eventTxId 1005 2024-11-21T17:52:04.734008Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:04.734033Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 34us result status StatusPathDoesNotExist 2024-11-21T17:52:04.734065Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TBackupCollectionWithRebootsTests::DropWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::CreateDroppedWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:01.254051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:01.254073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:01.254076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:01.254080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:01.254093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:01.254096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:01.254102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:01.254172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:01.262480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:01.262503Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:01.264522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:01.264575Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:01.264607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:01.265894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:01.265946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:01.266042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.266098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:01.266470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.266703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:01.266710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.266735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:01.266740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:01.266745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:01.266757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.268340Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:52:01.285126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:01.285220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.285294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:01.285346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:01.285354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.286004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.286029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:01.286084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.286095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:01.286099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:01.286104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:01.286497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.286507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:01.286511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:01.286793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.286801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.286806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.286814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.287397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:01.287725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:01.287773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:01.287957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.287981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:01.287998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.288058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:01.288064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.288099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:01.288111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:01.288669Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:01.288683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:01.288737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.288744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:01.288836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.288845Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:01.288859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:01.288863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.288869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:01.288875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.288880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:01.288884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:01.288899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:01.288906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:01.288910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:01.289259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:01.289282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:01.289288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:01.289294Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:01.289299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:01.289334Z node 1 ... 
01:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T17:52:05.012822Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T17:52:05.012853Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:52:05.012858Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, ProgressState 2024-11-21T17:52:05.012863Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2024-11-21T17:52:05.012879Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:05.012938Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 10 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.012943Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 10 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.012945Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:05.012948Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 10 2024-11-21T17:52:05.012951Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:05.013044Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.013051Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.013054Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:05.013057Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T17:52:05.013061Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:52:05.013085Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T17:52:05.013625Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T17:52:05.013654Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at 
step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T17:52:05.013898Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:05.013919Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:05.013928Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, HandleReply TEvOperationPlan: step# 5000007 2024-11-21T17:52:05.013946Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:52:05.013959Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 1 -> 240 2024-11-21T17:52:05.013987Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:05.013995Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:52:05.014087Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.014143Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T17:52:05.014435Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:05.014442Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:52:05.014462Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T17:52:05.014481Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:05.014486Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T17:52:05.014490Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T17:52:05.014538Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:52:05.014544Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T17:52:05.014556Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T17:52:05.014559Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:52:05.014565Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T17:52:05.014573Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 
2024-11-21T17:52:05.014578Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T17:52:05.014581Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T17:52:05.014591Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:52:05.014596Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T17:52:05.014600Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 4], 11 2024-11-21T17:52:05.014603Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2024-11-21T17:52:05.014665Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.014675Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.014679Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:05.014683Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T17:52:05.014686Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:52:05.014729Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:05.014734Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:52:05.014742Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:05.014769Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.014777Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.014780Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:05.014784Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T17:52:05.014787Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:05.014795Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, 
subscribers: 0 2024-11-21T17:52:05.015577Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.015597Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:05.015607Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T17:52:05.015647Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:52:05.015654Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:52:05.015712Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:52:05.015725Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:52:05.015727Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [16:437:2429] TestWaitNotification: OK eventTxId 1006 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::CreateWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:01.320921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:01.320945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:01.320950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:01.320955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:01.320969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:01.320973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:01.320983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:01.321054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:01.332131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:01.332147Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:01.333916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:01.333993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:52:01.334025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:01.335397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:01.335445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:01.335537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.335590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:01.335931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.336142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:01.336147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.336172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:01.336178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:01.336182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:01.336194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.337802Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:52:01.349925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:01.350047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.350127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:01.350180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:01.350188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.351113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.351184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:01.351260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.351271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:01.351276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:01.351281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:01.351779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.351791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:01.351796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:01.352158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.352169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.352176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.352184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.352851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:01.353317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:01.353374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:01.353585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.353615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:01.353635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.353698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:01.353708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.353746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:01.353760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:01.354263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:52:01.354273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:01.354330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.354336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:01.354423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.354431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:01.354444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:01.354448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.354455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:01.354460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.354465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:01.354469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:01.354483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:01.354489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:01.354492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:01.354817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:01.354838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:01.354844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:01.354850Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:01.354855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:01.354871Z node 1 ... 
11-21T17:52:05.046867Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:52:05.046871Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T17:52:05.046875Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:05.046887Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T17:52:05.046891Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [16:299:2291] 2024-11-21T17:52:05.047357Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:52:05.047372Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateBackupCollection TPropose, operationId: 1003:0ProgressState 2024-11-21T17:52:05.047382Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1003 ready parts: 1/1 2024-11-21T17:52:05.047415Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:05.047530Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:52:05.047861Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:52:05.047892Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:52:05.047906Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:52:05.047910Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [16:300:2292] 2024-11-21T17:52:05.048209Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T17:52:05.048256Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:52:05.048327Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:05.048348Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:05.048356Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateBackupCollection TPropose, operationId: 1003:0HandleReply 
TEvOperationPlan: step# 5000004 2024-11-21T17:52:05.048385Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:52:05.048414Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:05.048423Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:52:05.048922Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:05.048933Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:52:05.048974Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:52:05.048992Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:52:05.049023Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:05.049028Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T17:52:05.049033Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T17:52:05.049037Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 5 2024-11-21T17:52:05.049101Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:52:05.049108Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:52:05.049125Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:52:05.049130Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:52:05.049135Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:52:05.049141Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:52:05.049146Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:52:05.049150Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:52:05.049162Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:52:05.049168Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T17:52:05.049173Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2024-11-21T17:52:05.049176Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 5], 1 2024-11-21T17:52:05.049356Z node 16 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:52:05.049368Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:52:05.049372Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:52:05.049377Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2024-11-21T17:52:05.049381Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:05.049643Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:52:05.049655Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:52:05.049659Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:52:05.049663Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2024-11-21T17:52:05.049667Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:52:05.049677Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:52:05.049682Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [16:299:2291] 2024-11-21T17:52:05.050832Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:52:05.051231Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:52:05.051259Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:52:05.051268Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [16:300:2292] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 2024-11-21T17:52:05.051400Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:05.051460Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 69us result status StatusSuccess 2024-11-21T17:52:05.051579Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 1 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 1 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 BackupCollectionVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_backup_collection_reboots/unittest >> TBackupCollectionWithRebootsTests::DropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:01.606480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:01.606501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:01.606504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:01.606507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:01.606519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:01.606522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:01.606528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:01.606587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:01.614332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:01.614354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:01.616508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:01.616557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:52:01.616582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:01.617929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:01.617982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:01.618112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.618186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:01.618563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.618784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:01.618791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.618817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:01.618822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:01.618827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:01.618839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.619877Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:52:01.630572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:01.630660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.630724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:01.630768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:01.630776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.631614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.631638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:01.631695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.631703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:01.631706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:01.631710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:01.632129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.632138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:01.632141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:01.632410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.632417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.632421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.632427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.633030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:01.633469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:01.633520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:01.633700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:01.633718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:01.633734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.633781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:01.633786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:01.633821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:01.633833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:01.634167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:52:01.634173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:01.634211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:01.634214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:01.634283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:01.634289Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:01.634297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:01.634302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.634308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:01.634313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:01.634317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:01.634321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:01.634332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:01.634337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:01.634340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:01.634608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:01.634626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:01.634631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:01.634636Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:01.634641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:01.634655Z node 1 ... 
11-21T17:52:05.361812Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:05.361816Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 10 2024-11-21T17:52:05.361820Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:05.362129Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.362140Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.362144Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:05.362147Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T17:52:05.362152Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:52:05.362160Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2024-11-21T17:52:05.362429Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T17:52:05.362452Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2024-11-21T17:52:05.362552Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:05.362566Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 68719478890 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:05.362572Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropBackupCollection TPropose, operationId: 1006:0, HandleReply TEvOperationPlan: step# 5000007 2024-11-21T17:52:05.362583Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:52:05.362598Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 1 -> 240 2024-11-21T17:52:05.362618Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:05.362623Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:52:05.362798Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.362823Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T17:52:05.363048Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:05.363052Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:52:05.363070Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T17:52:05.363085Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:05.363088Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 4 2024-11-21T17:52:05.363090Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 6 2024-11-21T17:52:05.363129Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:52:05.363134Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2024-11-21T17:52:05.363142Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T17:52:05.363145Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:52:05.363149Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T17:52:05.363152Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:52:05.363155Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T17:52:05.363157Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T17:52:05.363164Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:52:05.363168Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T17:52:05.363171Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 4], 11 2024-11-21T17:52:05.363173Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2024-11-21T17:52:05.363207Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.363212Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.363215Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
1006 2024-11-21T17:52:05.363220Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2024-11-21T17:52:05.363222Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:52:05.363245Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:05.363248Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T17:52:05.363255Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:52:05.363272Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.363276Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.363278Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:52:05.363280Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T17:52:05.363282Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:52:05.363287Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T17:52:05.363766Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:52:05.363780Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:05.363785Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T17:52:05.363826Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:52:05.363832Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:52:05.363889Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:52:05.363901Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:52:05.363904Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [16:436:2428] TestWaitNotification: OK eventTxId 1006 2024-11-21T17:52:05.363952Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:05.363971Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 25us result status StatusPathDoesNotExist 2024-11-21T17:52:05.363994Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.backups/collections/MyCollection1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.backups/collections\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/.backups/collections/MyCollection1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.backups/collections" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "collections" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |83.6%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail+StreamLookup >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost+StreamLookup >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T17:52:03.399370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:03.399751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:03.399770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac8/r3tmp/tmpK1FAQL/pdisk_1.dat 2024-11-21T17:52:03.493912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.511842Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:03.554531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:03.554572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:03.565073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:03.668773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.682048Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:03.682190Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:03.682247Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:03.682278Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:03.687589Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:03.687710Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:03.687732Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:03.687857Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:03.687877Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:03.687883Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:03.687923Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:03.690651Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:03.690709Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:03.690738Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:03.690743Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:03.690747Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:03.690750Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:03.690853Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.690859Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.690985Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:03.691004Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:03.691017Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.691020Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.691025Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:03.691031Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:03.691036Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:03.691041Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:03.691045Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:03.691049Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:03.691052Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:03.691057Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:03.691077Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:03.691080Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:03.691098Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:03.691135Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:03.691143Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:03.691157Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:03.691163Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:03.691166Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:03.691171Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:03.691175Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:03.691212Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:03.691216Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:03.691218Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:03.691221Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:03.691229Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:03.691232Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:03.691234Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:03.691237Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:03.691240Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:03.691463Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:03.691474Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:03.701727Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:03.701752Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:03.701758Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:03.701769Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:03.701780Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:03.875541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.875561Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.875567Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:03.875579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:03.875582Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:03.875599Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:03.875606Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:03.875609Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:03.875612Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:03.876133Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:03.876144Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:03.876221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.876225Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.876246Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:03.876251Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:03.876254Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:03.876259Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 76715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2841], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 33 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 7 FinishTimeMs: 1732211526232 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 2 BuildCpuTimeUs: 5 WaitInputTimeUs: 620 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526231 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.232475Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1046:2841] 2024-11-21T17:52:06.232479Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:06.232482Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:06.232514Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1047:2842], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 91 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 66 FinishTimeMs: 1732211526232 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 57 BuildCpuTimeUs: 9 WaitInputTimeUs: 592 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526231 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.232517Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2842] 2024-11-21T17:52:06.232521Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:06.232524Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:06.232553Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2843], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 121 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 102 FinishTimeMs: 1732211526232 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 98 BuildCpuTimeUs: 4 WaitInputTimeUs: 511 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526231 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.232556Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2843] 2024-11-21T17:52:06.232559Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T17:52:06.232562Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T17:52:06.232597Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2844], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 107 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 86 FinishTimeMs: 1732211526232 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 74 BuildCpuTimeUs: 12 WaitInputTimeUs: 619 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526231 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.232600Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2844] 2024-11-21T17:52:06.232605Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T17:52:06.232608Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T17:52:06.232628Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2845], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 37 DurationUs: 1000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 21 FinishTimeMs: 1732211526232 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 11 BuildCpuTimeUs: 10 WaitInputTimeUs: 844 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526231 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.232631Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2845] 2024-11-21T17:52:06.232634Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2846], 2024-11-21T17:52:06.232637Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2846], 2024-11-21T17:52:06.232646Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2846], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 40 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 23 FinishTimeMs: 1732211526232 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 10 WaitInputTimeUs: 888 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526231 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.232649Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2846] 2024-11-21T17:52:06.232680Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:06.232686Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmjh19a09825jjbeskaa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWZmZTcxZjEtNzFlYmMxNTEtMjFkOTAxZTYtZDkxZDk1MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000602s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T17:52:03.489215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:03.489613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:03.489634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac7/r3tmp/tmpXJXQBN/pdisk_1.dat 2024-11-21T17:52:03.592298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.609124Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:03.651214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:03.651241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:03.661737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:03.764882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.778171Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:03.778327Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:03.778393Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:03.778432Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:03.783952Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:03.784062Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:03.784078Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:03.784164Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:03.784181Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:03.784186Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:03.784252Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:03.786612Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:03.786653Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:03.786698Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:03.786702Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:03.786705Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:03.786708Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:03.786787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.786791Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.786899Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:03.786911Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:03.786921Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.786924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.786928Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:03.786932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:03.786936Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:03.786940Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:03.786943Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:03.786945Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:03.786948Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:03.786952Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:03.786969Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:03.786972Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:03.786984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:03.787022Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:03.787029Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:03.787040Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:03.787044Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:03.787046Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:03.787049Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:03.787052Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:03.787093Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:03.787097Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:03.787101Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:03.787105Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:03.787112Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:03.787114Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:03.787116Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:03.787118Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:03.787122Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:03.787308Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:03.787316Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:03.797564Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:03.797593Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:03.797597Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:03.797606Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:03.797615Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:03.971322Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.971338Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.971347Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:03.971366Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:03.971371Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:03.971390Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:03.971396Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:03.971400Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:03.971404Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:03.972027Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:03.972036Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:03.972126Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.972132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.972137Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:03.972143Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:03.972146Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:03.972154Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1008:2807], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 33 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 6 FinishTimeMs: 1732211526280 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 1 BuildCpuTimeUs: 5 WaitInputTimeUs: 697 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526279 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.280210Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1008:2807] 2024-11-21T17:52:06.280215Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T17:52:06.280218Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T17:52:06.280296Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. 
Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1009:2808], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 93 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 71 FinishTimeMs: 1732211526280 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 63 BuildCpuTimeUs: 8 WaitInputTimeUs: 650 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526279 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.280300Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1009:2808] 2024-11-21T17:52:06.280304Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:06.280307Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:06.280340Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1010:2809], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 103 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 78 FinishTimeMs: 1732211526280 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 74 BuildCpuTimeUs: 4 WaitInputTimeUs: 610 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526279 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.280343Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1010:2809] 2024-11-21T17:52:06.280346Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:06.280348Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. 
Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:06.280397Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1011:2810], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 98 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 74 FinishTimeMs: 1732211526280 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 63 BuildCpuTimeUs: 11 WaitInputTimeUs: 784 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526279 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.280400Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1011:2810] 2024-11-21T17:52:06.280403Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T17:52:06.280406Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T17:52:06.280425Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1012:2811], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 30 DurationUs: 1000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 18 FinishTimeMs: 1732211526280 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 10 WaitInputTimeUs: 934 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526279 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.280427Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1012:2811] 2024-11-21T17:52:06.280430Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. 
Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], 2024-11-21T17:52:06.280432Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1013:2812], 2024-11-21T17:52:06.280443Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1013:2812], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 41 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 18 FinishTimeMs: 1732211526280 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 8 WaitInputTimeUs: 978 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211526279 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:06.280445Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1013:2812] 2024-11-21T17:52:06.280477Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:06.280482Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmjk22xyrv1qh32ccaeam, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg2MzdmOTgtMTA2MDBlMzktZWQ3NTRmMjQtNjJjYTA2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000587s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> YdbYqlClient::TestYqlIssues >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: 2024-11-21T17:52:03.164043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:03.164431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:03.164449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac9/r3tmp/tmpscfSNt/pdisk_1.dat 2024-11-21T17:52:03.259110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.275671Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:03.317993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:03.318030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:03.328551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:03.432000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.640841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.892649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:03.892667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:03.892673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:03.893273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:04.068895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:04.149288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmg9m3xtgk03khbc3mahe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM2N2QzYWMtZGU0ZjRkYzUtOTQ0YWUzNTUtM2Q3YzBjNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 890 RawX2: 4294969940 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\tz\003\000\000\000\000\000\000\021T\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\014\n\010Database\022\000\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\222\001%\n\007TraceId\022\03201jd7xmg9m3xtgk03khbc3mahe\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=OTM2N2QzYWMtZGU0ZjRkYzUtOTQ0YWUzNTUtM2Q3YzBjNGE=\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\"\n\010\233\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 632 RawX2: 4294969833 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 101 } } 2024-11-21T17:52:04.172019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmgj26f76q6kb6qetb7s7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkxMTIwNmUtODIwMDkwMDctOGM0MGI5MTgtOGEyZjQ4ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 912 RawX2: 4294970013 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\220\003\000\000\000\000\000\000\021\235\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001%\n\007TraceId\022\03201jd7xmgj26f76q6kb6qetb7s7\222\001\021\n\006PoolId\022\007default\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=OTkxMTIwNmUtODIwMDkwMDctOGM0MGI5MTgtOGEyZjQ4ZDg=\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 719 RawX2: 4294969895 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 93 } } 2024-11-21T17:52:04.479055Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xmgts7ageew4pfzqjdamd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgxMTdiNzItNTljNDE5YzktNWRmOGJlMjctNDVlNjYxOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T17:52:04.828344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmh6m4c97tndbtq909avf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc1ZTc5NjMtOGFkZmZjZjctN2NhZTQ0OWYtZTBlNGEyMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1003 RawX2: 4294970093 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\353\003\000\000\000\000\000\000\021\355\n\000\000\001\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\006\006\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001%\n\007TraceId\022\03201jd7xmh6m4c97tndbtq909avf\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=1&id=ZTc1ZTc5NjMtOGFkZmZjZjctN2NhZTQ0OWYtZTBlNGEyMTM=\222\001\026\n\022CustomerSuppliedId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CurrentExecutionId\022\000\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 OrderId: 281474976715664 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 21 ActorId { RawX1: 632 RawX2: 4294969833 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 106 } } 2024-11-21T17:52:04.849686Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmh771hbnpst8sznfygpz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgxMTdiNzItNTljNDE5YzktNWRmOGJlMjctNDVlNjYxOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:04.859280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmh7k6vs5k5ykhd5kb6t8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgxMTdiNzItNTljNDE5YzktNWRmOGJlMjctNDVlNjYxOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:04.866407Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTgxMTdiNzItNTljNDE5YzktNWRmOGJlMjctNDVlNjYxOTk=, ActorId: [1:930:2747], ActorState: ExecuteState, TraceId: 01jd7xmh7v78mygt0c709vsev6, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T17:52:04.877025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmh7v78mygt0c709vsev6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgxMTdiNzItNTljNDE5YzktNWRmOGJlMjctNDVlNjYxOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvProposeTransaction 281474976715667 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1052 RawX2: 4294970043 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n" TxId: 281474976715667 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 4000 TxId: 18446744073709551615 } 2024-11-21T17:52:04.877235Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037888;tx_id=281474976715667; 2024-11-21T17:52:04.877242Z node 1 :TX_DATASHARD ERROR: MvccSnapshot without LockTxId is not implemented TEvProposeTransaction 281474976715667 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1052 RawX2: 4294970043 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000 \003\"\006\020\0020\000@\n" TxId: 281474976715667 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 4000 TxId: 18446744073709551615 } EvWriteResult 281474976715667 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_BAD_REQUEST Issues { message: "MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037888" } Origin: 72075186224037888 TxId: 281474976715667 2024-11-21T17:52:04.878267Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=YTgxMTdiNzItNTljNDE5YzktNWRmOGJlMjctNDVlNjYxOTk=, ActorId: [1:930:2747], ActorState: CleanupState, TraceId: 01jd7xmh7v78mygt0c709vsev6, Failed to cleanup:
: Error: Kikimr cluster or one of its subsystems was unavailable., code: 2005 2024-11-21T17:52:04.878479Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=MvccSnapshot without LockTxId is not implemented at tablet# 72075186224037889;tx_id=281474976715667; 2024-11-21T17:52:04.878487Z node 1 :TX_DATASHARD ERROR: MvccSnapshot without LockTxId is not implemented 2024-11-21T17:52:05.203628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:05.203660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:05.203680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac9/r3tmp/tmpQAhoOA/pdisk_1.dat 2024-11-21T17:52:05.279003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:05.292575Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:05.334527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:05.334561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:05.344986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:05.448472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:05.654806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:05.908880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:05.908907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:05.908917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:05.909722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:06.086254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:06.125466Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmj8mbafwnpw8zdfyrgxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjczYWIxNTctNmE2OWE4MGUtOGNiN2Y0MzgtZDc2M2RkZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:06.143695Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmjfs8n7k9asbgekypdx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTk1ZGFmMDgtNzc4MTVmNmQtZGVhZGVkY2EtZmY1NGZmNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:06.444341Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xmjqz9zr6vneza6bpz5bd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTI5ZmQ1ZS00NjFiOTM3YS02OTJhMzIxNS0xMDM1ODMzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T17:52:06.802737Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmk4cannzwfr6bdfzebr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTcwM2MzMTUtMzVhMTQwZmYtYjA0NmRjMjAtMmM1MWM2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:06.820826Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmk4xfg2trnnpsyh030x6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTI5ZmQ1ZS00NjFiOTM3YS02OTJhMzIxNS0xMDM1ODMzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:06.827862Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmk55bsxta52fth8pn0r2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTI5ZmQ1ZS00NjFiOTM3YS02OTJhMzIxNS0xMDM1ODMzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:06.833769Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTI5ZmQ1ZS00NjFiOTM3YS02OTJhMzIxNS0xMDM1ODMzNA==, ActorId: [2:928:2745], ActorState: ExecuteState, TraceId: 01jd7xmk5cety8y17hqkk5k58f, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T17:52:06.844208Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmk5cety8y17hqkk5k58f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTI5ZmQ1ZS00NjFiOTM3YS02OTJhMzIxNS0xMDM1ODMzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2024-11-21T17:52:06.479314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:06.479333Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.479348Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:06.481947Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:06.482073Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:06.482118Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:06.483003Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:06.491640Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:06.491766Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:06.491906Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:06.491921Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:06.491929Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:06.491971Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:06.495684Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:06.495729Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:06.495774Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:06.495778Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:06.495782Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:06.495786Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:06.495846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.495852Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.495869Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:06.495881Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:06.495917Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:06.495922Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:06.495927Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:06.495930Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:06.495934Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:06.495937Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 
2024-11-21T17:52:06.495942Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:06.503055Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.503073Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.503080Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:06.503509Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:06.503533Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:06.503545Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:06.503567Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:06.503576Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:06.503583Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:06.503589Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.503593Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:06.503597Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:06.503601Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:06.503649Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:06.503654Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:06.503657Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:06.503661Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:06.503669Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:06.503672Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:06.503676Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:06.503679Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:06.503684Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:06.524584Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:06.524601Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:06.524607Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:06.524616Z node 1 :TX_DATASHARD 
TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:06.524626Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:06.524705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.524711Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.524717Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:06.524732Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:06.524737Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:06.524771Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:06.524778Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.524783Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:06.524787Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:06.525428Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:06.525439Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:06.525472Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.525477Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.525483Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:06.525489Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:06.525493Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:06.525500Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:06.525505Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:06.525510Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.525514Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:06.525518Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:06.525522Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:06.525557Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:06.525561Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.525564Z node 1 :TX_DATASHARD TRACE: Advance execution plan for 
[1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:06.525568Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:06.525572Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:06.525581Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.525584Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:06.525588Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:06.525591Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:06.525599Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:06.525602Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:06.525605Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:06.525610Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.525613Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:06.525617Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 2024-11-21T17:52:07.019586Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019601Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019615Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019626Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T17:52:07.019632Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019638Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:07.019641Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019643Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:07.019646Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:07.019677Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019680Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019686Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019690Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T17:52:07.019692Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019695Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:07.019697Z node 1 :TX_DATASHARD DEBUG: 
TTxReadSet::Complete at 9437184 2024-11-21T17:52:07.019699Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:07.019717Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019719Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019722Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019725Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:07.019727Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019756Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019759Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019763Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019766Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:07.019769Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019786Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019791Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019794Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:52:07.019800Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019802Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019805Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2024-11-21T17:52:07.019809Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T17:52:07.019822Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019848Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019850Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019854Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019857Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:07.019860Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019871Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019874Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019878Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019882Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:07.019885Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019901Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019904Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019908Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:07.019910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019913Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:07.019925Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.019927Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.019932Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:07.019935Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:07.019937Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.019964Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T17:52:07.019968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.019972Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2024-11-21T17:52:07.019982Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T17:52:07.019984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.019986Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T17:52:07.019992Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:07.019993Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.019995Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T17:52:07.020000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:07.020002Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.020010Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T17:52:07.020016Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:52:07.020018Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.020020Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T17:52:07.020024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:07.020026Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.020028Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T17:52:07.020034Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:07.020036Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.020038Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2024-11-21T17:52:07.020042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:07.020044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:07.020046Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables [GOOD] >> DataShardTxOrder::ZigZag_oo [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup [GOOD] >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> YdbYqlClient::DeleteTableWithDeletedIndex >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier-StreamLookup [GOOD] Test command err: 2024-11-21T17:52:02.876739Z node 1 
:KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:02.877146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:02.877164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000acc/r3tmp/tmpKnOdb3/pdisk_1.dat 2024-11-21T17:52:02.971364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:02.987940Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:03.029998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:03.030030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:03.040482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:03.143891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:03.157424Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:03.157591Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:03.157652Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:03.157692Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:03.163422Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:03.163529Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:03.163545Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:03.163649Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:03.163669Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:03.163676Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:03.163714Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:03.166354Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:03.166406Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:03.166436Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:03.166440Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:03.166443Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:03.166446Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:03.166535Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.166542Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.166643Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:03.166657Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:03.166666Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.166669Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.166674Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:03.166679Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:03.166684Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:03.166688Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:03.166692Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:03.166695Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:03.166698Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:03.166702Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:03.166717Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:03.166720Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:03.166739Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:03.166789Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:03.166800Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:03.166818Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:03.166825Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:03.166832Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:03.166837Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:03.166841Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:03.166887Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:03.166890Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:03.166892Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:03.166895Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:03.166902Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:03.166904Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:03.166906Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:03.166908Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:03.166911Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:03.167128Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:03.167134Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:03.177343Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:03.177361Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:03.177366Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:03.177374Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:03.177384Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:03.350703Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.350722Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:03.350731Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:03.350750Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:03.350755Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:03.350789Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:03.350798Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:03.350803Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:03.350808Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:03.351354Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:03.351363Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:03.351452Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.351456Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:03.351461Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:03.351466Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:03.351469Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:03.351475Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 74976715672. Resolved key sets: 1 2024-11-21T17:52:07.332097Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T17:52:07.332115Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2024-11-21T17:52:07.332158Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:52:07.332200Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715672. Shard resolve complete, resolved shards: 1 2024-11-21T17:52:07.332208Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2024-11-21T17:52:07.332213Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2024-11-21T17:52:07.332220Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:07.332242Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T17:52:07.332314Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:52:07.332324Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1162:2911], 2024-11-21T17:52:07.332328Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1162:2911], 2024-11-21T17:52:07.332331Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T17:52:07.332429Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1162:2911], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2024-11-21T17:52:07.332433Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1162:2911], 2024-11-21T17:52:07.332436Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1162:2911], 2024-11-21T17:52:07.332508Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1164:2911], Recipient [2:1067:2852]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2024-11-21T17:52:07.332525Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:52:07.332533Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2024-11-21T17:52:07.332537Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4000/18446744073709551615 2024-11-21T17:52:07.332542Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T17:52:07.332554Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:52:07.332557Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:52:07.332561Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:07.332564Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:07.332573Z node 2 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T17:52:07.332576Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:52:07.332578Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:07.332580Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:52:07.332582Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:52:07.332590Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2024-11-21T17:52:07.332619Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1164:2911], 0} after executionsCount# 1 2024-11-21T17:52:07.332624Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1164:2911], 0} sends rowCount# 1, bytes# 24, quota rows left# 32766, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:52:07.332634Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1164:2911], 0} finished in read 2024-11-21T17:52:07.332641Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:52:07.332643Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:52:07.332645Z node 2 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:07.332647Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit 
CompletedOperations 2024-11-21T17:52:07.332654Z node 2 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T17:52:07.332656Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:07.332658Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T17:52:07.332661Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:52:07.332764Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1164:2911], Recipient [2:1067:2852]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:52:07.332768Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T17:52:07.332849Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1162:2911], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 159 Tasks { TaskId: 1 CpuTimeUs: 37 FinishTimeMs: 1732211527332 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 11 BuildCpuTimeUs: 26 WaitInputTimeUs: 241 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211527332 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:07.332854Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1162:2911] 2024-11-21T17:52:07.332878Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:07.332884Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2911] TxId: 281474976715672. Ctx: { TraceId: 01jd7xmkmt66g6p5t032jhedcj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODFiZGM4ZWEtNTRmN2M2NjMtYzI3YTcwOGItOTk5NjUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000159s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } } >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate |83.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |83.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |83.6%| [LD] {RESULT} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2024-11-21T17:51:53.706195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:53.706225Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:53.706246Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:53.709567Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:53.709726Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:51:53.709791Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:53.710762Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:53.719092Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:53.719297Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:53.719445Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:51:53.719464Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:51:53.719470Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:51:53.719519Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:53.722895Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:51:53.723006Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:53.723074Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:51:53.723080Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:51:53.723084Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:51:53.723090Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:53.723197Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2024-11-21T17:51:53.723205Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.723232Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:51:53.723258Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:51:53.723328Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:53.723336Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:53.723343Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:51:53.723348Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:53.723353Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:53.723358Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:51:53.723364Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:53.734062Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.734086Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.734095Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:51:53.734398Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:51:53.734405Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:53.734428Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:51:53.734456Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:51:53.734465Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:51:53.734472Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:51:53.734479Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:53.734482Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:51:53.734486Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:51:53.734490Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:53.734559Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:53.734562Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:51:53.734564Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:51:53.734566Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 
2024-11-21T17:51:53.734575Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:51:53.734577Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:51:53.734580Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:51:53.734581Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:53.734585Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:51:53.755679Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:51:53.755705Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:53.755712Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:53.755722Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:51:53.755735Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:53.755837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.755843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.755849Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:51:53.755866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:51:53.755871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:53.755914Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:53.755920Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.755924Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:51:53.755928Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:51:53.756565Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:51:53.756575Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:53.756615Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.756620Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.756625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:53.756633Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:53.756636Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:53.756644Z node 1 :TX_DATASHARD DEBUG: Found 
ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:51:53.756648Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:51:53.756654Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.756657Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:53.756661Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:53.756664Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:51:53.756714Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:51:53.756717Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.756720Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:53.756723Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:51:53.756728Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:51:53.756737Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:53.756740Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:51:53.756743Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:53.756746Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:53.756757Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:51:53.756760Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:51:53.756763Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:51:53.756768Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.756771Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:53.756774Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
aitInRS 2024-11-21T17:52:07.261308Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:07.261311Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T17:52:07.261314Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T17:52:07.261319Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T17:52:07.261397Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T17:52:07.261406Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:07.261416Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:07.261420Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T17:52:07.261424Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T17:52:07.261428Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T17:52:07.261484Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T17:52:07.261489Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T17:52:07.261492Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T17:52:07.261496Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T17:52:07.261502Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:07.261505Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T17:52:07.261509Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T17:52:07.261513Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:07.261516Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T17:52:07.261520Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T17:52:07.261523Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T17:52:07.261567Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:228:2223], Recipient [6:228:2223]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.261573Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.261580Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:07.261583Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:07.261586Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:07.261594Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437184 2024-11-21T17:52:07.261597Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2024-11-21T17:52:07.261614Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261618Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:07.261621Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:07.261627Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:07.261752Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T17:52:07.261759Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261762Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:07.261766Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T17:52:07.261770Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T17:52:07.261774Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261777Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T17:52:07.261781Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:07.261785Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:07.261799Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2024-11-21T17:52:07.261803Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2024-11-21T17:52:07.261806Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2024-11-21T17:52:07.261811Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261814Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:07.261818Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T17:52:07.261823Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2024-11-21T17:52:07.261830Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261834Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T17:52:07.261837Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T17:52:07.261842Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2024-11-21T17:52:07.261847Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261850Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2024-11-21T17:52:07.261854Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T17:52:07.261860Z node 6 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2024-11-21T17:52:07.261865Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261868Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T17:52:07.261871Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T17:52:07.261876Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2024-11-21T17:52:07.261880Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261883Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T17:52:07.261886Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:52:07.261899Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T17:52:07.261958Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T17:52:07.261965Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:07.261983Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.261987Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:52:07.261990Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T17:52:07.261994Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.262079Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T17:52:07.262083Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T17:52:07.262087Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2024-11-21T17:52:07.262090Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T17:52:07.262095Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:07.262098Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T17:52:07.262102Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2024-11-21T17:52:07.262106Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:07.262109Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:07.262112Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:07.262115Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:07.273250Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T17:52:07.273272Z node 6 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T17:52:07.273298Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:07.273305Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T17:52:07.273325Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:07.273334Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:07.273401Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T17:52:07.273405Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T17:52:07.273413Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T17:52:07.273418Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T17:52:07.273426Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:07.273430Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite >> DataShardOutOfOrder::TestOutOfOrderLockLost+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup >> BasicUsage::BrokenCredentialsProvider [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW |83.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup-EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2024-11-21T17:51:44.556119Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732211504556109 2024-11-21T17:51:44.753034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791761146887547:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:44.772806Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791760079981897:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00185c/r3tmp/tmpEnW70H/pdisk_1.dat 2024-11-21T17:51:44.824937Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:44.826059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:44.832474Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:44.834055Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:44.921794Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:44.931912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:44.931946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:44.932611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:44.932626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:44.933121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:44.935654Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:44.935998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63790, node 1 2024-11-21T17:51:44.969379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00185c/r3tmp/yandexd3KiQ5.tmp 2024-11-21T17:51:44.969398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00185c/r3tmp/yandexd3KiQ5.tmp 2024-11-21T17:51:44.974412Z INFO: TTestServer started on Port 26016 GrpcPort 63790 2024-11-21T17:51:44.974386Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00185c/r3tmp/yandexd3KiQ5.tmp 2024-11-21T17:51:44.974473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26016 PQClient connected to localhost:63790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:44.998581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:45.005306Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:45.185388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791764374949309:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.185421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.185530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791764374949339:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.186863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T17:51:45.193603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791764374949341:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T17:51:45.247252Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791765441855701:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:45.247352Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJlNzIyNDUtNTFkZDhmN2ItODQ3NzhkZjEtODcxZDNiMjk=, ActorId: [1:7439791765441855649:2297], ActorState: ExecuteState, TraceId: 01jd7xky1xd0z3t90qc7qk95fh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:45.247799Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:45.248387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:45.308130Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791764374949376:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:45.308392Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGRiNzkyMDktNjY4NGNiNjctYTZkMDhhYTctZjMxZjc0YjQ=, ActorId: [2:7439791764374949307:2277], ActorState: ExecuteState, TraceId: 01jd7xky0y0ybtsdm1vydx834t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:45.308568Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:45.324148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:45.457094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:63790", true, true, 1000); 2024-11-21T17:51:45.539510Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xkyb1cpgmnq6j3ra8w3q1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IwNDY5OTItMTlkNTM0NDYtZGJkM2RhNzktNWY5OGQzZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791765441856118:2936] 2024-11-21T17:51:49.752415Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791761146887547:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:49.752453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:51:49.772733Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791760079981897:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:49.772771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:51.588768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:63790 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:51:51.616907Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:63790 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 Consum ... 03261Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:52:08.203399Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:52:08.203414Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T17:52:08.203509Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T17:52:08.203533Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:50978 2024-11-21T17:52:08.203544Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:50978 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:52:08.203547Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:52:08.203907Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:52:08.203946Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:52:08.203948Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:52:08.203949Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:52:08.203954Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:52:08.204420Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:52:08.218350Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 
2024-11-21T17:52:08.218434Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439791864722086538:2464] connected; active server actors: 1 2024-11-21T17:52:08.218458Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:52:08.218464Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:52:08.218502Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439791864722086538:2464] disconnected; active server actors: 1 2024-11-21T17:52:08.218507Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439791864722086538:2464] disconnected no session 2024-11-21T17:52:08.231969Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:52:08.231988Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:52:08.231991Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439791864722086508:2464] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:52:08.232000Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:52:08.232265Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:08.232285Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439791864722086558:2464], now have 1 active actors on pipe 2024-11-21T17:52:08.232320Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2024-11-21T17:52:08.232409Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:52:08.232421Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:52:08.232457Z node 6 :PERSQUEUE INFO: new Cookie src|9e76183-a55ba5b6-56d22a89-b7304c0a_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:52:08.232492Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T17:52:08.232534Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:52:08.232782Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:52:08.232792Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:52:08.232828Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:52:08.232940Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|9e76183-a55ba5b6-56d22a89-b7304c0a_0 2024-11-21T17:52:08.233315Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211528233 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:52:08.233356Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|9e76183-a55ba5b6-56d22a89-b7304c0a_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:52:08.233549Z :INFO: [] MessageGroupId [src] SessionId [src|9e76183-a55ba5b6-56d22a89-b7304c0a_0] Write session: close. Timeout = 0 ms 2024-11-21T17:52:08.233564Z :INFO: [] MessageGroupId [src] SessionId [src|9e76183-a55ba5b6-56d22a89-b7304c0a_0] Write session will now close 2024-11-21T17:52:08.233571Z :DEBUG: [] MessageGroupId [src] SessionId [src|9e76183-a55ba5b6-56d22a89-b7304c0a_0] Write session: aborting 2024-11-21T17:52:08.233885Z :INFO: [] MessageGroupId [src] SessionId [src|9e76183-a55ba5b6-56d22a89-b7304c0a_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:52:08.233888Z :DEBUG: [] MessageGroupId [src] SessionId [src|9e76183-a55ba5b6-56d22a89-b7304c0a_0] Write session is aborting and will not restart 2024-11-21T17:52:08.233936Z :DEBUG: [] MessageGroupId [src] SessionId [src|9e76183-a55ba5b6-56d22a89-b7304c0a_0] Write session: destroy 2024-11-21T17:52:08.233933Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|9e76183-a55ba5b6-56d22a89-b7304c0a_0 grpc read done: success: 0 data: 2024-11-21T17:52:08.233944Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9e76183-a55ba5b6-56d22a89-b7304c0a_0 grpc read failed 2024-11-21T17:52:08.233950Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9e76183-a55ba5b6-56d22a89-b7304c0a_0 grpc closed 2024-11-21T17:52:08.233955Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9e76183-a55ba5b6-56d22a89-b7304c0a_0 is DEAD 2024-11-21T17:52:08.234229Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:52:08.234341Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:52:08.234359Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439791864722086558:2464] destroyed 2024-11-21T17:52:08.234371Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:52:08.236276Z :INFO: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] Starting read session 2024-11-21T17:52:08.236291Z :DEBUG: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] Starting session to cluster null (localhost:25405) 2024-11-21T17:52:08.236579Z :DEBUG: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:08.236583Z :DEBUG: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:08.236586Z :DEBUG: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T17:52:08.236638Z :ERROR: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T17:52:08.236644Z :DEBUG: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:08.236646Z :DEBUG: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:08.236657Z :INFO: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T17:52:08.236704Z :NOTICE: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:52:08.236709Z :DEBUG: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T17:52:08.236718Z :INFO: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] Closing read session. Close timeout: 0.000000s 2024-11-21T17:52:08.236723Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T17:52:08.236728Z :INFO: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:52:08.236734Z :NOTICE: [/Root] [/Root] [ab126421-2b1f53f6-4c573440-d004e7a8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup-EvWrite >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail+StreamLookup [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::Init ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2024-11-21T17:52:06.609508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:06.609858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:06.609873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ac1/r3tmp/tmpHDkzsF/pdisk_1.dat 2024-11-21T17:52:06.699711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:06.715681Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.757310Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:52:06.757491Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:52:06.757524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:06.757536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:06.767939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:06.870698Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:52:06.870714Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:52:06.870741Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:52:06.877244Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:52:06.877435Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:52:06.877447Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:52:06.877492Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:52:06.877530Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:52:06.877540Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:52:06.877583Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:52:06.877920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:06.878117Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:52:06.878126Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:52:06.891173Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:06.891331Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:06.891392Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T17:52:06.891427Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:06.892499Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:06.897176Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:06.897371Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:06.897396Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:06.897474Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:06.897489Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:06.897493Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:06.897521Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:06.899717Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:06.899757Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:06.899784Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:666:2557] 2024-11-21T17:52:06.899788Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:06.899791Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:52:06.899793Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:06.899808Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:06.899885Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:641:2542] 2024-11-21T17:52:06.899908Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:06.900436Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:641:2542]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:06.900605Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.900609Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.900661Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:06.900673Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:06.900755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:06.900760Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:06.900764Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:06.900768Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:06.900771Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:06.900774Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:06.900778Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:06.900794Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.900796Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.900800Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:630:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T17:52:06.900868Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T17:52:06.900871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:06.900885Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:06.900923Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:06.900931Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:06.900941Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:06.900945Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.900948Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:06.900951Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:06.900953Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:06.900982Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:06.900985Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit 
StoreSchemeTx 2024-11-21T17:52:06.900988Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:06.900997Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:06.901005Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:06.901008Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:06.901011Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:06.901014Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:06.901018Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:06.901101Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:06.901119Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:06.901183Z node 1 :TX_DATASHARD DEBUG: LoadChangeRec ... 2024-11-21T17:52:08.891993Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:08.892000Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:52:08.892014Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2024-11-21T17:52:08.892018Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2024-11-21T17:52:08.892020Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:08.892022Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:52:08.892025Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:52:08.892030Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T17:52:08.892039Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2024-11-21T17:52:08.892084Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T17:52:08.892093Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:08.892095Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:52:08.892097Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:08.892100Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:08.892115Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:08.892117Z node 2 :TX_DATASHARD 
TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:08.892120Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:08.892122Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:52:08.892129Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2024-11-21T17:52:08.892131Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:08.892134Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2024-11-21T17:52:08.902377Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:08.902396Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:08.902406Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:52:08.902429Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:08.902956Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvProposeTransaction 2024-11-21T17:52:08.902968Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 281474976715661 ProcessProposeTransaction 2024-11-21T17:52:08.902979Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:819:2656] DataReq marker# P0 2024-11-21T17:52:08.902997Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2024-11-21T17:52:08.903050Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2024-11-21T17:52:08.903085Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T17:52:08.903104Z node 2 :TX_PROXY DEBUG: Actor# [2:819:2656] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2024-11-21T17:52:08.903158Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:819:2656], Recipient [2:630:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 819 RawX2: 8589937248 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t3\003\000\000\000\000\000\000\021`\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2024-11-21T17:52:08.903163Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:08.903183Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:08.903211Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2024-11-21T17:52:08.903220Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:52:08.903227Z node 
2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T17:52:08.903229Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:52:08.903233Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:08.903235Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:08.903242Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T17:52:08.903253Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2024-11-21T17:52:08.903256Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T17:52:08.903258Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:08.903261Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2024-11-21T17:52:08.903263Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2024-11-21T17:52:08.903267Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T17:52:08.903269Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2024-11-21T17:52:08.903271Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2024-11-21T17:52:08.903273Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T17:52:08.903279Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:819:2656] for [0:281474976715661] at 72075186224037888 2024-11-21T17:52:08.903283Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2024-11-21T17:52:08.903291Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:08.903303Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2024-11-21T17:52:08.903311Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2024-11-21T17:52:08.903314Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2024-11-21T17:52:08.903328Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:819:2656], Recipient [2:630:2536]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2024-11-21T17:52:08.903332Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2024-11-21T17:52:08.903343Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:819:2656], Recipient [2:630:2536]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2024-11-21T17:52:08.903345Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T17:52:08.903354Z node 2 :TX_DATASHARD TRACE: 
StateWork, received event# 2146435072, Sender [2:630:2536], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:08.903356Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:08.903362Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:08.903366Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:52:08.903370Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T17:52:08.903373Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T17:52:08.903377Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2024-11-21T17:52:08.903380Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2024-11-21T17:52:08.903382Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2024-11-21T17:52:08.903384Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2024-11-21T17:52:08.903386Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2024-11-21T17:52:08.903428Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2024-11-21T17:52:08.903433Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:52:08.903437Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:08.903442Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:08.903447Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:08.903455Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:08.903546Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:825:2661], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T17:52:08.903551Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> TopicService::OneConsumer_TheRangesOverlap [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2024-11-21T17:52:06.394160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:06.394180Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.394196Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:06.396823Z node 1 :TX_DATASHARD TRACE: 
StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:06.396952Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:06.397001Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:06.397897Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:06.406315Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:06.406440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:06.406577Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:06.406595Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:06.406605Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:06.406644Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:06.410645Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:06.410710Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:06.410759Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:06.410765Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:06.410770Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:06.410776Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:06.410869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.410877Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.410899Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:06.410916Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:06.410958Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:06.410965Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:06.410971Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:06.410978Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:06.410982Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:06.410987Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:06.410992Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:06.419838Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.419862Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.419870Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:06.420358Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:06.420374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:06.420399Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:06.420428Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:06.420439Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:06.420446Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:06.420454Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.420459Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:06.420463Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:06.420467Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:06.420531Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:06.420536Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:06.420540Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:06.420543Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:06.420553Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:06.420557Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:06.420561Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:06.420564Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:06.420569Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:06.441478Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:06.441504Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:06.441511Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:06.441522Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:06.441536Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:06.441632Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.441640Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.441647Z node 1 
:TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:06.441662Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:06.441667Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:06.441703Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:06.441729Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.441733Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:06.441737Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:06.442432Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:06.442449Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:06.442499Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.442505Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.442511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:06.442518Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:06.442521Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:06.442529Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:06.442533Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:06.442539Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.442543Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:06.442546Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:06.442550Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:06.442587Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:06.442591Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.442594Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:06.442597Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:06.442600Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:06.442610Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.442614Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:06.442617Z 
node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:06.442620Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:06.442631Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:06.442635Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:06.442638Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:06.442643Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.442646Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:06.442649Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:08.845268Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:52:08.845302Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T17:52:08.845307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845311Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T17:52:08.845327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T17:52:08.845329Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845331Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T17:52:08.845340Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T17:52:08.845342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845344Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T17:52:08.845350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T17:52:08.845352Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845354Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T17:52:08.845361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T17:52:08.845362Z node 
1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845364Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T17:52:08.845372Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T17:52:08.845374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845376Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T17:52:08.845380Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T17:52:08.845382Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845384Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T17:52:08.845393Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:52:08.845396Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845397Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T17:52:08.845403Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:52:08.845404Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845406Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T17:52:08.845414Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T17:52:08.845416Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845418Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T17:52:08.845425Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T17:52:08.845427Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845429Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T17:52:08.845435Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 
Flags# 0 Seqno# 45} 2024-11-21T17:52:08.845437Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845438Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T17:52:08.845446Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T17:52:08.845448Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845450Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T17:52:08.845458Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:08.845461Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T17:52:08.845467Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:52:08.845471Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:52:08.845473Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:08.845488Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:08.845490Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T17:52:08.845495Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:52:08.845499Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:52:08.845502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:08.845518Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:08.845520Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T17:52:08.845525Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:52:08.845528Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:52:08.845530Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:08.845543Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:08.845545Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T17:52:08.845550Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:52:08.845553Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet 
step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:52:08.845555Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:08.845790Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:52:08.845793Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845795Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T17:52:08.845806Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:52:08.845808Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845810Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T17:52:08.845818Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:52:08.845820Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845822Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T17:52:08.845829Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:52:08.845831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:08.845832Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors >> DataShardOutOfOrder::UncommittedReadSetAck >> TopicService::DifferentConsumers_TheRangesOverlap >> TPersQueueTest::TopicServiceCommitOffset [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD] >> TPersQueueTest::SchemeshardRestart >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2024-11-21T17:52:06.641244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:06.641602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:06.641617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ab4/r3tmp/tmpAfE2Vq/pdisk_1.dat 2024-11-21T17:52:06.732213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:06.747221Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.789080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:06.789119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:06.799514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:06.902680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.116098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.367468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:884:2699], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.367488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2704], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.367494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.368041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:07.543595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:07.611287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmkp74hm5c54pfafh1ncx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThkOTA5MDItYTI5MGNkMTUtNmRlZmIzOGMtNzQ2MjI0MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:07.630625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmky7c33jsg239gnkgyj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU1MWY4NTktNDQyYzAzYWItZTRiMjcxMC0xMTVmZjBjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:07.679007Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xmkyvcm211v2b3er5eejy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFjNzI5NWEtYjcyMTUxZjctMmU4YmMyNjctYWIxNThkMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2024-11-21T17:52:07.693440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmm0209z7qyddywkapj87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFjNzI5NWEtYjcyMTUxZjctMmU4YmMyNjctYWIxNThkMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2024-11-21T17:52:07.714240Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmm0sab5dvvnr1pjgy9w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q0ODUxZTYtNWVkYTQzNWMtMjU3ZjE3Y2EtZGVlOWI1MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2024-11-21T17:52:07.714814Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1128:2804] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmm0sab5dvvnr1pjgy9w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q0ODUxZTYtNWVkYTQzNWMtMjU3ZjE3Y2EtZGVlOWI1MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2024-11-21T17:52:07.716735Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Q0ODUxZTYtNWVkYTQzNWMtMjU3ZjE3Y2EtZGVlOWI1MDA=, ActorId: [1:1028:2804], ActorState: ExecuteState, TraceId: 01jd7xmm0sab5dvvnr1pjgy9w0, Create QueryResponse for error on request, msg: 2024-11-21T17:52:07.717560Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmFjNzI5NWEtYjcyMTUxZjctMmU4YmMyNjctYWIxNThkMWU=, ActorId: [1:1030:2806], ActorState: ExecuteState, TraceId: 01jd7xmm0209z7qyddywkapj87, Create QueryResponse for error on request, msg: 2024-11-21T17:52:07.717749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmm0sab5dvvnr1pjgy9w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q0ODUxZTYtNWVkYTQzNWMtMjU3ZjE3Y2EtZGVlOWI1MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:07.728348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xmm0209z7qyddywkapj87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFjNzI5NWEtYjcyMTUxZjctMmU4YmMyNjctYWIxNThkMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:07.882417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd7xmm5t6dmpw50x8sm6wt8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI4ZDg1YjEtYTZlZmQ5NjUtNTMzNmZlMDEtNTlhNWI4MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2024-11-21T17:52:08.265411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:08.265443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:08.265462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ab4/r3tmp/tmpnyAfFl/pdisk_1.dat 2024-11-21T17:52:08.341690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.355529Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:08.397757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:08.397794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:08.408415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:08.512870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.720389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.973122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:08.973140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:803:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:08.973149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:08.973767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:09.149948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:09.190715Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmn8cfrw05x5v157h26ht, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzg0OWY5NTktM2RiYWI2M2MtNjlmYTY0N2QtNTY5NTNhMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:09.209862Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmnfjcar5bfx0yy9zxbwa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmQ0NDM2NDUtMTdlOGFlYmQtNmIyMjMxMzEtZDg1OTQwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2024-11-21T17:52:09.616245Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:964:2776]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2024-11-21T17:52:09.616527Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjQ1M2Q4ZGQtOTI0Njc0YWQtZDkxNDZiOTEtOTZhYjliMTE=, ActorId: [2:921:2738], ActorState: ExecuteState, TraceId: 01jd7xmng50vr28d6z2famjakm, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup [GOOD] >> TxOrderInternals::OperationOrder [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderLockLost-StreamLookup [GOOD] Test command err: 2024-11-21T17:52:06.818374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:06.818745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:06.818769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ab3/r3tmp/tmpZx8U8P/pdisk_1.dat 2024-11-21T17:52:06.910830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:06.927196Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.969463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:06.969487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:06.979901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:07.083554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.096826Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:07.097024Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:07.097093Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:07.097128Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:07.102234Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:07.102343Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:07.102358Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:07.102449Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:07.102465Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:07.102470Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:07.102505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:07.104727Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:07.104770Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:07.104798Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:07.104801Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:07.104804Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:07.104807Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:07.104884Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.104888Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.104976Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:07.104987Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:07.104995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:07.104997Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:07.105001Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:07.105006Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:07.105010Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:07.105014Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:07.105018Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:07.105021Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:07.105024Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:07.105027Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:07.105042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:07.105044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:07.105057Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:07.105093Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:07.105101Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:07.105112Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:07.105116Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:07.105118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:07.105122Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:07.105124Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:07.105163Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:07.105166Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:07.105167Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:07.105169Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:07.105175Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:07.105177Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:07.105179Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:07.105181Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:07.105184Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:07.105361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:07.105365Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:07.115553Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:07.115568Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:07.115572Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:07.115579Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:07.115587Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:07.288729Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:07.288745Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:07.288751Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:07.288770Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:07.288775Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:07.288807Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:07.288812Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:07.288816Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:07.288819Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:07.289436Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:07.289447Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:07.289545Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.289550Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.289554Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:07.289558Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:07.289561Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:07.289566Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 6715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1047:2844], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 150 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 123 FinishTimeMs: 1732211530147 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 111 BuildCpuTimeUs: 12 WaitInputTimeUs: 637 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211530146 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:10.147864Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2844] 2024-11-21T17:52:10.147868Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1046:2843], CA [2:1050:2847], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T17:52:10.147871Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1046:2843], CA [2:1050:2847], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T17:52:10.147957Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1046:2843], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 36 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 6 FinishTimeMs: 1732211530147 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 1 BuildCpuTimeUs: 5 WaitInputTimeUs: 767 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211530146 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:10.147965Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1046:2843] 2024-11-21T17:52:10.147972Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T17:52:10.147975Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], CA [2:1048:2845], 2024-11-21T17:52:10.148061Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2845], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 103 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 86 FinishTimeMs: 1732211530147 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 82 BuildCpuTimeUs: 4 WaitInputTimeUs: 675 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211530146 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:10.148065Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2845] 2024-11-21T17:52:10.148068Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T17:52:10.148070Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. 
Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1049:2846], CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T17:52:10.148090Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2846], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 114 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 95 FinishTimeMs: 1732211530148 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 77 BuildCpuTimeUs: 18 WaitInputTimeUs: 774 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211530146 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:10.148094Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2846] 2024-11-21T17:52:10.148097Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T17:52:10.148100Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2847], CA [2:1051:2848], 2024-11-21T17:52:10.148121Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2847], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 44 DurationUs: 2000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 24 FinishTimeMs: 1732211530148 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 16 WaitInputTimeUs: 912 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211530146 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:10.148123Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2847] 2024-11-21T17:52:10.148126Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. 
Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2848], 2024-11-21T17:52:10.148128Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2848], 2024-11-21T17:52:10.148137Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2848], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 45 DurationUs: 2000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 30 FinishTimeMs: 1732211530148 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 22 BuildCpuTimeUs: 8 WaitInputTimeUs: 924 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211530146 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:10.148140Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2848] 2024-11-21T17:52:10.148168Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:10.148174Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2825] TxId: 281474976715666. Ctx: { TraceId: 01jd7xmpbr58ge4y50ackb6hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcxNjgxMS04ZTkyMTg1Ny1kNzA1Zjk5Yi1hNmMzNGE2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000704s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> DemoTx::Scenario_2 [GOOD] >> Secret::Validation [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> DataShardTxOrder::DelayData [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_Init >> VectorIndexBuildTest::BaseCase [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2024-11-21T17:52:06.644757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:06.645072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:06.645088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000abc/r3tmp/tmpja00nK/pdisk_1.dat 2024-11-21T17:52:06.736314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:06.751222Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.793022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:06.793045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:06.803445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:06.906284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.115109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.368031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.368070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.368076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.368820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:07.545046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:07.614409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmkp7atd3jwaymajwdv32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlmOGVmMWYtYTU4OWI0YzgtZGE0N2E3ZjMtNTAxMDc2NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:07.634909Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmkya390dyew7sns2vpbb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZkMDQ0ZmItODBiNDIzMjItNzlkMGFmZTgtYmZmMTEzNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2024-11-21T17:52:07.946818Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xmm708jwq2qxwymxzasj4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgyMmMwZjctNjcxNDNlMWYtNTE0OWMzOTktNTIzYjUzMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T17:52:07.985752Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmm8f5nvd0764d2vkmmzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc2ZjYzMDUtYTM3YjBjOTctYWE0ZTUwZTUtMTA1NDMwNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2024-11-21T17:52:08.000008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmm9mayvtrkpb92wbshpb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc2ZjYzMDUtYTM3YjBjOTctYWE0ZTUwZTUtMTA1NDMwNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2024-11-21T17:52:08.382499Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmmnnddkx53zq5sk2pgj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGUxYmQyYTYtODIzYzc2Yy1kMDczZDI1My0xMzY4NTZlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2024-11-21T17:52:08.404693Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmmpaaztgz0awyd5x776r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgyMmMwZjctNjcxNDNlMWYtNTE0OWMzOTktNTIzYjUzMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2024-11-21T17:52:08.416472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd7xmmppfwv8fcswhxrgp6c1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgyMmMwZjctNjcxNDNlMWYtNTE0OWMzOTktNTIzYjUzMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2024-11-21T17:52:08.425843Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDgyMmMwZjctNjcxNDNlMWYtNTE0OWMzOTktNTIzYjUzMjI=, ActorId: [1:930:2747], ActorState: ExecuteState, TraceId: 01jd7xmmq167zb2ydqxx6rwy0e, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T17:52:08.436516Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xmmq167zb2ydqxx6rwy0e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgyMmMwZjctNjcxNDNlMWYtNTE0OWMzOTktNTIzYjUzMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:08.867035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:08.867066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:08.867082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000abc/r3tmp/tmprwgHUC/pdisk_1.dat 2024-11-21T17:52:08.940272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.953349Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:08.995064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:08.995103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:09.005614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:09.108810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:09.120483Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:09.120667Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:09.120751Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T17:52:09.120782Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:09.127142Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:09.127266Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:09.127282Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:09.127391Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:09.127397Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:09.127402Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:09.127434Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:09.127441Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:09.127455Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:09.127464Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T17:52:09.127467Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:09.127470Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:09.127473Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:09.127517Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:630:2536], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.127520Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.127603Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:09.127611Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:09.127618Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:638:2540], Recipient [2:630:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.127622Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.127626Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# [0:0:0] 2024-11-21T17:52:09.127633Z node 2 :TX_DAT ... :984:2745], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T17:52:10.124084Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:10.124363Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:630:2536]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T17:52:10.124385Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:718:2598]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T17:52:10.134769Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T17:52:10.134813Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:630:2536], Recipient [2:718:2598]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T17:52:10.134819Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.134825Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2024-11-21T17:52:10.134857Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T17:52:10.134872Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:718:2598], Recipient [2:630:2536]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T17:52:10.134875Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.134879Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2024-11-21T17:52:10.545231Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd7xmpqr48araq9xx6j0m310, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q0YjRmZTgtMzYwN2Q1NWMtOTZjZDUwMmItNjI3NmNlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:10.546365Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1051:2841], Recipient [2:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2024-11-21T17:52:10.546411Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:52:10.546431Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2024-11-21T17:52:10.546452Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T17:52:10.546456Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:52:10.546462Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:10.546467Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:10.546485Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2024-11-21T17:52:10.546494Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T17:52:10.546497Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:10.546501Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:52:10.546505Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:52:10.546523Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T17:52:10.546601Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:52:10.546609Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2024-11-21T17:52:10.546615Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1051:2841], 0} after executionsCount# 1 2024-11-21T17:52:10.546624Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1051:2841], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:52:10.546645Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1051:2841], 0} finished in read 2024-11-21T17:52:10.546661Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T17:52:10.546665Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 
executing on unit ExecuteRead 2024-11-21T17:52:10.546669Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:10.546672Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:52:10.546683Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T17:52:10.546686Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:10.546693Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T17:52:10.546698Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:52:10.546717Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:52:10.546800Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:630:2536]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2024-11-21T17:52:10.546852Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1053:2842], Recipient [2:718:2598]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2024-11-21T17:52:10.546869Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T17:52:10.546880Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2024-11-21T17:52:10.546891Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T17:52:10.546894Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2024-11-21T17:52:10.546898Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:10.546901Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T17:52:10.546909Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2024-11-21T17:52:10.546915Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T17:52:10.546917Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:10.546920Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T17:52:10.546924Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2024-11-21T17:52:10.546933Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T17:52:10.546964Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 
2024-11-21T17:52:10.546970Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2024-11-21T17:52:10.546974Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1053:2842], 0} after executionsCount# 1 2024-11-21T17:52:10.546978Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1053:2842], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:52:10.546986Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1053:2842], 0} finished in read 2024-11-21T17:52:10.546993Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T17:52:10.546996Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T17:52:10.546999Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T17:52:10.547005Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2024-11-21T17:52:10.547010Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T17:52:10.547013Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T17:52:10.547016Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2024-11-21T17:52:10.547020Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T17:52:10.547038Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T17:52:10.547155Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:718:2598]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2024-11-21T17:52:10.547288Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1051:2841], Recipient [2:630:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:52:10.547296Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T17:52:10.547616Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1053:2842], Recipient [2:718:2598]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:52:10.547625Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> SystemView::TopPartitionsRanges [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> DemoTx::Scenario_3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: 2024-11-21T17:49:43.003388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:49:43.003755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:49:43.003771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001c45/r3tmp/tmpATlsGg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29694, node 1 TClient is connected to server localhost:32070 2024-11-21T17:49:43.125552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:49:43.143216Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:49:43.143953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:49:43.143969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:49:43.143973Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:49:43.144027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:49:43.185891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:49:43.185936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:49:43.196539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T17:49:55.738694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:722:2601], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:49:55.738719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: Unexpected token '-' : syntax error... ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2024-11-21T17:50:05.990379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2615], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:05.990424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:05.991753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2024-11-21T17:50:06.031383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:855:2691], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.031419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.031481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:860:2696], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:50:06.032248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:50:06.187693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:862:2698], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:50:06.339753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:50:06.404293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2024-11-21T17:50:06.751541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2024-11-21T17:50:07.139083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:50:07.242727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:50:07.849102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:50:08.137284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2024-11-21T17:50:08.845953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:50:08.845991Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:50:40.729743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2024-11-21T17:50:41.195791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715727:0, at schemeshard: 72057594046644480 2024-11-21T17:50:41.822847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715738:0, at schemeshard: 72057594046644480 2024-11-21T17:50:41.924053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715741:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
<main>: Error: Execution, code: 1060
<main>:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2024-11-21T17:51:03.752276Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3815:4928], TxId: 281474976715774, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTcyYjcxZmMtZDQzODg2YmYtZTI5NmNiZTYtYmIyMDVkOTM=. TraceId : 01jd7xjnhcf2fgvakq9jehpe3z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T17:51:03.753621Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3816:4929], TxId: 281474976715774, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTcyYjcxZmMtZDQzODg2YmYtZTI5NmNiZTYtYmIyMDVkOTM=. CustomerSuppliedId : . TraceId : 01jd7xjnhcf2fgvakq9jehpe3z. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:3812:4858], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:51:03.753843Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTcyYjcxZmMtZDQzODg2YmYtZTI5NmNiZTYtYmIyMDVkOTM=, ActorId: [1:3717:4858], ActorState: ExecuteState, TraceId: 01jd7xjnhcf2fgvakq9jehpe3z, Create QueryResponse for error on request, msg: 2024-11-21T17:51:03.755363Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jd7xjnfqdkfqcjfwtnx0xrwm" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NTcyYjcxZmMtZDQzODg2YmYtZTI5NmNiZTYtYmIyMDVkOTM=" tx_control { tx_id: "01jd7xjnfqdkfqcjfwtnx0xrwm" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
<main>: Error: Execution, code: 1060
<main>:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2024-11-21T17:51:25.377825Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4470:5409], for# root@builtin, access# DescribeSchema 2024-11-21T17:51:25.377857Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4470:5409], for# root@builtin, access# DescribeSchema 2024-11-21T17:51:25.378100Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4467:5406], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:25.378330Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjIyMDY5MDYtYWI5M2U5YzItODJiMTRmNTUtZjJiZmE1NQ==, ActorId: [1:4459:5399], ActorState: ExecuteState, TraceId: 01jd7xkap038zz3tgdjv3vvegw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2024-11-21T17:51:36.157403Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2024-11-21T17:52:09.563734Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715878. Ctx: { TraceId: 01jd7xmnt3ad92vpn517xfwrk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU2MmJhYmItZTUxZTg3NzEtNjVmNmFkNjktMzdmNDVlYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2024-11-21T17:52:02.888820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:02.888843Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:02.888858Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:02.891450Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:02.891586Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:02.891635Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:02.892588Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:02.900787Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:02.900890Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:02.900968Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:02.900980Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:02.900985Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:02.901007Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:02.902990Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:02.903027Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:02.903064Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:02.903066Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:02.903069Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:02.903072Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:02.903118Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.903123Z node 1 :TX_DATASHARD TRACE: 
StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.903137Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:02.903150Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:02.903179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:02.903184Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:02.903188Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:02.903191Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:02.903194Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:02.903197Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:02.903200Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:02.908943Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.908960Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.908967Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:02.909259Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:02.909267Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:02.909286Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:02.909312Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:02.909321Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:02.909328Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:02.909333Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:02.909336Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:02.909339Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:02.909342Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:02.909398Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:02.909402Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:02.909404Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:02.909406Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:02.909414Z node 1 :TX_DATASHARD TRACE: 
Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:02.909415Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:02.909418Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:02.909419Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:02.909423Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:02.930281Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:02.930309Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:02.930315Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:02.930326Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:02.930337Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:02.930424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.930430Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.930436Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:02.930450Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:02.930453Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:02.930503Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:02.930512Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.930515Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:02.930518Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:02.931085Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:02.931098Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:02.931141Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.931145Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.931151Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:02.931157Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:02.931161Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:02.931170Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 
2024-11-21T17:52:02.931174Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:02.931181Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.931185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:02.931189Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:02.931193Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:02.931230Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:02.931232Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.931234Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:02.931237Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:02.931239Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:02.931246Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:02.931248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:02.931250Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:02.931253Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:02.931267Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:02.931270Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:02.931274Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:02.931279Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.931282Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:02.931286Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
e 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T17:52:10.570403Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T17:52:10.570405Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2024-11-21T17:52:10.570407Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T17:52:10.570409Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T17:52:10.570411Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:52:10.570413Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2024-11-21T17:52:10.570493Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2024-11-21T17:52:10.570500Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:10.570508Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T17:52:10.570510Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:52:10.570512Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2024-11-21T17:52:10.570514Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.570547Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2024-11-21T17:52:10.570550Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2024-11-21T17:52:10.570552Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2024-11-21T17:52:10.570555Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2024-11-21T17:52:10.570558Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T17:52:10.570559Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2024-11-21T17:52:10.570562Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2024-11-21T17:52:10.570564Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:10.570566Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:10.570568Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2024-11-21T17:52:10.570615Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:231:2226], Recipient [2:231:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:10.570619Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:10.570622Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:10.570624Z node 2 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:10.570628Z node 2 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2024-11-21T17:52:10.570630Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2024-11-21T17:52:10.570632Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570634Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:10.570636Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:10.570638Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:10.570689Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2024-11-21T17:52:10.570692Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570694Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:10.570696Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T17:52:10.570698Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T17:52:10.570701Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570702Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T17:52:10.570704Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:10.570706Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:10.570711Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2024-11-21T17:52:10.570713Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2024-11-21T17:52:10.570715Z node 2 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2024-11-21T17:52:10.570718Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570719Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:10.570721Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T17:52:10.570723Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2024-11-21T17:52:10.570727Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570729Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T17:52:10.570730Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T17:52:10.570732Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2024-11-21T17:52:10.570734Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570736Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2024-11-21T17:52:10.570738Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T17:52:10.570740Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2024-11-21T17:52:10.570742Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570744Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T17:52:10.570747Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T17:52:10.570749Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2024-11-21T17:52:10.570751Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570752Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T17:52:10.570754Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:52:10.570756Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2024-11-21T17:52:10.570789Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2024-11-21T17:52:10.570793Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:10.570797Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:10.570799Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:52:10.570801Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2024-11-21T17:52:10.570803Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.570815Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2024-11-21T17:52:10.570817Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2024-11-21T17:52:10.570819Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2024-11-21T17:52:10.570821Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2024-11-21T17:52:10.570824Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T17:52:10.570826Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2024-11-21T17:52:10.570827Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2024-11-21T17:52:10.570861Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:10.570864Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:10.570866Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:10.570868Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has 
no ready operations at 9437184 2024-11-21T17:52:10.582227Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2024-11-21T17:52:10.582250Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2024-11-21T17:52:10.582262Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.582269Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.582287Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:10.582295Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.582338Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.582341Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.582349Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:10.582352Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> DataShardTxOrder::RandomPoints_DelayData >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> ReadSessionImplTest::DataReceivedCallback [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2024-11-21T17:52:07.124841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791861553111469:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:07.124903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011ea/r3tmp/tmpd4t8AR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15016, node 1 2024-11-21T17:52:07.178302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:52:07.178316Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:07.187162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:07.187175Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:07.187176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:07.187214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:07.209427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.210475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:07.210504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.211129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:07.211187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:07.211197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:52:07.211537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:07.211546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:52:07.211579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:52:07.211959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.212829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211527256, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:07.212843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:52:07.212891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:52:07.213252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:07.213294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:07.213319Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:52:07.213335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:52:07.213347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:52:07.213365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:52:07.213686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:52:07.213701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:52:07.213703Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:07.213712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:52:07.225058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:07.225082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:07.226507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:07.384857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791861553112395:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.384879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:07.469243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.469350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:52:07.469495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:07.469507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.470142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:52:07.470193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:07.470247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:07.470269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:07.470332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:52:07.470479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:07.470487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:07.470490Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:07.470515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:07.470519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:07.470520Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:07.471982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:07.472014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:52:07.472351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:52:07.524551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:07.524562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:07.524588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T17:52:07.525070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:52:07.525786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211527571, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:07.525798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211527571 2024-11-21T17:52:07.525819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:52:07.526086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:07.526150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:07.526165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:52:07.526432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:07.526444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:07.526447Z node 1 :FLAT_TX_SCHEMESHARD INFO: ... node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.391279Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.391280Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:52:10.391290Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.391292Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.391293Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:52:10.391302Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.391309Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.391310Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T17:52:10.391318Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 
2024-11-21T17:52:10.391323Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.391324Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T17:52:10.399280Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211530448, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:10.399301Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211530448, at schemeshard: 72057594046644480 2024-11-21T17:52:10.399341Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T17:52:10.399362Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211530448, at schemeshard: 72057594046644480 2024-11-21T17:52:10.399378Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T17:52:10.399386Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211530448, at schemeshard: 72057594046644480 2024-11-21T17:52:10.399392Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T17:52:10.399403Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732211530448 2024-11-21T17:52:10.399413Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T17:52:10.399888Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:10.400008Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:10.400031Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T17:52:10.400048Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T17:52:10.400086Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T17:52:10.400097Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T17:52:10.400106Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:52:10.400110Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T17:52:10.400116Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T17:52:10.400119Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T17:52:10.400123Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:52:10.400132Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T17:52:10.400135Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T17:52:10.400136Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976715659:3 2024-11-21T17:52:10.400139Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T17:52:10.400551Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.400574Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.400578Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:52:10.400616Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.400625Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.400625Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:52:10.400636Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.400639Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.400639Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:52:10.400648Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.400649Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.400650Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:52:10.400658Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:10.400671Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:10.400672Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T17:52:10.400676Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T17:52:10.401091Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439791871331670411:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:10.492078Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:52:10.492123Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:52:10.493167Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:52:10.496166Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439791871331670503:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:52:10.496688Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZGIwOWE4MjMtYjk1OTA5ZjYtMjQxOWEzODgtMjQyZmIxMjY=, ActorId: [10:7439791871331670221:2295], ActorState: ExecuteState, TraceId: 01jd7xmpmn2c230wr90h3q5dgv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:52:10.500528Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439791871331670528:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:52:10.500609Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZGIwOWE4MjMtYjk1OTA5ZjYtMjQxOWEzODgtMjQyZmIxMjY=, ActorId: [10:7439791871331670221:2295], ActorState: ExecuteState, TraceId: 01jd7xmpr1acxbjdybzkne7z6m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TopPartitionsRanges [GOOD] Test command err: 2024-11-21T17:51:35.914506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791723280961934:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:35.914523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001985/r3tmp/tmpJUgRgF/pdisk_1.dat 2024-11-21T17:51:35.964539Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26100, node 1 2024-11-21T17:51:35.977810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:35.977822Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:35.977824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:35.977853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:36.014589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.014623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.015639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.043125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:36.186466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791727575929824:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.186492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791727575929835:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.186499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.187390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T17:51:36.189194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791727575929838:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:51:36.290543Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439791727575929917:2295] TxId: 281474976715661. Ctx: { TraceId: 01jd7xkn3ha45qfa53m1qa1v08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY1OGE2ZmEtNDcyZGNlMDAtNGQwNjMyN2EtNzA2M2I4ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T17:51:36.290626Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xkn3ha45qfa53m1qa1v08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY1OGE2ZmEtNDcyZGNlMDAtNGQwNjMyN2EtNzA2M2I4ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.295503Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791727575929923:2308], owner: [1:7439791727575929920:2306], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:36.296601Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791727575929923:2308], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:36.297587Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791727575929923:2308], row count: 1, finished: 1 2024-11-21T17:51:36.297601Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791727575929923:2308], owner: [1:7439791727575929920:2306], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:36.298679Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211496290, txId: 281474976715660] shutting down 2024-11-21T17:51:37.315809Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439791731870897270:2314] TxId: 281474976715663. Ctx: { TraceId: 01jd7xkpamefrtf6x2c1p7rmbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIzOTQ5MDUtODg5MTE5ZjAtM2Q2MmNlYzMtZjZkMWE1MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T17:51:37.315872Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xkpamefrtf6x2c1p7rmbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIzOTQ5MDUtODg5MTE5ZjAtM2Q2MmNlYzMtZjZkMWE1MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:37.316441Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791731870897277:2322], owner: [1:7439791731870897273:2320], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:37.316580Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791731870897277:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:37.316683Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791731870897277:2322], row count: 1, finished: 1 2024-11-21T17:51:37.316694Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791731870897277:2322], owner: [1:7439791731870897273:2320], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:37.317189Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211497315, txId: 281474976715662] shutting down 2024-11-21T17:51:38.335128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xkqae3rkbc3jpnadmktcx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEwMDcwZTYtOWM2ZjhmNzQtMzJmNmQxY2ItZTIzNDYxNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:38.335620Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791736165864610:2333], owner: [1:7439791736165864606:2331], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:38.335753Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791736165864610:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:38.335824Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791736165864610:2333], row count: 1, finished: 1 2024-11-21T17:51:38.335834Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791736165864610:2333], owner: [1:7439791736165864606:2331], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:38.336254Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211498334, txId: 281474976715664] shutting down 2024-11-21T17:51:39.352786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xkra93werxhcvph2vf4jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzUxMzIyYWQtYzc0ZmYwZmUtM2Q1ZjIxZWMtZGJlN2NmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:51:39.353310Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791740460831943:2344], owner: [1:7439791740460831939:2342], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:39.353500Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791740460831943:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:39.353611Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791740460831943:2344], row count: 1, finished: 1 2024-11-21T17:51:39.353627Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791740460831943:2344], owner: [1:7439791740460831939:2342], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:39.354174Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211499352, txId: 281474976715666] shutting down 2024-11-21T17:51:40.379733Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xksa30n242w24be9mksqb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGU0Y2NlYWMtODQ3ZDZmNDQtMTk0MDliODgtNmMzYzQ1OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:40.380547Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791744755799275:2355], owner: [1:7439791744755799272:2353], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:40.384779Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791744755799275:2355], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:40.384917Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791744755799275:2355], row count: 1, finished: 1 2024-11-21T17:51:40.384925Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791744755799275:2355], owner: [1:7439791744755799272:2353], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T17:51:40.385667Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211500379, txId: 281474976715668] shutting down 2024-11-21T17:51:40.914772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791723280961934:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:40.914807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Ro ... 
d: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T17:52:10.045559Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732211529000000 Rank: 3 } InclusiveFrom: true To { IntervalEndUs: 1732211529000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 3, bytes# 228, next# 2024-11-21T17:52:10.045695Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439791872869789361:2435], row count: 3, finished: 1 2024-11-21T17:52:10.045716Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439791872869789361:2435], owner: [7:7439791872869789357:2433], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T17:52:10.046288Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211530042, txId: 281474976715681] shutting down 2024-11-21T17:52:10.061838Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd7xmp9z0y7x3bxqbqzjtvgd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NGE3Y2U0MGEtMWJiMTlmNDQtNGJkODlhYzEtNjI4ZmJhMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:10.062274Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439791872869789393:2444], owner: [7:7439791872869789389:2442], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T17:52:10.062388Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439791872869789393:2444], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T17:52:10.062586Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732211529000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1732211529000000 Rank: 3 } InclusiveTo: false Type: TOP_PARTITIONS_ONE_MINUTE , rows# 2, bytes# 153, next# 2024-11-21T17:52:10.062661Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439791872869789393:2444], row count: 2, finished: 1 2024-11-21T17:52:10.062669Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439791872869789393:2444], owner: [7:7439791872869789389:2442], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T17:52:10.063200Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211530061, txId: 281474976715683] shutting down 2024-11-21T17:52:10.075039Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jd7xmpafd1p5zyvw04zs11wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDdkOGRjNTYtOGMwZWVmYzEtNDA2NDE1YjAtYzk0NzYzYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:10.075437Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439791872869789425:2453], owner: [7:7439791872869789421:2451], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T17:52:10.075589Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439791872869789425:2453], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T17:52:10.075815Z node 10 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732211529000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1732211529000000 Rank: 3 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 3, bytes# 229, next# 2024-11-21T17:52:10.075895Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439791872869789425:2453], row count: 3, finished: 1 2024-11-21T17:52:10.075905Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439791872869789425:2453], owner: [7:7439791872869789421:2451], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T17:52:10.076327Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211530074, txId: 281474976715685] shutting down 2024-11-21T17:52:10.077340Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T17:52:10.077531Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:52:10.077561Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T17:52:10.077249Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [11:7439791824696064957:2058] 2024-11-21T17:52:10.077590Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7439791827965289636:2063], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T17:52:10.077633Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7439791827965289636:2063], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T17:52:10.077664Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:52:10.077682Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T17:52:10.077733Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:52:10.077708Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439791827695758169:2056], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T17:52:10.077730Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439791827695758169:2056], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T17:52:10.077926Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2024-11-21T17:52:10.078001Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7439791824696064957:2058], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T17:52:10.078019Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7439791824696064957:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T17:52:10.078053Z node 7 :HIVE WARN: 
HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:52:10.079147Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079161Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079165Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079168Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079171Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079175Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079178Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079182Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[8:7439791827965289857:2106], Type=268959746 2024-11-21T17:52:10.079257Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439791827672774728:2058], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T17:52:10.079297Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439791827672774728:2058], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T17:52:10.140223Z node 9 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [9:7439791827672774728:2058] 2024-11-21T17:52:10.140860Z node 9 :SYSTEM_VIEWS DEBUG: Send counters: service id# [9:7439791827672774728:2058], processor id# 72075186224037899, database# /Root/Tenant2, generation# 16168327605773764193, node id# 9, is retrying# 0, is labeled# 0 2024-11-21T17:52:10.172281Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439791827672774728:2058], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T17:52:10.172319Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439791827672774728:2058], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T17:52:10.176570Z node 7 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [7:7439791825625146656:2072] 2024-11-21T17:52:10.234971Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7439791824696064957:2058] 2024-11-21T17:52:10.235495Z node 11 :SYSTEM_VIEWS DEBUG: Send counters: service id# [11:7439791824696064957:2058], processor id# 72075186224037893, database# /Root/Tenant1, generation# 18299920076631838861, node id# 11, is retrying# 0, is labeled# 0 2024-11-21T17:52:10.268411Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7439791824696064957:2058], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T17:52:10.268475Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7439791824696064957:2058], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T17:52:10.428015Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [8:7439791827965289574:2063] 2024-11-21T17:52:10.502759Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service 
id# [10:7439791827695758113:2063] 2024-11-21T17:52:10.575922Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7439791827695758169:2056] 2024-11-21T17:52:10.576387Z node 10 :SYSTEM_VIEWS DEBUG: Send counters: service id# [10:7439791827695758169:2056], processor id# 72075186224037893, database# /Root/Tenant1, generation# 507410954259298730, node id# 10, is retrying# 0, is labeled# 0 2024-11-21T17:52:10.607693Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [10:7439791827695758113:2063] 2024-11-21T17:52:10.607858Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439791827695758169:2056], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T17:52:10.607903Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439791827695758169:2056], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T17:52:10.676336Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [8:7439791827965289636:2063] 2024-11-21T17:52:10.709560Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [8:7439791827965289636:2063] 2024-11-21T17:52:10.709880Z node 8 :SYSTEM_VIEWS DEBUG: Send counters: service id# [8:7439791827965289636:2063], processor id# 72075186224037899, database# /Root/Tenant2, generation# 5421814064413342902, node id# 8, is retrying# 0, is labeled# 0 2024-11-21T17:52:10.743537Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7439791827965289636:2063], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T17:52:10.743610Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7439791827965289636:2063], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T17:52:10.780731Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [8:7439791827965289574:2063] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:58.714121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:58.714146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.714152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:58.714156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:58.714169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:58.714172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2024-11-21T17:51:58.714181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.714260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:58.725632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:58.725654Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:58.728103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:58.728807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:58.728844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:58.730165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:58.730338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:58.730439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.730499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:58.731517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.731789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.731799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.731831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:58.731838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.731845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:58.731856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.733079Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:58.748762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:58.748824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.748878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:58.748927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:58.748935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749501Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:58.749563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:58.749575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:58.749579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:58.749927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:58.749941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:58.750227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.750235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.750240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.750245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.750759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:58.751118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:58.751160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:58.751307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.751327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:58.751332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.751377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:58.751384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.751405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.751415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:58.751754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.751761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.751793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.751798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:58.751865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.751872Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:58.751882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:58.751886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.751892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:58.751896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.751900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:58.751903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:58.751913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:58.751918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:58.751922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:58.752192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.752203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.752208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:58.752212Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:58.752216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.752247Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & r ... 70:20734], Recipient [1:16873:17603]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:52:10.800966Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T17:52:10.801056Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409573 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409586 Forgetting tablet 72075186233409586 2024-11-21T17:52:10.801143Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:16863:17595], Recipient [1:16873:17603]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:52:10.801188Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409586 2024-11-21T17:52:10.801194Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409586 2024-11-21T17:52:10.801414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409573:13 hive 72057594037968897 at ss 72075186233409573 2024-11-21T17:52:10.801494Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409573 ShardLocalIdx: 13 TxId_Deprecated: 13 TabletID: 72075186233409585 2024-11-21T17:52:10.801519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72075186233409573 ShardLocalIdx: 14, at schemeshard: 72075186233409573 2024-11-21T17:52:10.801568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409573, LocalPathId: 13] was 1 2024-11-21T17:52:10.801707Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269552133, Sender [1:13574:14529], Recipient [1:15900:16704]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72075186233409573 State: 4 2024-11-21T17:52:10.801711Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2024-11-21T17:52:10.801715Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409585 state Offline 2024-11-21T17:52:10.801746Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:15892:16698], Recipient [1:15900:16704]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T17:52:10.801771Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409585 2024-11-21T17:52:10.801775Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409585 Forgetting tablet 72075186233409585 2024-11-21T17:52:10.801990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 13 ShardOwnerId: 72075186233409573 ShardLocalIdx: 13, at schemeshard: 72075186233409573 2024-11-21T17:52:10.802016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409573, LocalPathId: 12] was 1 2024-11-21T17:52:10.802154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72075186233409573 2024-11-21T17:52:10.802159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409573, LocalPathId: 13], at schemeshard: 72075186233409573 2024-11-21T17:52:10.802170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409573, LocalPathId: 3] was 5 2024-11-21T17:52:10.802174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409573, LocalPathId: 12], at schemeshard: 
72075186233409573 2024-11-21T17:52:10.802179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409573, LocalPathId: 3] was 4 2024-11-21T17:52:10.802606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409573:14 2024-11-21T17:52:10.802613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409573:14 tabletId 72075186233409586 2024-11-21T17:52:10.802915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409573:13 2024-11-21T17:52:10.802921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409573:13 tabletId 72075186233409585 2024-11-21T17:52:10.802962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409573 2024-11-21T17:52:10.824589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 6500, transactions count in step: 1, at schemeshard: 72075186233409573 2024-11-21T17:52:10.824652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735772 AckTo { RawX1: 13684 RawX2: 4294981907 } } Step: 6500 MediatorID: 72075186233409575 TabletID: 72075186233409573, at schemeshard: 72075186233409573 2024-11-21T17:52:10.824665Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409573] TDropLock TPropose opId# 281474976735772:0 HandleReply TEvOperationPlan: step# 6500 2024-11-21T17:52:10.824672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735772:0 128 -> 240 2024-11-21T17:52:10.825345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735772:0, at schemeshard: 72075186233409573 2024-11-21T17:52:10.825358Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409573] TDone opId# 281474976735772:0 ProgressState 2024-11-21T17:52:10.825372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735772:0 progress is 1/1 2024-11-21T17:52:10.825377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735772 ready parts: 1/1 2024-11-21T17:52:10.825384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735772, ready parts: 1/1, is published: true 2024-11-21T17:52:10.825396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:13574:14529] message: TxId: 281474976735772 2024-11-21T17:52:10.825401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735772 ready parts: 1/1 2024-11-21T17:52:10.825407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735772:0 2024-11-21T17:52:10.825411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735772:0 2024-11-21T17:52:10.825421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409573, LocalPathId: 2] was 4 2024-11-21T17:52:10.826038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735772 2024-11-21T17:52:10.826050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735772 2024-11-21T17:52:10.826063Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735772, buildInfoId: 115 2024-11-21T17:52:10.826079Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 
281474976735772, buildInfo: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409573, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409573, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:14435:15344], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:10.826525Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2024-11-21T17:52:10.826539Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409573, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409573, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:14435:15344], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:10.826544Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T17:52:10.826996Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2024-11-21T17:52:10.827007Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409573, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409573, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:14435:15344], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:10.827010Z 
node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2024-11-21T17:52:10.827024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2024-11-21T17:52:10.827028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:14639:15508] TestWaitNotification: OK eventTxId 115 2024-11-21T17:52:10.827787Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2024-11-21T17:52:10.827841Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> DataShardOutOfOrder::TestImmediateQueueThenSplit |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |83.7%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2024-11-21T17:52:06.421770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:06.421789Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.421803Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:06.424265Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:06.424379Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:06.424419Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:06.425089Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:06.432610Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:06.432738Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:06.432849Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:06.432863Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:06.432869Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:06.432906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:06.435670Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:06.435739Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state 
WaitScheme: missing processing params 2024-11-21T17:52:06.435791Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:06.435796Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:06.435801Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:06.435807Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:06.435880Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.435886Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.435905Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:06.435919Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:06.435968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:06.435974Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:06.435980Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:06.435984Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:06.435988Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:06.435993Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:06.436004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:06.443314Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.443337Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.443347Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:06.443829Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:06.443845Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:06.443861Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:06.443886Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:06.443895Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:06.443902Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:06.443907Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.443909Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 
2024-11-21T17:52:06.443912Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:06.443915Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:06.443963Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:06.443965Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:06.443967Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:06.443970Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:06.443977Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:06.443979Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:06.443981Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:06.443983Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:06.443986Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:06.464950Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:06.464971Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:06.464978Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:06.464990Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:06.465004Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:06.465112Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.465118Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.465125Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:06.465142Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:06.465146Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:06.465191Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:06.465224Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.465228Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:06.465232Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:06.465899Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:06.465909Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2024-11-21T17:52:06.465943Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.465947Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.465952Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:06.465958Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:06.465961Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:06.465967Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:06.465971Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:06.465976Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.465979Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:06.465983Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:06.465986Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:06.466021Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:06.466024Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.466027Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:06.466031Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:06.466034Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:06.466043Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.466046Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:06.466049Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:06.466052Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:06.466061Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:06.466064Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:06.466067Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:06.466072Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:06.466088Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:06.466091Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
t heartbeats: at tablet# 9437186 2024-11-21T17:52:10.790123Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:52:10.790128Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2024-11-21T17:52:10.790136Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:10.790142Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T17:52:10.790146Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:52:10.790174Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:52:10.790179Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T17:52:10.790186Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:10.790192Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:52:10.790196Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:52:10.790215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:52:10.790219Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:52:10.790227Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:10.790232Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:52:10.790236Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:52:10.790478Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T17:52:10.790486Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790492Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T17:52:10.790543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T17:52:10.790549Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790552Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T17:52:10.790567Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 
Flags# 0 Seqno# 46} 2024-11-21T17:52:10.790570Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790573Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T17:52:10.790584Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T17:52:10.790587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790591Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T17:52:10.790605Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T17:52:10.790609Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790612Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T17:52:10.790620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:52:10.790623Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790626Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T17:52:10.790639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:52:10.790642Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790645Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T17:52:10.790666Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.790671Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.790682Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:10.790688Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:52:10.790692Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.790732Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.790736Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.790744Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:52:10.790749Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.790773Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.790778Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.790786Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:10.790792Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:52:10.790816Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.790849Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.790853Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.790860Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:10.790866Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:52:10.790869Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.790890Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.790895Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.790901Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:52:10.790904Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.790924Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.790928Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T17:52:10.790934Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:10.790939Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:52:10.790943Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.790989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:52:10.790994Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.790998Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T17:52:10.791014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:52:10.791017Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.791021Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T17:52:10.791036Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:52:10.791039Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.791042Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T17:52:10.791052Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:52:10.791055Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:10.791059Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] Test command err: 2024-11-21T17:52:07.818234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791861856549170:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:07.818375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011f8/r3tmp/tmpkVySkI/pdisk_1.dat 2024-11-21T17:52:07.873868Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23242, node 1 2024-11-21T17:52:07.894331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:07.894342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:07.894344Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:07.894370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28298 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:52:07.918806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:07.918834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:07.920355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:07.949881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.950823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:07.950857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.951734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:07.951798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:07.951805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:07.952305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:07.952314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:07.952348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:07.952702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:07.956108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211528005, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:07.956124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:07.956189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:07.956654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:07.956704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:07.956719Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:07.956728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:07.956739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:07.956750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:07.957244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:07.957279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:07.957284Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:07.957304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:08.085820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.085943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:52:08.086069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.086080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.086144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/Test/uid, operationId: 281474976715658:1, transaction: WorkingDir: "/Root/Test" OperationType: ESchemeOpCreateTableIndex 
CreateTableIndex { Name: "uid" KeyColumnNames: "uid" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2024-11-21T17:52:08.086195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.086204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test/uid/indexImplTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2024-11-21T17:52:08.086259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 1 -> 2 2024-11-21T17:52:08.086321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.087132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/Test 2024-11-21T17:52:08.087228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:08.087325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:08.087350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTableIndex TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:08.087422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:08.087474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:08.087501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:52:08.087578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:08.087594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:08.087597Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:08.087656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:08.087675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:08.087680Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:08.087691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:08.087697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 
2024-11-21T17:52:08.087698Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T17:52:08.087707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:08.087709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:08.087711Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2024-11-21T17:52:08.089354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:08.089380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:52:08.089436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024 ... :0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:10.680270Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:52:10.680935Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:10.680940Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:10.681453Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
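The schemeshard lines above report each sub-operation's progress as numeric state transitions (1 -> 2, 2 -> 3, 3 -> 128, 128 -> 240, and 128 -> 129 -> 240 for table creation) and count how many scheme-board publications are still in flight before subscribers waiting on NotifyTxCompletion are notified. The sketch below reduces that bookkeeping to a single struct; the numeric values and counter semantics come from the log, while the enum labels (CreateParts, ConfigureParts, Propose, ProposedWaitParts, Done) are guesses inferred from the surrounding unit names and should be read as assumptions, not the real state machine.

```cpp
// Sketch of per-operation state plus publication bookkeeping, modeled on the
// "Change state for txid ... X -> Y" and "Publication in-flight / complete"
// lines above. Numeric values come from the log; the enum labels and the
// data layout are assumptions made for this example only.
#include <cstdint>
#include <iostream>

enum ETxState : uint32_t {
    Accepted = 1,
    CreateParts = 2,
    ConfigureParts = 3,
    Propose = 128,
    ProposedWaitParts = 129,
    Done = 240,
};

struct TOperation {
    ETxState State = Accepted;
    uint32_t PublicationsInFlight = 0;  // TEvUpdateAck not yet received
    uint32_t Subscribers = 0;           // waiters registered via NotifyTxCompletion

    void ChangeState(ETxState next) {
        std::cout << "Change state " << State << " -> " << next << "\n";
        State = next;
    }

    // Invoked for every scheme-board acknowledgement (TEvUpdateAck).
    void AckPublication() {
        std::cout << "Publication in-flight, count: " << PublicationsInFlight << "\n";
        if (PublicationsInFlight > 0 && --PublicationsInFlight == 0)
            std::cout << "Publication complete, notify " << Subscribers
                      << " subscriber(s) & remove\n";
    }
};

int main() {
    TOperation op;
    op.Subscribers = 1;            // the test fixture waits for completion
    op.PublicationsInFlight = 1;   // one path version left to publish
    op.ChangeState(CreateParts);
    op.ChangeState(ConfigureParts);
    op.ChangeState(Propose);
    op.ChangeState(ProposedWaitParts);
    op.ChangeState(Done);
    op.AckPublication();
}
```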
2024-11-21T17:52:10.682381Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:10.682880Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211530728, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:10.682896Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:10.682985Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:10.683604Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:10.683648Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:10.683659Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:10.683675Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:10.683687Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:10.683699Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:10.683860Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:10.683925Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:10.683938Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:10.683963Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:10.849426Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:10.849613Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:52:10.849875Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:10.849896Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:10.850944Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:52:10.851046Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:10.851116Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:10.851140Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:10.851293Z node 
10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:52:10.851392Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:10.851411Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:10.851416Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:10.851468Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:10.851477Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:10.851479Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:10.853230Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:10.853304Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:10.853331Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:10.853344Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:52:10.853860Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:52:10.906566Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:10.906582Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:10.906725Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:10.906733Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:10.906757Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:10.906769Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:10.906775Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:52:10.907271Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:52:10.908045Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211530952, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:10.908060Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable 
TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211530952 2024-11-21T17:52:10.908091Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:52:10.908614Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:10.908713Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:10.908730Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:52:10.909083Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:10.909089Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:10.909093Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:10.909122Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:10.909124Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:10.909126Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:52:10.909879Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715658 Step: 1732211530952 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 229 } } 2024-11-21T17:52:10.909930Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211530952 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 320 } } 2024-11-21T17:52:10.909972Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1732211530952 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 361 } } 2024-11-21T17:52:10.909996Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:10.910020Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:10.910036Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 
72057594046644480 2024-11-21T17:52:10.910039Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:10.910046Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:52:10.910468Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:10.910485Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:10.910494Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate-StreamLookup >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2024-11-21T17:51:44.489804Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.489812Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.489816Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.489940Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.491658Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.491702Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.491771Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.491876Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.491917Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.491956Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.491965Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:51:44.492137Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.492141Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.492144Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.492204Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.492357Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.492381Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.492411Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.492462Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.492495Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.492521Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.492526Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:51:44.492782Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.492786Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.492788Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.492834Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.492945Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.492974Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.492999Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.493271Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.493365Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.493392Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.493399Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:51:44.493603Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.493618Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.493620Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.493677Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.493774Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.493811Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.493840Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.494769Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.494878Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.494905Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.494911Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:51:44.495108Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.495112Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.495114Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.495171Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T17:51:44.495287Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.495306Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.495333Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.495386Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.495406Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.495426Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.495432Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:51:44.495545Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.495548Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.495551Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.495592Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.505883Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.505957Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.506039Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.506151Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.506181Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.506198Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.506206Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:51:44.506463Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.506467Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.506470Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.506536Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.506679Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.506699Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.506739Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.506910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.508830Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.508867Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T17:51:44.508878Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T17:51:44.509194Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.509199Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.509202Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:51:44.509279Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:51:44.509418Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:51:44.509469Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.509528Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:51:44.510294Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:51:44.510326Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:51:44.510337Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:51:44.510345Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T17:51:44.540268Z :ReadSession INFO: Random seed for debugging is 1732211504540262 2024-11-21T17:51:44.768324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791759681377755:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:44.771952Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791762900305598:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00184e/r3tmp/tmpTK98gQ/pdisk_1.dat 2024-11-21T17:51:44.803473Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:44.801706Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:44.802984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot ... 8800, ReadSizeServerDelta = 0 2024-11-21T17:51:58.946697Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T17:51:58.946717Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T17:51:58.944000Z WriteTime: 2024-11-21T17:51:58.944000Z Ip: "ipv6:[::1]:33928" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:33928" } } } 2024-11-21T17:51:58.946781Z :DEBUG: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] [dc1] Commit offsets [2, 3). 
Partition stream id: 1 2024-11-21T17:51:58.946764Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T17:51:58.946801Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 got read request: guid# 52cf57fe-f7b2daff-8b23b690-8cac743c 2024-11-21T17:51:58.946840Z :DEBUG: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:51:58.946930Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2024-11-21T17:51:58.946973Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T17:51:58.946979Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T17:51:58.946983Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2024-11-21T17:51:58.947061Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:51:58.947072Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:51:58.947096Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_11556938054746125480_v1 2024-11-21T17:51:58.947109Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:51:58.947806Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:51:58.947822Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:51:58.947833Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T17:51:58.947900Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T17:51:58.947929Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2024-11-21T17:51:58.947940Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2024-11-21T17:51:58.948055Z :DEBUG: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2024-11-21T17:51:59.044619Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0] Write session will now close 2024-11-21T17:51:59.044656Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0] Write session: aborting 2024-11-21T17:51:59.044886Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:51:59.044897Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0] Write session: destroy 2024-11-21T17:51:59.045072Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0 grpc read done: success: 0 data: 2024-11-21T17:51:59.045086Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0 grpc read failed 2024-11-21T17:51:59.045093Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0 grpc closed 2024-11-21T17:51:59.045098Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|9ac3bc75-91302035-9d4c9579-7c98f4b3_0 is DEAD 2024-11-21T17:51:59.045347Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:51:59.045497Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:51:59.045524Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439791819810922647:2607] destroyed 2024-11-21T17:51:59.045541Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
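The read-session exchange above shows the commit path end to end: the client commits the half-open offset range [2, 3) under a cookie, the read proxy turns that into "committing to position 3 prev 2 end 3 by cookie 3", and the partition advances the consumer offset before the cookie is acknowledged back to the client. The function below is a hypothetical reduction of that bookkeeping; the field and message names echo the log, but the types and validity checks are assumptions, not the actual PersQueue implementation.

```cpp
// Hedged sketch of the cookie-based offset-commit bookkeeping visible above:
// the client commits [begin, end) under a cookie, the proxy translates it to
// "committing to position <end> prev <old> end <last>", and the partition
// advances the consumer offset. Types and checks here are illustrative only.
#include <cstdint>
#include <iostream>

struct TPartitionConsumer {
    uint64_t CommittedOffset = 0;  // "prev" in the log
    uint64_t EndOffset = 0;        // one past the last written message offset

    // Returns true when the commit advanced the offset and may be acknowledged.
    bool Commit(uint64_t toOffset, uint64_t cookie) {
        std::cout << "committing to position " << toOffset
                  << " prev " << CommittedOffset
                  << " end " << EndOffset
                  << " by cookie " << cookie << "\n";
        if (toOffset <= CommittedOffset || toOffset > EndOffset)
            return false;               // stale or out-of-range commit: ignore
        CommittedOffset = toOffset;     // persisted before acking in the real flow
        std::cout << "user offset is set to " << CommittedOffset << "\n";
        return true;
    }
};

int main() {
    TPartitionConsumer consumer;
    consumer.EndOffset = 3;        // three messages written, offsets 0..2
    consumer.CommittedOffset = 2;  // offsets 0 and 1 already committed
    // The client's "Commit offsets [2, 3)" arrives as "commit to position 3".
    if (consumer.Commit(/*toOffset=*/3, /*cookie=*/3))
        std::cout << "replying for commits: from# 3, to# 3, offset# 3\n";
}
```

A stale or out-of-range commit is simply ignored in this sketch; the log shows the real service additionally persisting the new offset through a key-value write ("CacheProxy. Passthrough write request to KV") before replying with the committed cookie.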
2024-11-21T17:51:59.861906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:51:59.861926Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:01.666244Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:52:01.719432Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T17:52:06.666479Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:52:08.946851Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T17:52:09.045195Z :INFO: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] Closing read session. Close timeout: 0.000000s 2024-11-21T17:52:09.045225Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T17:52:09.045235Z :INFO: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16331 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:52:09.045259Z :NOTICE: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T17:52:09.045268Z :DEBUG: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] [dc1] Abort session to cluster 2024-11-21T17:52:09.045503Z :NOTICE: [/Root] [/Root] [c9a4e11-c697a0e1-2c4a4406-8be65b4e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:52:09.046671Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 grpc read done: success# 0, data# { } 2024-11-21T17:52:09.046694Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 grpc read failed 2024-11-21T17:52:09.046703Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 grpc closed 2024-11-21T17:52:09.046722Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11556938054746125480_v1 is DEAD 2024-11-21T17:52:09.046937Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439791794041118319:2502] disconnected; active server actors: 1 2024-11-21T17:52:09.046947Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439791794041118319:2502] client user disconnected session shared/user_1_1_11556938054746125480_v1 2024-11-21T17:52:09.046939Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:52:09.046958Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_11556938054746125480_v1 2024-11-21T17:52:09.047020Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439791794041118322:2505] destroyed 2024-11-21T17:52:09.047067Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_11556938054746125480_v1 2024-11-21T17:52:09.546547Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:09.546554Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:09.546557Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:52:09.546621Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:52:09.546726Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:52:09.546810Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:09.546870Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:52:09.547046Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:52:09.547067Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:52:09.547122Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T17:52:09.547135Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:52:09.547142Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:52:09.547147Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T17:52:09.547189Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:52:09.547202Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD] >> TPersQueueTest::BadSids >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup [GOOD] >> DataShardTxOrder::ReadWriteReorder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T17:52:09.197122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:09.197464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:09.197479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aac/r3tmp/tmpZNIfTw/pdisk_1.dat 2024-11-21T17:52:09.288836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:09.304609Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:09.346447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:09.346471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:09.356871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:09.459828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:09.472868Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:09.473013Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:09.473079Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:09.473112Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:09.477862Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:09.477954Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:09.477968Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:09.478066Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:09.478082Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:09.478086Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:09.478117Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:09.480201Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:09.480260Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:09.480295Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:09.480298Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:09.480301Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:09.480304Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:09.480373Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.480378Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.480478Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:09.480489Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:09.480497Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.480499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.480503Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:09.480507Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:09.480511Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:09.480515Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:09.480519Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:09.480522Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:09.480525Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:09.480529Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:09.480544Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:09.480547Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:09.480561Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:09.480599Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:09.480607Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:09.480618Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:09.480621Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:09.480624Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:09.480626Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:09.480629Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:09.480657Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:09.480660Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:09.480662Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:09.480663Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:09.480669Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:09.480671Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:09.480673Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:09.480675Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:09.480678Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:09.480888Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:09.480894Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:09.491131Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:09.491151Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:09.491156Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:09.491164Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:09.491173Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:09.664638Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.664660Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.664668Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:09.664688Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:09.664693Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:09.664719Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:09.664727Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:09.664731Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:09.664736Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:09.665464Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:09.665483Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:09.665588Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.665593Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.665598Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:09.665605Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:09.665609Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:09.665617Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... tx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1008:2807], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 62 DurationUs: 2000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 11 FinishTimeMs: 1732211531788 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 5 BuildCpuTimeUs: 6 WaitInputTimeUs: 1116 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211531786 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:11.788619Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1008:2807] 2024-11-21T17:52:11.788629Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T17:52:11.788635Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T17:52:11.788758Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1009:2808], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 125 DurationUs: 2000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 83 FinishTimeMs: 1732211531788 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 73 BuildCpuTimeUs: 10 WaitInputTimeUs: 1070 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211531786 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:11.788766Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1009:2808] 2024-11-21T17:52:11.788774Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:11.788780Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:11.788851Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1010:2809], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 158 DurationUs: 2000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 127 FinishTimeMs: 1732211531788 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 122 BuildCpuTimeUs: 5 WaitInputTimeUs: 1022 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211531786 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:11.788857Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1010:2809] 2024-11-21T17:52:11.788864Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:11.788869Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T17:52:11.788972Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1011:2810], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 139 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 100 FinishTimeMs: 1732211531788 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 86 BuildCpuTimeUs: 14 WaitInputTimeUs: 1345 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211531786 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:11.788979Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1011:2810] 2024-11-21T17:52:11.788985Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T17:52:11.788990Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T17:52:11.789028Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1012:2811], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 59 DurationUs: 2000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 29 FinishTimeMs: 1732211531788 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 16 BuildCpuTimeUs: 13 WaitInputTimeUs: 1570 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211531786 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:11.789033Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1012:2811] 2024-11-21T17:52:11.789039Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1013:2812], 2024-11-21T17:52:11.789043Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1013:2812], 2024-11-21T17:52:11.789064Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1013:2812], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 97 DurationUs: 2000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 51 FinishTimeMs: 1732211531788 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 30 BuildCpuTimeUs: 21 WaitInputTimeUs: 1651 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211531786 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:11.789068Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1013:2812] 2024-11-21T17:52:11.789108Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:11.789119Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmqytbhh6nem83aafaakt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ2NDMyMjEtZTQ5MTYxY2MtNDJhYTJiODAtODJmYzdlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000919s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardTxOrder::RandomDotRanges_DelayRS >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail-StreamLookup [GOOD] Test command err: 2024-11-21T17:52:06.619695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:06.620033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:06.620047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000abf/r3tmp/tmpVCEfNt/pdisk_1.dat 2024-11-21T17:52:06.711552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:06.726796Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:06.768656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:06.768702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:06.779154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:06.882212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:06.895292Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:06.895429Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:06.895485Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:06.895517Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:06.900577Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:06.900685Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:06.900699Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:06.900801Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:06.900818Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:06.900823Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:06.900850Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:06.903181Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:06.903221Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:06.903244Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:06.903247Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:06.903250Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:06.903253Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:06.903338Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.903343Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:06.903425Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:06.903436Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:06.903444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.903447Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:06.903450Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:06.903455Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:06.903458Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:06.903462Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:06.903465Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:06.903467Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:06.903470Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:06.903473Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:06.903487Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:06.903490Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:06.903501Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:06.903527Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:06.903533Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:06.903544Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:06.903548Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:06.903550Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:06.903553Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:06.903555Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:06.903580Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:06.903583Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:06.903585Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:06.903587Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:06.903592Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:06.903594Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:06.903596Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:06.903597Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:06.903600Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:06.903749Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:06.903754Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:06.913950Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:06.913968Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:06.913972Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:06.913981Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:06.913992Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:07.087551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:07.087567Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:07.087573Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:07.087585Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:07.087588Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:07.087616Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:07.087622Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:07.087625Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:07.087628Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:07.088148Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:07.088156Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:07.088246Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.088253Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:07.088259Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:07.088266Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:07.088270Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:07.088278Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 74976715671. Resolved key sets: 1 2024-11-21T17:52:11.957968Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T17:52:11.957985Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2024-11-21T17:52:11.958029Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T17:52:11.958071Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715671. Shard resolve complete, resolved shards: 1 2024-11-21T17:52:11.958082Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2024-11-21T17:52:11.958090Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2024-11-21T17:52:11.958100Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:11.958107Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T17:52:11.958180Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:52:11.958193Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1206:2961], 2024-11-21T17:52:11.958200Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1206:2961], 2024-11-21T17:52:11.958205Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T17:52:11.958319Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1206:2961], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2024-11-21T17:52:11.958326Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1206:2961], 2024-11-21T17:52:11.958331Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1206:2961], 2024-11-21T17:52:11.958427Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1208:2961], Recipient [2:1130:2913]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2024-11-21T17:52:11.958452Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:52:11.958462Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v4001/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2024-11-21T17:52:11.958467Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2024-11-21T17:52:11.958476Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2024-11-21T17:52:11.958493Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:11.958498Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:52:11.958505Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:11.958509Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:11.958520Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2024-11-21T17:52:11.958525Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:11.958529Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:11.958532Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:52:11.958537Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:52:11.958552Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2024-11-21T17:52:11.958587Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1208:2961], 0} after executionsCount# 1 2024-11-21T17:52:11.958595Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1208:2961], 0} sends rowCount# 1, bytes# 24, quota rows left# 32766, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:52:11.958609Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1208:2961], 0} finished in read 2024-11-21T17:52:11.958618Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:11.958621Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:52:11.958625Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:11.958629Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2024-11-21T17:52:11.958636Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T17:52:11.958640Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:11.958644Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2024-11-21T17:52:11.958649Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T17:52:11.958777Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1208:2961], Recipient [2:1130:2913]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:52:11.958786Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T17:52:11.958956Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1206:2961], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 185 Tasks { TaskId: 1 CpuTimeUs: 39 FinishTimeMs: 1732211531958 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 27 WaitInputTimeUs: 349 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211531958 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:11.958968Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1206:2961] 2024-11-21T17:52:11.959004Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:11.959013Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1202:2961] TxId: 281474976715671. Ctx: { TraceId: 01jd7xmr5afasa8jsfnr98d07p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhlZDFjZWItZWQwNGJhMzItM2U3ODkzNjEtM2QxMDdjMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000185s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: 2024-11-21T17:52:08.593612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:08.594208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:08.594239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aaf/r3tmp/tmpAhzgb5/pdisk_1.dat 2024-11-21T17:52:08.687883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.702991Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:08.745094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:08.745126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:08.755581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:08.858423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:09.066760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T17:52:09.317088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:09.317109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:09.317117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:09.317719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:09.492763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:09.553534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmnk407p2rm62n7sn0rj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwMDg1ZjYtMTliMDRiOWQtM2RiNTA3ZGQtNzBlMDA2NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:09.571494Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmntx52x68x9trq1f59pd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVmMTMyMjktYTc4YzQyNWQtYTljYTkyODMtYzI0ZTg1ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2024-11-21T17:52:09.618059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xmnvg2p93r8hd35m1jaep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk1ZWVjMGYtMWUwZWQyNjQtZDdhYWY5MGItNjU0YmNlNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T17:52:09.629529Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmnwme4ekcfvkja7pfp9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk1ZWVjMGYtMWUwZWQyNjQtZDdhYWY5MGItNjU0YmNlNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2024-11-21T17:52:09.926463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmp5s902f1gtaqmgqz9we, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjdhNzkyZjAtYzNiM2Q1M2MtMThmZjg5MjctODkxZTQ1M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2024-11-21T17:52:10.415030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:10.415065Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:10.415082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aaf/r3tmp/tmptL4UJf/pdisk_1.dat 2024-11-21T17:52:10.488448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:10.503190Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:10.545243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:10.545281Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:10.555837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:10.659388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:10.865465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:11.118749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:11.118777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:11.118786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:11.119651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:11.295746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:11.337944Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmqbe2n3a406r8w3q1nzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmY1YjhlN2ItYTM0Yzc3NWMtYzgwNWJjOTctNzEzYmUxZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:11.356530Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmqjna233b1fc78n385n6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzA1ZDkyYjctNjI1ZWIzMzYtNzZiZjBlNjgtYTYzMmE3Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2024-11-21T17:52:11.955266Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmr5a62fccdwafkc0wftw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjllMjQ1MjItMmFkMDE3ODQtZjU5ZWM4MDktOWNkMjIzZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:11.964896Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmr5mf20rphxz7rytvymh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmY1Yjg2OGUtY2M4ODc1YzctZDQ0NmZjNTgtNjQwNzZkMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 >> YdbQueryService::TestCreateAndAttachSession >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> DataShardTxOrder::ReadWriteReorder [GOOD] >> DataShardOutOfOrder::TestReadTableWriteConflict >> Yq_1::DescribeJob >> Yq_1::ModifyConnections >> Yq_1::Basic_Null ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T17:52:09.459911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:09.460348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:09.460368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aab/r3tmp/tmp4jXkLZ/pdisk_1.dat 2024-11-21T17:52:09.554635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:09.573782Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:09.615904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:09.615930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:09.626349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:09.729559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:09.743088Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:09.743324Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:09.743399Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:09.743453Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:09.749244Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:09.749379Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:09.749399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:09.749498Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:09.749513Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:09.749517Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:09.749547Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:09.752212Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:09.752279Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:09.752305Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:09.752308Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:09.752311Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:09.752314Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:09.752422Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.752426Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.752528Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:09.752541Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:09.752550Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.752553Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.752557Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:09.752564Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:09.752569Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:09.752574Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:09.752579Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:09.752582Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:09.752587Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:09.752592Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:09.752610Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:09.752613Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:09.752628Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:09.752658Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:09.752665Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:09.752675Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:09.752679Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:09.752681Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:09.752684Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:09.752687Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:09.752718Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:09.752721Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:09.752723Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:09.752725Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:09.752732Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:09.752734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:09.752736Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:09.752738Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:09.752741Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:09.752933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:09.752940Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:09.763183Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:09.763207Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:09.763215Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:09.763225Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:09.763234Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:09.937402Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.937421Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:09.937429Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:09.937446Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:09.937450Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:09.937474Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:09.937482Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:09.937486Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:09.937491Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:09.938212Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:09.938228Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:09.938342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.938348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:09.938353Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:09.938360Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:09.938363Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:09.938371Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 7. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2841], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 58 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 11 FinishTimeMs: 1732211532274 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 4 BuildCpuTimeUs: 7 WaitInputTimeUs: 781 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211532273 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:12.274913Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1046:2841] 2024-11-21T17:52:12.274918Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:12.274922Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:12.274985Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1047:2842], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 117 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 82 FinishTimeMs: 1732211532274 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 74 BuildCpuTimeUs: 8 WaitInputTimeUs: 752 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211532273 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:12.274991Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2842] 2024-11-21T17:52:12.274997Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:12.275002Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T17:52:12.275050Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2843], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 146 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 119 FinishTimeMs: 1732211532274 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 114 BuildCpuTimeUs: 5 WaitInputTimeUs: 675 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211532273 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:12.275054Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2843] 2024-11-21T17:52:12.275057Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T17:52:12.275060Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T17:52:12.275113Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2844], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 148 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 119 FinishTimeMs: 1732211532275 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 107 BuildCpuTimeUs: 12 WaitInputTimeUs: 856 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211532273 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:12.275119Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2844] 2024-11-21T17:52:12.275124Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T17:52:12.275128Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T17:52:12.275163Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2845], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 64 DurationUs: 2000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 35 FinishTimeMs: 1732211532275 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 12 WaitInputTimeUs: 1194 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211532273 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:12.275168Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2845] 2024-11-21T17:52:12.275173Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1051:2846], 2024-11-21T17:52:12.275177Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2846], 2024-11-21T17:52:12.275192Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2846], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 79 DurationUs: 2000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 48 FinishTimeMs: 1732211532275 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 31 BuildCpuTimeUs: 17 WaitInputTimeUs: 1272 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211532273 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:12.275195Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2846] 2024-11-21T17:52:12.275233Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:12.275242Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd7xmrea3m855e34e2j6sk71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGRmOWZiZDctODAxZWJiNjQtOTk5YjZiZWItZDIxMmMyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000814s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2024-11-21T17:52:12.174090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:12.174110Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:12.174124Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:12.176493Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:12.176622Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:12.176674Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:12.177249Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:12.182851Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:12.182952Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:12.183072Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:12.183084Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:12.183090Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:12.183117Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:12.185668Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:12.185727Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:12.185774Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:12.185779Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:12.185782Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:12.185786Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.185849Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.185854Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.185871Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:12.185884Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:12.185919Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.185924Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:12.185928Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:12.185932Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached 
operations 2024-11-21T17:52:12.185935Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:12.185938Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:12.185942Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:12.191772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.191786Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.191792Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:12.192087Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:12.192097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:12.192114Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:12.192134Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:12.192140Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:12.192146Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:12.192150Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.192152Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:12.192155Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:12.192158Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.192201Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:12.192203Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:12.192205Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:12.192207Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.192214Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:12.192216Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:12.192218Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:12.192220Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.192223Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:12.213162Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:12.213183Z node 1 :TX_DATASHARD TRACE: Complete execution 
for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.213190Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.213202Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:12.213215Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:12.213325Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.213333Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.213341Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:12.213360Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:12.213365Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:12.213404Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.213413Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.213418Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:12.213423Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:12.214125Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:12.214138Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.214186Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.214192Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.214199Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.214206Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:12.214211Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:12.214219Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:12.214224Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:12.214231Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.214235Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:12.214239Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:12.214244Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:12.214286Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys 
extracted: 0 2024-11-21T17:52:12.214291Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.214295Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:12.214300Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:12.214303Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:12.214314Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.214317Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:12.214321Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:12.214324Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:12.214335Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:12.214339Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:12.214342Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:12.214348Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.214351Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:12.214355Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeSna ... WaitInRS 2024-11-21T17:52:12.677477Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2024-11-21T17:52:12.677478Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T17:52:12.677480Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2024-11-21T17:52:12.677482Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2024-11-21T17:52:12.677525Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2024-11-21T17:52:12.677531Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:12.677536Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2024-11-21T17:52:12.677538Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2024-11-21T17:52:12.677540Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2024-11-21T17:52:12.677542Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2024-11-21T17:52:12.677566Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2024-11-21T17:52:12.677568Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2024-11-21T17:52:12.677570Z node 1 :TX_DATASHARD TRACE: Add 
[1000005:12] at 9437185 to execution unit CompletedOperations 2024-11-21T17:52:12.677572Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2024-11-21T17:52:12.677575Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2024-11-21T17:52:12.677576Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2024-11-21T17:52:12.677579Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437185 has finished 2024-11-21T17:52:12.677581Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:12.677582Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T17:52:12.677584Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T17:52:12.677586Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T17:52:12.677608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:227:2222], Recipient [1:227:2222]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.677615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.677620Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.677624Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:12.677626Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:12.677628Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2024-11-21T17:52:12.677630Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2024-11-21T17:52:12.677634Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677636Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:12.677637Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:12.677639Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:12.677712Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2024-11-21T17:52:12.677716Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677719Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:12.677722Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T17:52:12.677741Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T17:52:12.677746Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677748Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T17:52:12.677752Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:12.677755Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 
2024-11-21T17:52:12.677762Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically complete end at 9437184 2024-11-21T17:52:12.677764Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically incomplete end at 9437184 2024-11-21T17:52:12.677767Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437184 2024-11-21T17:52:12.677771Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677774Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:12.677777Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T17:52:12.677780Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2024-11-21T17:52:12.677785Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677788Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T17:52:12.677791Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T17:52:12.677794Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2024-11-21T17:52:12.677797Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677800Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2024-11-21T17:52:12.677804Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T17:52:12.677808Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2024-11-21T17:52:12.677811Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677814Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T17:52:12.677817Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T17:52:12.677820Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2024-11-21T17:52:12.677823Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677826Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T17:52:12.677829Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:52:12.677832Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2024-11-21T17:52:12.677886Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2024-11-21T17:52:12.677891Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:12.677895Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677897Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit 
ExecuteDataTx 2024-11-21T17:52:12.677899Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2024-11-21T17:52:12.677902Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2024-11-21T17:52:12.677924Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2024-11-21T17:52:12.677926Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2024-11-21T17:52:12.677928Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2024-11-21T17:52:12.677930Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2024-11-21T17:52:12.677932Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2024-11-21T17:52:12.677934Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2024-11-21T17:52:12.677936Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2024-11-21T17:52:12.677938Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:12.677940Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:12.677942Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:12.677943Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:12.689170Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2024-11-21T17:52:12.689193Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2024-11-21T17:52:12.689209Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:12.689216Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2024-11-21T17:52:12.689234Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:52:12.689243Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.689413Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2024-11-21T17:52:12.689427Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2024-11-21T17:52:12.689437Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T17:52:12.689445Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2024-11-21T17:52:12.689456Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T17:52:12.689464Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> DataShardOutOfOrder::TestImmediateQueueThenSplit [GOOD] |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop >> Yq_1::DescribeConnection |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |83.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate-StreamLookup [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> ResultFormatter::Pg [GOOD] >> YdbQueryService::TestCreateAndAttachSession [GOOD] >> YdbQueryService::TestAttachTwice |83.7%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] |83.7%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.7%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: 2024-11-21T17:52:07.983083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791860494490272:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:07.983204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011f9/r3tmp/tmp2H6I6U/pdisk_1.dat 2024-11-21T17:52:08.033316Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19738, node 1 2024-11-21T17:52:08.050583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:08.050604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:08.050606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:08.050641Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18979 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:08.082899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:08.082944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:08.084447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:08.113448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.114538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.114561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.115282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:08.115350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:08.115359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:08.115731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:08.115743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:08.115811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:08.116047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.116899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211528166, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:08.116909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:08.116991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:08.117411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:08.117486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:08.117498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:08.117514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:08.117524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:08.117538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:08.117998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:08.118020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:08.118026Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:08.118041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:18979 2024-11-21T17:52:08.145521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.145638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.145646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.146021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 
2024-11-21T17:52:08.147571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211528194, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:08.147900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:52:08.147916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:52:08.148062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:52:08.149071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.149241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.149254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.149670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 2024-11-21T17:52:08.650601Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791863799101212:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:08.650945Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:08.652392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:08.652414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:08.653208Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:52:08.653458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:08.665486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.665755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.665765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:52:08.665812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2024-11-21T17:52:08.665839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.665846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2024-11-21T17:52:08.665921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:08.666642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2024-11-21T17:52:09.118923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211529167, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:09.119362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:52:09.119392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T17:52:09.119511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at sche ... CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:12.850596Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:12.850647Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:12.852702Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:12.853645Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.853763Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:12.853770Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.854135Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:12.854181Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:12.854188Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:12.854441Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:12.854451Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:12.854675Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:12.854694Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.855316Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211532898, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:12.855330Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:12.855375Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:12.855681Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:12.855719Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:12.855730Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:12.855740Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:12.855749Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:12.855757Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:12.855912Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:12.855924Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:12.855927Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:12.855934Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:13.021661Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.021805Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:52:13.021965Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:13.021978Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.022865Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T17:52:13.022924Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.022983Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:13.023006Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:13.023201Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:52:13.023273Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:13.023285Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:13.023290Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:13.023343Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:13.023355Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:13.023357Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:13.025332Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:13.025363Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:52:13.025746Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:52:13.077970Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:13.077984Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:13.078011Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:52:13.078673Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:52:13.079871Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211533129, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:13.079887Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211533129 2024-11-21T17:52:13.079928Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:52:13.080440Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.080543Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T17:52:13.080561Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:52:13.080895Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:13.080920Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:13.080924Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:13.080961Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:13.080967Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:13.080968Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:52:13.081123Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211533129 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 353 } } 2024-11-21T17:52:13.081255Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:13.081267Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.081273Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:52:13.081652Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:13.081669Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:13.081682Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> DataShardOutOfOrder::UncommittedReads ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2024-11-21T17:52:10.916598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:10.916618Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:10.916638Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:10.919265Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:10.919375Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 
9437184 actor [1:130:2153] 2024-11-21T17:52:10.919421Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:10.920017Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:10.926772Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:10.926892Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:10.927020Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:10.927035Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:10.927041Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:10.927073Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:10.929742Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:10.929780Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:10.929818Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:10.929821Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:10.929824Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:10.929827Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.929881Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:10.929885Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:10.929898Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:10.929908Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:10.929945Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:10.929950Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:10.929955Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:10.929958Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:10.929960Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:10.929963Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:10.929966Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:10.935887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:10.935902Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:10.935909Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:10.936280Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 
4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:10.936294Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:10.936310Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:10.936338Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:10.936347Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:10.936354Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:10.936360Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:10.936364Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:10.936369Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:10.936372Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:10.936430Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:10.936435Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:10.936439Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:10.936442Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:10.936451Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:10.936454Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:10.936458Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:10.936461Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:10.936465Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:10.957444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:10.957467Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:10.957473Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:10.957483Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:10.957495Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:10.957605Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:10.957611Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:10.957617Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:10.957647Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:10.957652Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:10.957691Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:10.957699Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:10.957704Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:10.957708Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:10.958351Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:10.958377Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:10.958417Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:10.958421Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:10.958427Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:10.958434Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:10.958438Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:10.958445Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:10.958449Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:10.958455Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:10.958459Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:10.958463Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:10.958466Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:10.958504Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:10.958508Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:10.958511Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:10.958515Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:10.958518Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:10.958527Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:10.958531Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:10.958534Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:10.958537Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit 
BuildAndWaitDependencies 2024-11-21T17:52:10.958547Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:10.958550Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:10.958553Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:10.958558Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:10.958561Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:10.958564Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... d event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:52:13.321264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:13.321267Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T17:52:13.321288Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:13.321293Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 98 Flags# 0} 2024-11-21T17:52:13.321301Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:13.321305Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 99 Flags# 0} 2024-11-21T17:52:13.321310Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:13.321314Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 100 Flags# 0} 2024-11-21T17:52:13.321319Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:13.321323Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2024-11-21T17:52:13.321332Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T17:52:13.321338Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:52:13.321341Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:13.321366Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:13.321371Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2024-11-21T17:52:13.321377Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:13.321381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:13.321405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:13.321409Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2024-11-21T17:52:13.321419Z node 1 :TX_DATASHARD DEBUG: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2024-11-21T17:52:13.321428Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:13.321432Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2024-11-21T17:52:13.321438Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:13.321442Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:13.321462Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:13.321466Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T17:52:13.321472Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:13.321476Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:13.321537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:52:13.321542Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:13.321546Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T17:52:13.321560Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:52:13.321563Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:13.321566Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T17:52:13.321573Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:227:2222], Recipient [1:433:2383]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T17:52:13.321577Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:52:13.321581Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2024-11-21T17:52:13.321592Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T17:52:13.321603Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2024-11-21T17:52:13.321617Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T17:52:13.321630Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 
2024-11-21T17:52:13.321633Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:13.321636Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2024-11-21T17:52:13.321648Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:433:2383], Recipient [1:433:2383]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:13.321652Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:13.321659Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T17:52:13.321664Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:52:13.321671Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2024-11-21T17:52:13.321676Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2024-11-21T17:52:13.321682Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T17:52:13.321686Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T17:52:13.321689Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2024-11-21T17:52:13.321693Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2024-11-21T17:52:13.321873Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2024-11-21T17:52:13.321886Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:13.321897Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2024-11-21T17:52:13.321901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2024-11-21T17:52:13.321904Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2024-11-21T17:52:13.321908Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:52:13.321975Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2024-11-21T17:52:13.321980Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2024-11-21T17:52:13.321984Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2024-11-21T17:52:13.321987Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2024-11-21T17:52:13.321992Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T17:52:13.321995Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2024-11-21T17:52:13.321999Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2024-11-21T17:52:13.322003Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 
immediate 0 planned 0 2024-11-21T17:52:13.322006Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T17:52:13.322010Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T17:52:13.322013Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2024-11-21T17:52:13.322077Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T17:52:13.322081Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:13.322085Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T17:52:13.333959Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:52:13.333985Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:52:13.334010Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:13.334026Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:52:13.334033Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:52:13.334103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:52:13.334109Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:13.334115Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] |83.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] >> YdbQueryService::TestAttachTwice [GOOD] >> YdbQueryService::TestForbidExecuteWithoutAttach |83.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop [GOOD] >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery |83.8%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries >> BasicStatistics::TwoServerlessDbs >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop [GOOD] Test command err: 2024-11-21T17:52:12.034412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:12.034989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:12.035023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa2/r3tmp/tmpsCQ7tZ/pdisk_1.dat 2024-11-21T17:52:12.130690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.147505Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:12.189493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:12.189516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:12.200019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:12.303445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.317019Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:12.317284Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:12.317372Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T17:52:12.317470Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:12.322816Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:12.322950Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:12.322970Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:12.323061Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:12.323074Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:12.323079Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:12.323106Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:12.326134Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:12.326182Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:12.326206Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T17:52:12.326209Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:12.326213Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T17:52:12.326216Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:12.326297Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.326301Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.326384Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:12.326393Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:12.326402Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.326405Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.326410Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T17:52:12.326414Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:12.326417Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:12.326421Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:12.326424Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:12.326427Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:12.326430Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:12.326433Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:12.326447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T17:52:12.326450Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:12.326463Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:12.326495Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:12.326501Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:12.326511Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:12.326515Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.326517Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:12.326520Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:12.326523Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:12.326551Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:12.326554Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:12.326557Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:12.326561Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:12.326568Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:12.326571Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:12.326575Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:12.326578Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:12.326582Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:12.326800Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:12.326806Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:12.337056Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:12.337080Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:12.337085Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:12.337092Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T17:52:12.337105Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:12.510891Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.510913Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.510919Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T17:52:12.510932Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T17:52:12.510935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:12.510962Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:12.510969Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:12.510972Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:12.510975Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T17:52:12.511485Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:52:12.511492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:12.511580Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.511586Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.511592Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:12.511599Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:12.511604Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T17:52:12.511612Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... TATE_FAILURE }; 2024-11-21T17:52:14.694320Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2024-11-21T17:52:14.694385Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:851:2683], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 53595 Tasks { TaskId: 1 CpuTimeUs: 52897 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 8 BuildCpuTimeUs: 52889 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211534388 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:14.694396Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:851:2683] 2024-11-21T17:52:14.694428Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T17:52:14.694435Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:852:2684], task: 2 2024-11-21T17:52:14.694758Z node 2 :KQP_EXECUTER INFO: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 54264 DurationUs: 1732211532744011 Tables { TablePath: "/Root/table-1" } ExecuterCpuTimeUs: 669 StartTimeMs: 1950 FinishTimeMs: 1732211534694 Stages { StageGuid: "8a442f4d-8c7eb549-3bf07840-98cdece7" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/table-1\" \'\"72057594046644480:2\" \'\"\" \'1))\n (let $2 (KqpWideReadTableRanges $1 (Void) \'(\'\"value2\") \'() \'()))\n (let $3 (DataType \'Uint64))\n (let $4 (lambda \'($9) (Just (SafeCast $9 $3))))\n (let $5 (Nothing (OptionalType $3)))\n (let $6 (Condense1 (NarrowMap $2 (lambda \'($7) (AsStruct \'(\'\"value2\" $7)))) (lambda \'($8) (IfPresent (Member $8 \'\"value2\") $4 $5)) (lambda \'($10 $11) (Bool \'false)) (lambda \'($12 $13) (block \'(\n (let $14 (IfPresent (Member $12 \'\"value2\") $4 $5))\n (return (AggrAdd $14 $13))\n )))))\n (return (FromFlow (ExpandMap $6 (lambda \'($15) $15))))\n))))\n)\n" ComputeActors { CpuTimeUs: 53595 Tasks { TaskId: 1 CpuTimeUs: 52897 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 8 BuildCpuTimeUs: 52889 HostName: "ghrun-2rqap2spfm" NodeId: 2 StartTimeMs: 1732211534388 } MaxMemoryUsage: 1048576 } } Stages { StageId: 1 StageGuid: "8c7ac020-d00d5999-8af8d82f-d40c383" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (Bool \'false))\n (let $3 (WideCondense1 (ToFlow $1) (lambda \'($5) $5) (lambda \'($6 $7) $2) (lambda \'($8 $9) (AggrAdd $8 $9))))\n (let $4 (Condense (NarrowMap (Take $3 (Uint64 \'1)) (lambda \'($10) (AsStruct \'(\'Sum0 $10)))) (Nothing (OptionalType (StructType \'(\'Sum0 (OptionalType (DataType \'Uint64)))))) (lambda \'($11 $12) $2) (lambda \'($13 $14) (Just $13))))\n (return (FromFlow (Map $4 (lambda \'($15) (AsList (AsStruct \'(\'\"column0\" (Member $15 \'Sum0))))))))\n))))\n)\n" } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"Precompute_0\",\"Parent Relationship\":\"InitPlan\",\"PlanNodeId\":4,\"PlanNodeType\":\"Materialize\",\"Plans\":[{\"Node Type\":\"Aggregate-Limit\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Aggregate\"},{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Aggregate-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Aggregate\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"ReadColumns\":[\"value2\"],\"ReadRanges\":[\"key (-∞, 
+∞)\"],\"Scan\":\"Parallel\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"8a442f4d-8c7eb549-3bf07840-98cdece7\",\"Stats\":{\"ComputeNodes\":[{\"CpuTimeUs\":53595,\"Tasks\":[{\"ComputeTimeUs\":8,\"Host\":\"ghrun-2rqap2spfm\",\"NodeId\":2,\"StartTimeMs\":1732211534388,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"8c7ac020-d00d5999-8af8d82f-d40c383\",\"Stats\":{\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\",\"Subplan Name\":\"CTE precompute_0_0\"}],\"StageGuid\":\"\"}" Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\016\010\333\242\003\020\333\242\003\030\333\242\003 \001" } } 2024-11-21T17:52:14.694772Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:846:2656] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:14.694793Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:852:2684], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=. TraceId : 01jd7xmtbf57eaeqkzqjq2je9n. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646735 2024-11-21T17:52:14.694805Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:852:2684], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=. TraceId : 01jd7xmtbf57eaeqkzqjq2je9n. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Handle abort execution event from: [2:846:2656], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2024-11-21T17:52:14.694843Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2024-11-21T17:52:14.694858Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2024-11-21T17:52:14.695737Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T17:52:14.695803Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, ActorId: [2:819:2656], ActorState: ExecuteState, TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Create QueryResponse for error on request, msg: 2024-11-21T17:52:14.695923Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T17:52:14.695934Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 281474976715664 ProcessProposeKqpTransaction 2024-11-21T17:52:14.696030Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T17:52:14.696047Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Handle TEvProposeTransaction 2024-11-21T17:52:14.696053Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] TxId# 0 ProcessProposeTransaction 2024-11-21T17:52:14.696071Z node 2 :TX_PROXY DEBUG: actor# [2:52:2099] Cookie# 0 userReqId# "" txid# 0 reqId# [2:888:2716] SnapshotReq marker# P0 2024-11-21T17:52:14.696178Z node 2 :TX_PROXY DEBUG: Actor# [2:890:2716] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2024-11-21T17:52:14.696226Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2024-11-21T17:52:14.696256Z node 2 :TX_PROXY DEBUG: Actor# [2:890:2716] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2024-11-21T17:52:14.696287Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:14.696296Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T17:52:14.696307Z node 2 :KQP_EXECUTER INFO: ActorId: [2:887:2656] TxId: 281474976715664. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:52:14.696331Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:887:2656] TxId: 281474976715664. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:14.696342Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:887:2656] TxId: 281474976715664. Ctx: { TraceId: 01jd7xmtbf57eaeqkzqjq2je9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T17:52:14.696374Z node 2 :TX_PROXY DEBUG: Actor# [2:888:2716] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2024-11-21T17:52:14.696470Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:558:2485], selfId: [2:50:2097], source: [2:819:2656] 2024-11-21T17:52:14.696519Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:888:2716], Recipient [2:632:2537]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2024-11-21T17:52:14.696738Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZDUyOWZjZjMtZTAwOTEzODctZTVhOGEwZGMtMzcyNWIzYjQ=, workerId: [2:819:2656], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 69 |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> ColumnStatistics::CountMinSketchServerlessStatistics >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> YdbQueryService::TestCreateDropAttachSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T17:52:12.113262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:12.113627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:12.113643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa0/r3tmp/tmplMJdew/pdisk_1.dat 2024-11-21T17:52:12.206522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.222872Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:12.265305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:12.265336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:12.275811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:12.378972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.588782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.847150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:884:2699], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:12.847179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2704], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:12.847189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:12.848000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:13.024527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:13.081397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xms1ebbgzy6p1wvfwpqa4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTJlOTdhOS00YzA0YmI0Ni1kOWNjZDgxZC0zNWViMDdlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:13.101576Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xms96cpnyesfyc2dtgd2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ4ZTc4YWQtYzdiYjRiNTYtMjZhNDIyZjktOTBkNjgyN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:13.156283Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xms9t0mraxkd5ky2mt3nb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhNTQ5MjAtZGZjN2E5NTYtNmEwZjk3OTMtMjIxNmVkYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2024-11-21T17:52:13.170563Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmsb7cgvejte0aerm4akt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhNTQ5MjAtZGZjN2E5NTYtNmEwZjk3OTMtMjIxNmVkYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2024-11-21T17:52:13.190494Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmsbyd0esv14ty370ww28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVmMjNjNDEtY2JjZDhlNC02MzczZmRiNS0xYzc5OGM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2024-11-21T17:52:13.191042Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1128:2804] TxId: 281474976715665. Ctx: { TraceId: 01jd7xmsbyd0esv14ty370ww28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVmMjNjNDEtY2JjZDhlNC02MzczZmRiNS0xYzc5OGM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2024-11-21T17:52:13.192468Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGVmMjNjNDEtY2JjZDhlNC02MzczZmRiNS0xYzc5OGM2OQ==, ActorId: [1:1028:2804], ActorState: ExecuteState, TraceId: 01jd7xmsbyd0esv14ty370ww28, Create QueryResponse for error on request, msg: 2024-11-21T17:52:13.193182Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBhNTQ5MjAtZGZjN2E5NTYtNmEwZjk3OTMtMjIxNmVkYzE=, ActorId: [1:1030:2806], ActorState: ExecuteState, TraceId: 01jd7xmsb7cgvejte0aerm4akt, Create QueryResponse for error on request, msg: 2024-11-21T17:52:13.193348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xmsbyd0esv14ty370ww28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVmMjNjNDEtY2JjZDhlNC02MzczZmRiNS0xYzc5OGM2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:13.203941Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xmsb7cgvejte0aerm4akt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhNTQ5MjAtZGZjN2E5NTYtNmEwZjk3OTMtMjIxNmVkYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:13.357578Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd7xmsgyb1nxaafq7m5rwd4h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU0MDVmN2EtNjlhMjdlMWQtZmRmZTQyYTYtNDY0YThiNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2024-11-21T17:52:13.717857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:13.717894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:13.717917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aa0/r3tmp/tmpSphYaP/pdisk_1.dat 2024-11-21T17:52:13.797067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.811222Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:13.853220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:13.853249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:13.863823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:13.967568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:14.172752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T17:52:14.426693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:14.426717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:14.426729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:14.427610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:14.604094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:14.644312Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xmtjt34f8t3v3vq792a2r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTBkNzQzZTktZmExZjc5ZjgtZWIxNDc5NWYtYjg1NTUyNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 889 RawX2: 8589937235 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\ty\003\000\000\000\000\000\000\021S\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\00 ... 22\r\010\240\234\001\022\005\t\000\002\002\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CustomerSuppliedId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=NTBkNzQzZTktZmExZjc5ZjgtZWIxNDc5NWYtYjg1NTUyNGI=\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jd7xmtjt34f8t3v3vq792a2r\222\001\026\n\022CurrentExecutionId\022\000\222\001\014\n\010Database\022\000\230\001\000\"\n\010\245\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 55 } } 2024-11-21T17:52:14.664990Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xmtt09h1khfv4z9sehpqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODVjMGQ5Yy04YzNiYjQ0Ni0yZWE1ODVlYi03YjI3NzNiOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 911 RawX2: 8589937308 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\217\003\000\000\000\000\000\000\021\234\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001%\n\007TraceId\022\03201jd7xmtt09h1khfv4z9sehpqa\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ODVjMGQ5Yy04YzNiYjQ0Ni0yZWE1ODVlYi03YjI3NzNiOQ==\222\001\014\n\010Database\022\000\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 62 } } ===== Begin SELECT 2024-11-21T17:52:14.719325Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xmttndsnvcjf0v5fx88r2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjBiYjEzNTgtNWI4NDRkZTMtNzdlMGI0YTUtODQ2MjI5MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T17:52:14.732410Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xmtw19xbm83sy05904024, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjBiYjEzNTgtNWI4NDRkZTMtNzdlMGI0YTUtODQ2MjI5MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\365\006\010\001\022\223\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_1\022\r\010\240\234\001\022\005\t\000\002\006\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ZjBiYjEzNTgtNWI4NDRkZTMtNzdlMGI0YTUtODQ2MjI5MjE=\222\001\023\n\nDatabaseId\022\005/Root\222\001\021\n\006PoolId\022\007default\222\001%\n\007TraceId\022\03201jd7xmtw19xbm83sy05904024\222\001\026\n\022CustomerSuppliedId\022\000\222\001\026\n\022CurrentExecutionId\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\367\006\010\001\022\225\006\010\002\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 
Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\010\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\004\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004H\001\200\001\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ZjBiYjEzNTgtNWI4NDRkZTMtNzdlMGI0YTUtODQ2MjI5MjE=\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001%\n\007TraceId\022\03201jd7xmtw19xbm83sy05904024\222\001\014\n\010Database\022\000\222\001\026\n\022CurrentExecutionId\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037888 TxId: 281474976715664 MinStep: 2021 MaxStep: 32021 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 15 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 72 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037889 TxId: 281474976715664 MinStep: 2021 MaxStep: 32021 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 14 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 21 } } ... captured readset ... captured readset ===== restarting tablet EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 138 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 236 } } ===== Waiting for commit response ===== Last SELECT 2024-11-21T17:52:15.030467Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd7xmv58cxfbbp4f4cgnvbpq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc5Mzk4NjUtODdiMDRhN2ItYWIzYjU2MzEtZGY5OTdhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:50:03.075679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:50:03.075706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.075710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:50:03.075715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:50:03.075722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:50:03.075726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:50:03.075743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:50:03.075844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:50:03.087086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:50:03.087113Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.089594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:50:03.089713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:50:03.089745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:50:03.092772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:50:03.092883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:50:03.092994Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.093245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.094062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.094353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:50:03.094362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.094373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:50:03.094379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.094385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:50:03.094422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:50:03.096348Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:50:03.115026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:50:03.115110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.115168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:50:03.115207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:50:03.115215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.115980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.116015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:50:03.116054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.116064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:50:03.116068Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:50:03.116073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:50:03.116527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.116544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:50:03.116549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:50:03.117060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.117072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.117080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.117086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.117655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:50:03.118066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:50:03.118126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:50:03.118327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:50:03.118357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:50:03.118389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.118459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:50:03.118469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:50:03.118507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:50:03.118525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:50:03.119599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T17:50:03.119613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:50:03.119672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:50:03.119679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:50:03.119776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:50:03.119784Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:50:03.119797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:50:03.119801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.119807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:50:03.119813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:50:03.119818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:50:03.119822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:50:03.119835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:50:03.119842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:50:03.119846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T17:52:07.364584Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:07.364588Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T17:52:07.364596Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:52:07.364852Z node 265 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.364867Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.364871Z node 265 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:52:07.364876Z node 265 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 11 2024-11-21T17:52:07.364880Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:52:07.364895Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:52:07.365111Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:52:07.365119Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T17:52:07.365124Z node 265 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T17:52:07.365528Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T17:52:07.365565Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:52:07.365620Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:52:07.365630Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409548 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000011 2024-11-21T17:52:07.365730Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.365949Z node 265 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000011, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T17:52:07.365974Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 1138166335594 } } Step: 5000011 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:07.365982Z node 265 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000011, at schemeshard: 72057594046678944 2024-11-21T17:52:07.366008Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T17:52:07.366018Z node 265 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T17:52:07.366023Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:52:07.366033Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:52:07.366043Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:52:07.366048Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T17:52:07.366055Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T17:52:07.366060Z node 265 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T17:52:07.366064Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T17:52:07.366074Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:52:07.366079Z node 265 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T17:52:07.366083Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2024-11-21T17:52:07.366086Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2024-11-21T17:52:07.366305Z node 265 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:07.366324Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.366698Z node 265 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:07.366708Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:07.366749Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:52:07.366774Z node 265 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:07.366779Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [265:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T17:52:07.366784Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [265:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T17:52:07.366937Z node 265 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.366949Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.366955Z node 265 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:52:07.366959Z node 265 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:52:07.366963Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T17:52:07.367091Z node 265 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.367102Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.367105Z node 265 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T17:52:07.367109Z node 265 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:52:07.367113Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:52:07.367125Z node 265 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T17:52:07.367129Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [265:120:2146] 2024-11-21T17:52:07.367185Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:07.367191Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:52:07.367200Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:52:07.367814Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 
2024-11-21T17:52:07.367952Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T17:52:07.367972Z node 265 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:07.367983Z node 265 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T17:52:07.367994Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 TestWaitNotification wait txId: 1004 2024-11-21T17:52:07.368427Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:52:07.368437Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:52:07.368513Z node 265 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:52:07.368528Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:52:07.368532Z node 265 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [265:955:2894] TestWaitNotification: OK eventTxId 1004 >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2024-11-21T17:52:12.408931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:12.408948Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:12.408962Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:12.411067Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:12.411182Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:12.411227Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:12.411999Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:12.417591Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:12.417686Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:12.417788Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:12.417804Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:12.417808Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:12.417836Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:12.419971Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:12.420007Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:12.420044Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:12.420048Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:12.420051Z 
node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:12.420054Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.420097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.420101Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.420115Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:12.420126Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:12.420155Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.420159Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:12.420163Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:12.420167Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:12.420171Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:12.420175Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:12.420180Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:12.427165Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.427183Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.427194Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:12.427666Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:12.427679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:12.427696Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:12.427723Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:12.427737Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:12.427747Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:12.427755Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.427759Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:12.427765Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:12.427769Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.427829Z node 1 :TX_DATASHARD TRACE: Execution status 
for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:12.427834Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:12.427838Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:12.427842Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.427852Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:12.427856Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:12.427860Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:12.427864Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.427869Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:12.448675Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:12.448691Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.448695Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.448703Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:12.448710Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:12.448777Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.448784Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.448789Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:12.448801Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:12.448805Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:12.448832Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.448838Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.448841Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:12.448843Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:12.449481Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:12.449498Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.449544Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.449550Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.449557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.449563Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:12.449567Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:12.449575Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:12.449579Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:12.449584Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.449588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:12.449592Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:12.449596Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:12.449631Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:12.449634Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.449637Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:12.449641Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:12.449644Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:12.449656Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.449659Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:12.449663Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:12.449666Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:12.449676Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:12.449680Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:12.449683Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:12.449688Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.449691Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:12.449695Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
aitInRS 2024-11-21T17:52:15.200584Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:15.200586Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T17:52:15.200588Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:52:15.200591Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T17:52:15.200660Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T17:52:15.200667Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:15.200675Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:15.200678Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:52:15.200680Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T17:52:15.200682Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.200725Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T17:52:15.200729Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T17:52:15.200732Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2024-11-21T17:52:15.200735Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T17:52:15.200740Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T17:52:15.200743Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T17:52:15.200748Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2024-11-21T17:52:15.200751Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:15.200754Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:15.200757Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:15.200760Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:15.200795Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:334:2307], Recipient [2:334:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:15.200798Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:15.200802Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2024-11-21T17:52:15.200805Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:15.200807Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T17:52:15.200809Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437185 2024-11-21T17:52:15.200812Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2024-11-21T17:52:15.200815Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200818Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2024-11-21T17:52:15.200821Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2024-11-21T17:52:15.200824Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2024-11-21T17:52:15.200906Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T17:52:15.200910Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200911Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2024-11-21T17:52:15.200913Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2024-11-21T17:52:15.200915Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2024-11-21T17:52:15.200917Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200919Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2024-11-21T17:52:15.200921Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:15.200922Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2024-11-21T17:52:15.200927Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2024-11-21T17:52:15.200929Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2024-11-21T17:52:15.200930Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2024-11-21T17:52:15.200933Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200935Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:15.200937Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2024-11-21T17:52:15.200939Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2024-11-21T17:52:15.200943Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200946Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2024-11-21T17:52:15.200947Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2024-11-21T17:52:15.200949Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2024-11-21T17:52:15.200951Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200953Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2024-11-21T17:52:15.200955Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2024-11-21T17:52:15.200957Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2024-11-21T17:52:15.200959Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200960Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2024-11-21T17:52:15.200962Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2024-11-21T17:52:15.200964Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2024-11-21T17:52:15.200966Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.200968Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T17:52:15.200969Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T17:52:15.200972Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T17:52:15.201003Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T17:52:15.201008Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:15.201012Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.201014Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T17:52:15.201016Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T17:52:15.201018Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T17:52:15.201047Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T17:52:15.201050Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T17:52:15.201051Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T17:52:15.201053Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T17:52:15.201055Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T17:52:15.201057Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T17:52:15.201059Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T17:52:15.201060Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:15.201062Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T17:52:15.201064Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T17:52:15.201066Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T17:52:15.211998Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T17:52:15.212019Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T17:52:15.212047Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.212053Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.212070Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.212079Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.212147Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T17:52:15.212151Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T17:52:15.212158Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T17:52:15.212162Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T17:52:15.212169Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.212173Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2024-11-21T17:52:08.366916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:08.366934Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:08.366945Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:08.369029Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:08.369154Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:08.369202Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:08.370198Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:08.376522Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:08.376696Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:08.376838Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:08.376857Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:08.376864Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:08.376906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:08.380293Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:08.380342Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:08.380385Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:08.380389Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:08.380392Z node 1 :TX_DATASHARD INFO: Cannot activate change 
sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:08.380396Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:08.380466Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:08.380473Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:08.380490Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:08.380505Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:08.380542Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:08.380548Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:08.380552Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:08.380557Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:08.380559Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:08.380563Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:08.380567Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:08.386930Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:08.386951Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:08.386958Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:08.387303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:08.387310Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:08.387332Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:08.387358Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:08.387367Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:08.387375Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:08.387382Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:08.387385Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:08.387389Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:08.387392Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:08.387447Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is 
DelayCompleteNoMoreRestarts 2024-11-21T17:52:08.387452Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:08.387455Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:08.387457Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:08.387467Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:08.387469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:08.387472Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:08.387474Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:08.387477Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:08.408379Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:08.408399Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:08.408405Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:08.408415Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:08.408426Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:08.408521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:08.408527Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:08.408534Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:08.408550Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:08.408555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:08.408590Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:08.408597Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:08.408601Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:08.408605Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:08.409271Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:08.409283Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:08.409325Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:08.409330Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2024-11-21T17:52:08.409336Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:08.409343Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:08.409347Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:08.409354Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:08.409358Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:08.409367Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:08.409370Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:08.409374Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:08.409378Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:08.409426Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:08.409430Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:08.409433Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:08.409436Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:08.409439Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:08.409449Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:08.409453Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:08.409456Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:08.409459Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:08.409469Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:08.409472Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:08.409475Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:08.409480Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:08.409483Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:08.409487Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
eady operations at 9437184 2024-11-21T17:52:15.197270Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197288Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197306Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2024-11-21T17:52:15.197318Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T17:52:15.197324Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197331Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:15.197379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197384Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197390Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.197394Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:15.197396Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197399Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:15.197401Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:15.197421Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197424Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197428Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.197431Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T17:52:15.197434Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197436Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:15.197438Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:15.197455Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:15.197469Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197472Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197477Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.197480Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:15.197482Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2024-11-21T17:52:15.197485Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:15.197497Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197500Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197504Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.197508Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:15.197510Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197524Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197527Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197530Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.197533Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:52:15.197536Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197549Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197551Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197555Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.197558Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:15.197560Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197566Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2024-11-21T17:52:15.197570Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T17:52:15.197586Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197616Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197619Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197623Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 4 ms 2024-11-21T17:52:15.197627Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:15.197630Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197642Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:15.197645Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2024-11-21T17:52:15.197649Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:15.197651Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:15.197678Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2024-11-21T17:52:15.197683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:15.197687Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2024-11-21T17:52:15.197712Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:15.197714Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:15.197716Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2024-11-21T17:52:15.197732Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T17:52:15.197735Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:15.197737Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2024-11-21T17:52:15.197747Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:15.197749Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:15.197751Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2024-11-21T17:52:15.197761Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:15.197763Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:15.197764Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2024-11-21T17:52:15.197774Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:52:15.197776Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2024-11-21T17:52:15.197777Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2024-11-21T17:52:15.197789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:15.197791Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:15.197793Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2024-11-21T17:52:15.197803Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:15.197804Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:15.197806Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - >> BasicStatistics::TwoNodes >> DataShardOutOfOrder::UncommittedReads [GOOD] |83.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicService::UnknownConsumer >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> IndexBuildTest::RejectsCancel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: 2024-11-21T17:52:10.623889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T17:52:10.626191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:10.626302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:10.626333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:10.626580Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:10.626589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000aaa/r3tmp/tmpxpIDu8/pdisk_1.dat 2024-11-21T17:52:10.709510Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:10.790769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:10.877901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:10.877931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:10.878918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:10.878944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:10.890010Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:10.890108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:10.890175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:11.222672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:11.253550Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1187:2345], Recipient [2:1214:2356]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:11.255431Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1187:2345], Recipient [2:1214:2356]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:11.255536Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1214:2356] 2024-11-21T17:52:11.255575Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:11.263722Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1187:2345], Recipient [2:1214:2356]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:11.265231Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:11.265481Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:11.265647Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at 
tablet: 72075186224037888 2024-11-21T17:52:11.265669Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:11.265676Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:11.265724Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:11.269631Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:11.269706Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:11.269743Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1238:2371] 2024-11-21T17:52:11.269748Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:11.269752Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:52:11.269757Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:11.269986Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1214:2356], Recipient [2:1214:2356]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:11.269994Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:11.270078Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:11.270100Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:11.270135Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:11.270141Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:11.270147Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:11.270152Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:11.270156Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:11.270161Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:11.270165Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:11.333002Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1242:2372], Recipient [2:1214:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.333022Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.333030Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1196:2735], serverId# [2:1242:2372], sessionId# [0:0:0] 2024-11-21T17:52:11.333113Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:776:2433], Recipient [2:1242:2372] 2024-11-21T17:52:11.333118Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:11.333143Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:11.333183Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:11.333193Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at 
tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:11.333214Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:11.333220Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:11.333224Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:11.333229Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:11.333232Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:11.333280Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:11.333284Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T17:52:11.333288Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:11.333292Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:11.333301Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:11.333305Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:11.333308Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:11.333311Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:11.333316Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:11.334334Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1243:2373], Recipient [2:1214:2356]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T17:52:11.334349Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:52:11.334378Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:11.334384Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:11.334388Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:11.334397Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:11.334410Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:11.668608Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1274:2381], Recipient [2:1214:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.668625Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.668631Z node 2 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:1271:2757], serverId# [2:1274:2381], sessionId# [0:0:0] 2024-11-21T17:52:11.668752Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:992:2584], Recipient [2:1274:2381] 2024-11-21T17:52:11.668758Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:11.668788Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:11.668796Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T17:52:11.668800Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T17:52:11.668804Z node 2 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2024-11-21T17:52:11.669414Z node 2 :TX_DATASHARD DEBUG: Plann ... \004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\00 2024-11-21T17:52:15.426075Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:15.426109Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:52:15.426113Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:52:15.426125Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:15.426196Z node 3 :TX_DATASHARD TRACE: TxId: 281474976715664, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2024-11-21T17:52:15.426205Z node 3 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2024-11-21T17:52:15.426212Z node 3 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2024-11-21T17:52:15.426251Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:52:15.426264Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T17:52:15.426268Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:52:15.426273Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:15.426277Z node 3 :TX_DATASHARD TRACE: Trying to 
execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:15.426286Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2024-11-21T17:52:15.426302Z node 3 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2024-11-21T17:52:15.426307Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T17:52:15.426311Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:15.426314Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:52:15.426318Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:52:15.426325Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2024-11-21T17:52:15.426337Z node 3 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2024-11-21T17:52:15.426388Z node 3 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T17:52:15.426398Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:15.426402Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:52:15.426405Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:15.426409Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:15.426416Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2024-11-21T17:52:15.426420Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:15.426424Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:15.426427Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:52:15.426437Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T17:52:15.426440Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:15.426444Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished ... blocked commit for tablet 72075186224037888 2024-11-21T17:52:15.436523Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xmvj2dbf9s6tbd9ckqj9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmNjOWQ4NjItNmQ2YjlkNGYtYzdjZTY2ODYtZDA5NDEwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:15.436846Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:926:2742], Recipient [3:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T17:52:15.436876Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T17:52:15.436884Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2024-11-21T17:52:15.436893Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2024-11-21T17:52:15.436900Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T17:52:15.436913Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:52:15.436916Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T17:52:15.436919Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:15.436922Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:15.436932Z node 3 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T17:52:15.436936Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:52:15.436938Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:15.436941Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T17:52:15.436943Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:52:15.436952Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T17:52:15.436987Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2024-11-21T17:52:15.436989Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T17:52:15.436992Z node 3 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:15.436994Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:52:15.437002Z node 3 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T17:52:15.437004Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:15.437006Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T17:52:15.437009Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
2024-11-21T17:52:15.549706Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2024-11-21T17:52:15.549796Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2024-11-21T17:52:15.549813Z node 3 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [3:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2024-11-21T17:52:15.726691Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:15.726713Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:15.726721Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1000 ms, status: COMPLETE 2024-11-21T17:52:15.726740Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:15.726746Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T17:52:15.726749Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T17:52:15.726758Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:926:2742], 0} after executionsCount# 1 2024-11-21T17:52:15.726765Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:926:2742], 0} sends rowCount# 4, bytes# 96, quota rows left# 997, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T17:52:15.726788Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:926:2742], 0} finished in read 2024-11-21T17:52:15.727389Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:926:2742], Recipient [3:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T17:52:15.727401Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } >> YdbQueryService::TestCreateDropAttachSession [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2024-11-21T17:52:13.102743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:13.103242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:13.103263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000a76/r3tmp/tmp7qGp3z/pdisk_1.dat 2024-11-21T17:52:13.201324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.217061Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:13.258816Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:52:13.259018Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:52:13.259054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:13.259068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:13.269636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:13.372737Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T17:52:13.372755Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:52:13.372792Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T17:52:13.379061Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:52:13.379250Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:52:13.379262Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:52:13.379300Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:52:13.379335Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:52:13.379345Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T17:52:13.379392Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:52:13.379754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.379966Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:52:13.379977Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T17:52:13.393210Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:13.393404Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:13.393472Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T17:52:13.393508Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:13.394187Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:13.398683Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:13.398903Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:13.398938Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:13.399050Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T17:52:13.399067Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T17:52:13.399071Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T17:52:13.399103Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:13.401449Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T17:52:13.401503Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:13.401532Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:666:2557] 2024-11-21T17:52:13.401536Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T17:52:13.401539Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T17:52:13.401542Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:13.401557Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:13.401646Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:641:2542] 2024-11-21T17:52:13.401671Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:13.402269Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:641:2542]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:13.402468Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:13.402472Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:13.402545Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T17:52:13.402562Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T17:52:13.402618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:52:13.402623Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:13.402627Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T17:52:13.402630Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T17:52:13.402633Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T17:52:13.402636Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:52:13.402639Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:52:13.402655Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:13.402657Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:13.402662Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:630:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T17:52:13.402731Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T17:52:13.402734Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:13.402748Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:13.402792Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T17:52:13.402800Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T17:52:13.402812Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T17:52:13.402816Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:13.402833Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T17:52:13.402837Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T17:52:13.402839Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T17:52:13.402869Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:13.402871Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit 
StoreSchemeTx 2024-11-21T17:52:13.402873Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:13.402875Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:13.402881Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T17:52:13.402883Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:13.402885Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T17:52:13.402887Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T17:52:13.402890Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:13.402970Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:13.402988Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:13.403087Z node 1 :TX_DATASHARD DEBUG: LoadChangeRec ... 2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T17:52:16.105626Z node 2 :KQP_EXECUTER INFO: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T17:52:16.105629Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T17:52:16.105634Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T17:52:16.105639Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T17:52:16.105644Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. 
Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T17:52:16.105727Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:931:2745], Recipient [2:888:2712]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:16.105733Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:16.105741Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:930:2744], serverId# [2:931:2745], sessionId# [0:0:0] 2024-11-21T17:52:16.105783Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:927:2728], Recipient [2:888:2712]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 927 RawX2: 8589937320 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\237\003\000\000\000\000\000\000\021\250\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000 2024-11-21T17:52:16.105789Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:16.105819Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:888:2712], Recipient [2:888:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:52:16.105822Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T17:52:16.105834Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:52:16.105887Z node 2 :TX_DATASHARD TRACE: TxId: 281474976715662, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2024-11-21T17:52:16.105894Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2024-11-21T17:52:16.105899Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2024-11-21T17:52:16.105950Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CheckDataTx 2024-11-21T17:52:16.105962Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T17:52:16.105964Z node 2 :TX_DATASHARD TRACE: Advance execution 
plan for [0:281474976715662] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T17:52:16.105968Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:16.105970Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T17:52:16.105977Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T17:52:16.105986Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715662] at 72075186224037888 2024-11-21T17:52:16.105990Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T17:52:16.105992Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:16.105994Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T17:52:16.105997Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T17:52:16.106006Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T17:52:16.106017Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715662] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2024-11-21T17:52:16.106095Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T17:52:16.106106Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T17:52:16.106108Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T17:52:16.106110Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2024-11-21T17:52:16.106112Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:16.106127Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:16.106131Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2024-11-21T17:52:16.106134Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T17:52:16.106138Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2024-11-21T17:52:16.106147Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T17:52:16.106151Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T17:52:16.106154Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2024-11-21T17:52:16.116501Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:52:16.116528Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2024-11-21T17:52:16.116542Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T17:52:16.116560Z node 2 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T17:52:16.116575Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:16.116865Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [2:24:2071], Recipient [2:888:2712]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2001} 2024-11-21T17:52:16.116872Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2024-11-21T17:52:16.116877Z node 2 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T17:52:16.116883Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:52:16.116903Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T17:52:16.116946Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:16.116956Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T17:52:16.116966Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd7xmw700ec1mta5d8866bfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzE0NjAwZGQtMThhODY2NjgtYTg5NzFiNGItMjRkN2YyMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD] >> TPersQueueTest::TopicServiceReadBudget |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> YdbYqlClient::BuildInfo >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> DemoTx::Scenario_3 [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> YdbOlapStore::ManyTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2024-11-21T17:52:12.652789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:12.652804Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:12.652814Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:12.654664Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:12.654751Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:12.654788Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:12.655371Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:12.662060Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:12.662156Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:12.662264Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:12.662275Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:12.662281Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:12.662308Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:12.664365Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:12.664406Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:12.664446Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:12.664450Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:12.664453Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:12.664455Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.664506Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.664511Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.664525Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:12.664537Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 
2024-11-21T17:52:12.664565Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.664569Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:12.664573Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:12.664577Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:12.664579Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:12.664582Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:12.664585Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:12.669787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.669802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.669807Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:12.670108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:12.670119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:12.670136Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:12.670155Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:12.670161Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:12.670167Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:12.670171Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.670175Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:12.670177Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:12.670179Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.670216Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:12.670218Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:12.670221Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:12.670224Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.670231Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:12.670234Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:12.670237Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 
2024-11-21T17:52:12.670239Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.670242Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:12.691010Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:12.691032Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.691038Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.691049Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:12.691061Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:12.691130Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.691135Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.691140Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:12.691152Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:12.691155Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:12.691190Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.691210Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.691214Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:12.691218Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:12.691774Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:12.691783Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.691815Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.691819Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.691825Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.691830Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:12.691832Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:12.691837Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:12.691841Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:12.691845Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.691848Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:12.691850Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:12.691852Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:12.691884Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:12.691886Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.691888Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:12.691890Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:12.691892Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:12.691899Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.691901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:12.691903Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:12.691905Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:12.691915Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:12.691917Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:12.691919Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:12.691922Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.691924Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:12.691925Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T17:52:16.445581Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:16.445586Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:52:16.445590Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:52:16.445679Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T17:52:16.445709Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.445713Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T17:52:16.445724Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:227:2222], Recipient [1:433:2383]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T17:52:16.445728Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:52:16.445731Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2024-11-21T17:52:16.445740Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T17:52:16.445748Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2024-11-21T17:52:16.445759Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T17:52:16.445775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T17:52:16.445778Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.445782Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T17:52:16.445794Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T17:52:16.445798Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.445801Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T17:52:16.445808Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T17:52:16.445811Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.445814Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T17:52:16.445825Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 2146435072, Sender [1:433:2383], Recipient [1:433:2383]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:16.445829Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:16.445834Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T17:52:16.445839Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:52:16.445845Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2024-11-21T17:52:16.445850Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2024-11-21T17:52:16.445855Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T17:52:16.445859Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T17:52:16.483132Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2024-11-21T17:52:16.483154Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2024-11-21T17:52:16.483313Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2024-11-21T17:52:16.483323Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:16.483334Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2024-11-21T17:52:16.483338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2024-11-21T17:52:16.483342Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2024-11-21T17:52:16.483346Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:52:16.483409Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2024-11-21T17:52:16.483413Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2024-11-21T17:52:16.483416Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2024-11-21T17:52:16.483419Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2024-11-21T17:52:16.483424Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T17:52:16.483427Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2024-11-21T17:52:16.483430Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2024-11-21T17:52:16.483435Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:16.483438Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T17:52:16.483443Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T17:52:16.483446Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no 
ready operations at 9437186 2024-11-21T17:52:16.483566Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T17:52:16.483571Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.483575Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T17:52:16.483595Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T17:52:16.483597Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.483600Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2024-11-21T17:52:16.483617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T17:52:16.483619Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.483623Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T17:52:16.483632Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T17:52:16.483634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.483636Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T17:52:16.483645Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T17:52:16.483647Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.483649Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T17:52:16.483657Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T17:52:16.483659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.483661Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T17:52:16.483670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T17:52:16.483672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.483674Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T17:52:16.495414Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T17:52:16.495445Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T17:52:16.495463Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T17:52:16.495477Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:52:16.495484Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T17:52:16.495551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:433:2383], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T17:52:16.495557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:16.495562Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] Test command err: 2024-11-21T17:52:12.715143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791880964957724:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:12.715362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011ec/r3tmp/tmpKiQS4z/pdisk_1.dat 2024-11-21T17:52:12.771282Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16406, node 1 2024-11-21T17:52:12.819084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:12.819113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:12.823659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:12.827801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:12.827816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:12.827818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:12.827859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1530 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:12.858101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.859255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:12.859269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.860044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:12.860112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:12.860121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:52:12.860535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:12.860549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:52:12.860600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:52:12.860849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:12.861603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211532905, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:12.861615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:52:12.861677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:52:12.862036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:12.862075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:12.862090Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:52:12.862100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:52:12.862107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:52:12.862116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:52:12.862562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:52:12.862584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:52:12.862588Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:12.862603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:52:13.129448Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson 2024-11-21T17:52:13.752511Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791886552497548:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:13.752669Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011ec/r3tmp/tmpoUaAHe/pdisk_1.dat 2024-11-21T17:52:13.764943Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9424, node 4 2024-11-21T17:52:13.779103Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T17:52:13.779115Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:13.779116Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:13.779149Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:13.853119Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:13.853158Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:13.854863Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:13.855288Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.855395Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:13.855407Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.855929Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:13.855987Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:13.855997Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:13.856496Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:13.856507Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:13.856561Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:13.856902Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.857789Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211533906, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:13.857804Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:13.857874Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:13.858233Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.858290Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:13.858305Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:13.858318Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:13.858332Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:13.858349Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:5 ... CHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:15.570653Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:15.570668Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 
2024-11-21T17:52:15.576683Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:16.345904Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439791898130210474:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:16.345946Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011ec/r3tmp/tmpu3VYhC/pdisk_1.dat 2024-11-21T17:52:16.367749Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18729, node 13 2024-11-21T17:52:16.390050Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:16.390062Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:16.390065Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:16.390116Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:16.446859Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:16.446900Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:16.448893Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:16.449428Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:16.449535Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:16.449546Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:16.450876Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:16.450947Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:16.450953Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:52:16.451479Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:16.451490Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:16.451558Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:16.452244Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:16.454304Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211536496, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:16.454313Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:16.454399Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:16.454952Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:16.455015Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:16.455032Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:16.455048Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:16.455064Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:16.455083Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:16.455292Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:52:16.455319Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:16.455323Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:16.455338Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:16.660900Z node 13 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:52:16.661335Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T17:52:16.661543Z node 13 :KQP_PROXY DEBUG: TraceId: "01jd7xmwjj8bh3xc5bah8qysrg", Request has 18445011862172.890077s seconds to be completed 2024-11-21T17:52:16.661957Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU= 2024-11-21T17:52:16.661984Z node 13 :KQP_PROXY DEBUG: TraceId: "01jd7xmwjj8bh3xc5bah8qysrg", Created new session, sessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, workerId: [13:7439791898130211225:2292], database: , longSession: 1, local sessions count: 1 2024-11-21T17:52:16.661994Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T17:52:16.662020Z node 13 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jd7xmwjj8bh3xc5bah8qysrg 2024-11-21T17:52:16.662039Z node 13 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T17:52:16.662047Z node 13 :KQP_PROXY DEBUG: Updated table service config. 
2024-11-21T17:52:16.662052Z node 13 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T17:52:16.662066Z node 13 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2024-11-21T17:52:16.662092Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:52:16.662104Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:52:16.662115Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:52:16.662123Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:52:16.662131Z node 13 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T17:52:16.662425Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, ActorId: [13:7439791898130211225:2292], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:52:16.664288Z node 13 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, rpc ctrl: [13:7439791898130211241:2293], sameNode: 1, trace_id: 2024-11-21T17:52:16.664302Z node 13 :KQP_PROXY TRACE: Attach local session: [13:7439791898130211225:2292] to rpc: [13:7439791898130211241:2293] on same node 2024-11-21T17:52:16.665767Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, ActorId: [13:7439791898130211225:2292], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:52:16.665783Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, ActorId: [13:7439791898130211225:2292], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:52:16.665787Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, ActorId: [13:7439791898130211225:2292], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:52:16.665790Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, ActorId: [13:7439791898130211225:2292], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:52:16.665816Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, ActorId: [13:7439791898130211225:2292], ActorState: unknown state, Session actor destroyed 2024-11-21T17:52:16.665845Z node 13 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU=, workerId: [13:7439791898130211225:2292], local sessions count: 0 2024-11-21T17:52:16.667155Z node 13 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [13:7439791898130211244:2295], trace_id: 2024-11-21T17:52:16.667198Z node 13 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=13&id=ZjAyNTNlNjMtNGFkNmIzY2ItNDllODA2MzktYWZkN2E2ZWU= 2024-11-21T17:52:16.667223Z node 13 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: 
[13:7439791898130211244:2295], selfId: [13:7439791898130210550:2256], source: [13:7439791898130210550:2256] >> DemoTx::Scenario_4 >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset >> Yq_1::ModifyQuery [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersQueueTest::Init [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables >> IndexBuildTest::RejectsCancel [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2024-11-21T17:52:12.993216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791881282612167:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:12.993401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:13.022664Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13409: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13409 } ] E1121 17:52:13.022792367 807913 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:52:13.022833107 807913 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016c1/r3tmp/tmpaIsrsn/pdisk_1.dat 2024-11-21T17:52:13.288267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791885577580006:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:13.288307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 13409, node 1 2024-11-21T17:52:13.311444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:13.311465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:13.313249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:13.385666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:13.385686Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:13.385688Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:13.385728Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:13.385933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:52:13.385945Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:13.684933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.686045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:13.686071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.686657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:13.686759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:13.686772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:13.687159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:13.687168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:13.687281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:13.687453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.688301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211533731, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:13.688313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:13.688376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:13.688715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.688765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:13.688782Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:13.688795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:13.688805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:13.688818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:13.689129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:13.689144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:13.689146Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:13.689158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:14.022067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:14.022148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:14.022922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:52:14.022973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:14.023029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:14.023052Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:14.023185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:14.023196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:14.023200Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:14.023243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:14.023252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:14.023253Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:52:14.024026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211534067, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:14.024037Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211534067, at schemeshard: 72057594046644480 2024-11-21T17:52:14.024052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:52:14.024327Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2024-11-21T17:52:14.024335Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T17:52:14.024336Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2024-11-21T17:52:14.024387Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2024-11-21T17:52:14.024400Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T17:52:14.024401Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T17:52:14.024758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:14.024784Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2024-11-21T17:52:14.024795Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2024-11-21T17:52:14.024796Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2024-11-21T17:52:14.024823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:14.024840Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:14.024856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:14.024870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:52:14.024891Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2024-11-21T17:52:14.024897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in pro ... elId: 1, peer: [4:7439791905085699332:2831] 2024-11-21T17:52:17.775267Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. Shards State: TShardState{ TabletId: 72075186224037890, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : utqueoi09bah0vv5r3un)], RetryAttempt: 0, ResolveAttempt: 0 } 2024-11-21T17:52:17.775275Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. effective maxinflight 1024 sorted 0 2024-11-21T17:52:17.775276Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. BEFORE: 1.0 2024-11-21T17:52:17.775284Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. Send EvRead to shardId: 72075186224037890, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2024-11-21T17:52:17.775297Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. AFTER: 0.1 2024-11-21T17:52:17.775304Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T17:52:17.775325Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:17.775329Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T17:52:17.775332Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T17:52:17.775579Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. Recv TEvReadResult from ShardID=72075186224037890, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2024-11-21T17:52:17.775595Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. Taken 0 locks 2024-11-21T17:52:17.775598Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. new data for read #0 seqno = 1 finished = 1 2024-11-21T17:52:17.775606Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T17:52:17.775613Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2024-11-21T17:52:17.775621Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:52:17.775625Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. enter pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:52:17.775639Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. exit pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 1 freeSpace: 8387510 2024-11-21T17:52:17.775647Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. returned 1 rows; processed 1 rows 2024-11-21T17:52:17.775663Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. dropping batch for read #0 2024-11-21T17:52:17.775668Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. effective maxinflight 1024 sorted 0 2024-11-21T17:52:17.775685Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:52:17.775690Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1, CA Id [4:7439791905085699331:2830]. returned async data processed rows 1 left freeSpace 8387510 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:52:17.776034Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:17.776045Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699332:2831], TxId: 281474976715772, task: 2. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2024-11-21T17:52:17.776056Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:17.776062Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 2. Finish input channelId: 1, from: [4:7439791905085699331:2830] 2024-11-21T17:52:17.776070Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:52:17.776073Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646927 2024-11-21T17:52:17.776074Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699332:2831], TxId: 281474976715772, task: 2. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:17.776080Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:17.776082Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:17.776084Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1. Tasks execution finished 2024-11-21T17:52:17.776086Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699331:2830], TxId: 281474976715772, task: 1. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:52:17.776121Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 1. pass away 2024-11-21T17:52:17.776135Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699332:2831], TxId: 281474976715772, task: 2. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:17.776138Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699332:2831], TxId: 281474976715772, task: 2. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:17.776143Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:52:17.776146Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T17:52:17.776162Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715772;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:52:17.776191Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699332:2831], TxId: 281474976715772, task: 2. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:17.776202Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699332:2831], TxId: 281474976715772, task: 2. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:17.776205Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:52:17.776206Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 2. Tasks execution finished 2024-11-21T17:52:17.776209Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791905085699332:2831], TxId: 281474976715772, task: 2. Ctx: { TraceId : 01jd7xmxtmdshg3b26a4ekxxvr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTA3MzNkZjYtYTk4MmM4ZDYtZGIzYmZiOTEtMjNhYWU1YzU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:52:17.776222Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715772, task: 2. pass away 2024-11-21T17:52:17.776258Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715772;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> HttpRequest::Analyze |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TPersQueueTest::BadSids [GOOD] >> TPersQueueTest::Cache ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:56.471448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:56.471473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.471477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:56.471481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:56.471496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:56.471498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:56.471505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:56.471570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:56.482470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:56.482488Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:56.485185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:56.485898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:56.485942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:56.487348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:56.487567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:56.487655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.487712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:56.488726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.489001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.489013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T17:51:56.489053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:56.489060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.489065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:56.489078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.490291Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:56.506637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:56.506708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.506780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:56.506842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:56.506851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.507606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.507628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:56.507684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.507692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:56.507695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:56.507698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:56.507984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.507991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:56.507994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:56.508288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.508295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.508299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.508304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.508723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:56.509200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:56.509280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:56.509569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:56.509595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:56.509603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.509657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:56.509664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:56.509700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.509713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:56.510244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:56.510258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:56.510315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:56.510323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:56.510431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:56.510439Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:56.510452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:56.510456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.510462Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:56.510468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:56.510473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:56.510477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:56.510490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:56.510495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:56.510500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:56.510847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.510864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:56.510867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:56.510871Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:56.510874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:56.510885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:18.696335Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T17:52:18.696709Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:52:18.696724Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1142:3006], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:18.696728Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T17:52:18.696750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:52:18.696755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1232:3086] TestWaitNotification: OK eventTxId 102 2024-11-21T17:52:18.697099Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T17:52:18.697126Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2024-11-21T17:52:18.697308Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute 
DatabaseName: "/MyRoot" IndexBuildId: 102 2024-11-21T17:52:18.697363Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } 2024-11-21T17:52:18.697576Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:18.697626Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 55us result status StatusSuccess 2024-11-21T17:52:18.697735Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T17:52:18.697961Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:52:18.697998Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 40us result status StatusSuccess 2024-11-21T17:52:18.698143Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> Yq_1::DeleteQuery [GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2024-11-21T17:52:13.429908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791886105534081:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:13.430052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:52:13.465722793 808407 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:52:13.465772020 808407 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:52:13.466228Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2791: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2791 } ] 2024-11-21T17:52:13.471124Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2791: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2791 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016a9/r3tmp/tmpIiWw23/pdisk_1.dat 2024-11-21T17:52:13.712648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791886105534550:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:13.712684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:13.732263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:13.732283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:13.733604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2791, node 1 TClient is connected to server localhost:3924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:13.847578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:52:13.847591Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:13.847799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:13.847807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:13.847809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:13.847848Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:14.103800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:14.104773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:14.104790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:14.105373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:14.105452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:14.105462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:14.105956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:14.106024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:14.106033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:14.106412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:14.107323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211534151, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:14.107336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:14.107424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:14.107790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:14.107830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:14.107843Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:14.107855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:14.107877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:14.107887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:14.108273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:14.108286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:14.108289Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:14.108299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:14.474671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:14.474741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:14.475588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:52:14.475654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:14.475716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:14.475738Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:14.475918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:14.475929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:14.475933Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:14.475976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:14.475983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:14.475985Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:52:14.477312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211534522, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:14.477336Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211534522, at schemeshard: 72057594046644480 2024-11-21T17:52:14.477380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:52:14.477993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:14.478067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:14.478086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:14.478098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:14.478108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:52:14.478122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T17:52:14.478574Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2024-11-21T17:52:14.478582Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T17:52:14.478584Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2024-11-21T17:52:14.479071Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2024-11-21T17:52:14.479079Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2024-11-21T17:52:14.479080Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2024-11-21T17:52:14.479344Z node 1 :FL ... :52:19.078632Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. effective maxinflight 1024 sorted 0 2024-11-21T17:52:19.078633Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. 
BEFORE: 1.0 2024-11-21T17:52:19.078643Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. Send EvRead to shardId: 72075186224037895, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2024-11-21T17:52:19.078655Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. AFTER: 0.1 2024-11-21T17:52:19.078661Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T17:52:19.078681Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:52:19.078688Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T17:52:19.078690Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T17:52:19.078858Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. Recv TEvReadResult from ShardID=72075186224037895, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2024-11-21T17:52:19.078867Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. Taken 0 locks 2024-11-21T17:52:19.078869Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. new data for read #0 seqno = 1 finished = 1 2024-11-21T17:52:19.078873Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2024-11-21T17:52:19.078877Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:52:19.078881Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:52:19.078883Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. enter pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:52:19.078885Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. 
exit pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:52:19.078886Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. returned 0 rows; processed 0 rows 2024-11-21T17:52:19.078899Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. dropping batch for read #0 2024-11-21T17:52:19.078905Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. effective maxinflight 1024 sorted 0 2024-11-21T17:52:19.078907Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:52:19.078909Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1, CA Id [4:7439791912075754010:2806]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:52:19.078930Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:52:19.078930Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754011:2807], TxId: 281474976715769, task: 2. Ctx: { TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2024-11-21T17:52:19.078936Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 2. Finish input channelId: 1, from: [4:7439791912075754010:2806] 2024-11-21T17:52:19.078936Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:52:19.078941Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:52:19.078943Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2024-11-21T17:52:19.078947Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:52:19.078949Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. 
TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:52:19.078950Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1. Tasks execution finished 2024-11-21T17:52:19.078953Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754011:2807], TxId: 281474976715769, task: 2. Ctx: { TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:19.078958Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754010:2806], TxId: 281474976715769, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T17:52:19.078969Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754011:2807], TxId: 281474976715769, task: 2. Ctx: { TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:19.078971Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754011:2807], TxId: 281474976715769, task: 2. Ctx: { TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:19.078974Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:52:19.078977Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T17:52:19.078979Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 1. pass away 2024-11-21T17:52:19.079002Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754011:2807], TxId: 281474976715769, task: 2. Ctx: { TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:19.079003Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715769;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:52:19.079010Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754011:2807], TxId: 281474976715769, task: 2. Ctx: { TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:19.079012Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:52:19.079013Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 2. Tasks execution finished 2024-11-21T17:52:19.079014Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791912075754011:2807], TxId: 281474976715769, task: 2. Ctx: { TraceId : 01jd7xmz3b8a8x2cepq6tte4hd. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=N2VjZGJlMWUtMTBjMDRhMTgtZjg3OTVhNjAtYzI4NjFmMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:52:19.079027Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715769, task: 2. pass away 2024-11-21T17:52:19.079045Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715769;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:52:19.080144Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqueoi099v66en0ofgi" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:561: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> Yq_1::Basic_TaggedLiteral [GOOD] >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::ResetCachedPath |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011e2/r3tmp/tmpJLvkzN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15727, node 1 TClient is connected to server localhost:5402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T17:52:17.728040Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791903169170188:2242];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:17.728067Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011e2/r3tmp/tmpjX53n3/pdisk_1.dat 2024-11-21T17:52:17.743811Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22787, node 4 2024-11-21T17:52:17.763513Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:17.763528Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:17.763530Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:17.763593Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29896 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:17.827804Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.827841Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.829510Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.832029Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.832169Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:17.832175Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.832665Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:17.832718Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:17.832722Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:52:17.833078Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:17.833089Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:17.833586Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:52:17.834468Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:17.834500Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211537882, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:17.834506Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:17.834586Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:17.834990Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:17.835041Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:17.835054Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:17.835068Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:17.835078Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:17.835086Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:17.835251Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:17.835273Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:17.835277Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:17.835287Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:18.035243Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439791907464138210:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:18.035272Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:18.066440Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:18.066556Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:52:18.066725Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:18.066734Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:18.068573Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:52:18.068653Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:18.068746Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:18.068771Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:18.068984Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:18.069007Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:18.069013Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:18.069080Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:18.069084Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:18.069086Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:18.069116Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:52:18.071329Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:18.071362Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:52:18.071834Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:52:18.129212Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:18.129227Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:18.129270Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:52:18.130079Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:52:18.130930Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211538176, transact ... node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:20.422290Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.422392Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:20.422403Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.422857Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:20.422913Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:20.422926Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:20.423205Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:20.423262Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:20.423271Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:20.423500Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.424153Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211540472, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:20.424166Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:20.424219Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:20.424739Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:20.424790Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:20.424806Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:20.424818Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:20.424834Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:20.424854Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:20.425048Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:20.425061Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:20.425065Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:20.425077Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:20.434844Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.434985Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:52:20.435126Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:20.435138Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.435811Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T17:52:20.435867Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:20.435911Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:20.435930Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:20.435977Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:52:20.436091Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:20.436116Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:20.436125Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:20.436174Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:20.436180Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:20.436181Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:20.437607Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:20.437637Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:52:20.438072Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:52:20.442780Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:20.442800Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:20.442821Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:52:20.443114Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:52:20.443664Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211540493, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:20.443675Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211540493 2024-11-21T17:52:20.443695Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:52:20.443972Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:20.444042Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T17:52:20.444057Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:52:20.444578Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:20.444594Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:20.444598Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:20.444635Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:20.444643Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:20.444643Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:52:20.445048Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211540493 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 214 } } 2024-11-21T17:52:20.445216Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:20.445225Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.445229Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:52:20.445455Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:20.445469Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:20.445477Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2024-11-21T17:52:13.018964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791885263859073:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:13.019031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:52:13.049351939 807978 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:52:13.049398444 807978 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:52:13.049886Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23200: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23200 } ] 2024-11-21T17:52:13.298554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:52:13.298605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439791885263859384:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016bb/r3tmp/tmpArNHnv/pdisk_1.dat 2024-11-21T17:52:13.343687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:13.343716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:13.345083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23200, node 1 TClient is connected to server localhost:61447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:13.395713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:52:13.395734Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:13.395969Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:13.395980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:13.395982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:13.396039Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:13.716163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.717270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:13.717292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.717975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:13.718065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:13.718078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:13.718564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:13.718649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:13.718662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:13.719055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.720278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211533766, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:13.720292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:13.720357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:13.720808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.720855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:13.720870Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:13.720887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:13.720901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:13.720917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:13.721288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:13.721300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:13.721303Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:13.721313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:14.051766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:14.051815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:14.052444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:52:14.052492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:14.052548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:14.052567Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:14.052796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:14.052818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:14.052824Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:14.052893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:14.052901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:14.052903Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:52:14.053590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211534102, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:14.053603Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211534102, at schemeshard: 72057594046644480 2024-11-21T17:52:14.053625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:52:14.054147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:14.054207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:14.054233Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:14.054247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:14.054255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:52:14.054271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T17:52:14.054332Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". 
Create session OK 2024-11-21T17:52:14.054336Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T17:52:14.054338Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2024-11-21T17:52:14.054507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:14.054518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:14.054521Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:14.054553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644 ... ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606415Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606424Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606432Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606436Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606438Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606442Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606454Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606458Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606462Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606465Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606476Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606479Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606482Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606490Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606494Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606502Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606508Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606510Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606514Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606524Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606526Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606528Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606539Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606542Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606544Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606549Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606557Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606562Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2024-11-21T17:52:20.606570Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [78 consecutive occurrences of this message, identical except for timestamps, spanning 2024-11-21T17:52:20.606570Z–17:52:20.606979Z, condensed here] 
2024-11-21T17:52:20.606988Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606992Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.606995Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607005Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607007Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607010Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607019Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607022Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607029Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607031Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607040Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607042Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607049Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607053Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607058Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607061Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607068Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607074Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607077Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607086Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607090Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607096Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607099Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2024-11-21T17:52:20.607103Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> TPersQueueTest::SetupLockSession [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs >> TopicService::UnknownConsumer [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2024-11-21T17:51:52.201493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:52.201516Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:52.201536Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:52.205874Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:52.206013Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:51:52.206064Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:52.206895Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:52.216534Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:52.216700Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 
2024-11-21T17:51:52.216874Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:51:52.216891Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:51:52.216899Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:51:52.216938Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:52.220514Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:51:52.220588Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:52.220647Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:51:52.220652Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:51:52.220656Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:51:52.220661Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:52.220758Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.220764Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.220789Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:51:52.220809Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:51:52.220867Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:52.220874Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:52.220881Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:51:52.220885Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:52.220889Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:52.220894Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:51:52.220899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:52.229059Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.229082Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.229091Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:51:52.229518Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:51:52.229529Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:52.229549Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute 
at 9437184 2024-11-21T17:51:52.229578Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:51:52.229587Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:51:52.229596Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:51:52.229604Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:52.229609Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:51:52.229614Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:51:52.229618Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:52.229677Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:52.229681Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:51:52.229685Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:51:52.229689Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:52.229698Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:51:52.229701Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:51:52.229704Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:51:52.229707Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:52.229711Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:51:52.251350Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:51:52.251372Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:52.251378Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:52.251389Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:51:52.251403Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:52.251495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.251503Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:52.251510Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:51:52.251529Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:51:52.251534Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:52.252797Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:52.252824Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is 
Executed 2024-11-21T17:51:52.252841Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:51:52.252846Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:51:52.253318Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:51:52.253328Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:52.253386Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.253390Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:52.253398Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:52.253403Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:52.253406Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:52.253411Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:51:52.253414Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:51:52.253419Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:52.253422Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:52.253424Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:52.253426Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:51:52.253467Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:51:52.253472Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:52.253474Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:52.253477Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:51:52.253479Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:51:52.253486Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:52.253488Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:51:52.253490Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:52.253492Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:52.253501Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:51:52.253503Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:51:52.253505Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:51:52.253509Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] 
at 9437184 is Executed 2024-11-21T17:51:52.253511Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:52.253513Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... cy: 1 ms 2024-11-21T17:52:20.657248Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657272Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657276Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657283Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657287Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657308Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657313Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657320Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657324Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657346Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657350Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657358Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657362Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657394Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657399Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657405Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657409Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657431Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657434Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657440Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657444Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657475Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657480Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657486Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657490Z node 32 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657519Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657524Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657531Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657534Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657558Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657561Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657568Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657571Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657598Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657603Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657609Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657613Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657640Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657645Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657651Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657655Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657682Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657686Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657693Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657697Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657720Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657724Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657731Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657735Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657758Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:20.657762Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T17:52:20.657769Z node 32 :TX_DATASHARD 
DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:20.657775Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:20.657842Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T17:52:20.657849Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.657855Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2024-11-21T17:52:20.657883Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:20.657887Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.657890Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T17:52:20.657906Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T17:52:20.657909Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.657913Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T17:52:20.657921Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:20.657925Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.657928Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T17:52:20.657940Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:20.657943Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.657946Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T17:52:20.657956Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:52:20.657959Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.657962Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T17:52:20.657973Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:20.657976Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.657980Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T17:52:20.657991Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:20.658002Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.658005Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2024-11-21T17:52:20.658018Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2024-11-21T17:52:20.658021Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:20.658024Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 24 29 27 28 31 25 29 30 28 24 30 30 26 17 25 27 28 14 31 29 31 22 28 24 29 29 29 29 29 17 29 - actual 24 29 27 28 31 25 29 30 28 24 30 30 26 17 25 27 28 14 31 29 31 22 28 24 29 29 29 29 29 17 29 - interm 5 - 5 5 4 4 5 - 1 4 6 6 1 3 - 6 1 - 4 - 4 4 - - 4 - - - - - - - |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData >> TopicService::UnknownTopic >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> Yq_1::DescribeQuery [GOOD] >> BasicStatistics::TwoDatabases >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardMoveTest::ResetCachedPath [GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2024-11-21T17:52:12.920447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791880732691323:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:12.920467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1121 17:52:12.954939373 807736 dns_resolver.cc:162] no server name supplied in dns URI E1121 17:52:12.954984165 807736 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2024-11-21T17:52:12.959135Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4481: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:4481 } ] 2024-11-21T17:52:12.959141Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:323: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4481: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4481 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0016c6/r3tmp/tmpgYbUfM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4481, node 1 2024-11-21T17:52:13.232603Z node 1 :KQP_COMPUTE INFO: Init DQ local file spilling service at /home/runner/.ya/build/build_root/6fwh/0016c6/r3tmp/spilling-tmp-runner/node_1_4aa4ccef-6e29cf84-dfe27012-c54aebb1, actor: [1:7439791885027659281:2281] 2024-11-21T17:52:13.232713Z node 1 :KQP_COMPUTE INFO: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/6fwh/0016c6/r3tmp/spilling-tmp-runner TClient is connected to server localhost:23373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:52:13.249025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:13.249054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:13.250582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:13.330875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:52:13.330894Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:13.330925Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:13.330929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:13.330931Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:13.330978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:13.572766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.573898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:13.573923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.574706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:13.574971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:13.575010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:52:13.575470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:13.575784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:13.575796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:13.576215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.577315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211533626, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:13.577333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:13.577413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:13.577892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.577944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:13.577962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:13.577972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:13.577987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:13.578005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:13.578673Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:13.578694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:13.578698Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:13.578725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:13.957885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/yq, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:13.957945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:13.958677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/yq 2024-11-21T17:52:13.958736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.958785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:13.958805Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:13.958971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:13.958984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:13.958988Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:13.959041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:13.959061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:13.959068Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:52:13.959725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211534004, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:13.959736Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211534004, at schemeshard: 72057594046644480 2024-11-21T17:52:13.959769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:52:13.960113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:13.960151Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:13.960158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:13.960172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:13.960183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:52:13.960192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T17:52:13.960339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:13.960357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:13.960360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:13.960395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 720575940466444 ... er_id, String : utqueoi0998dosp4rhog)], RetryAttempt: 0, ResolveAttempt: 0 } 2024-11-21T17:52:20.705972Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. effective maxinflight 1024 sorted 0 2024-11-21T17:52:20.705973Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. BEFORE: 1.0 2024-11-21T17:52:20.705980Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. Send EvRead to shardId: 72075186224037892, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2024-11-21T17:52:20.705994Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. AFTER: 0.1 2024-11-21T17:52:20.705999Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2024-11-21T17:52:20.706013Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:20.706019Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. enter getasyncinputdata results size 0, freeSpace 8388608 2024-11-21T17:52:20.706021Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2024-11-21T17:52:20.706144Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. 
Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2024-11-21T17:52:20.706153Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. Taken 0 locks 2024-11-21T17:52:20.706156Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. new data for read #0 seqno = 1 finished = 1 2024-11-21T17:52:20.706161Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2024-11-21T17:52:20.706169Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:20.706172Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. enter getasyncinputdata results size 1, freeSpace 8388608 2024-11-21T17:52:20.706177Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2024-11-21T17:52:20.706184Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 1 freeSpace: 8386499 2024-11-21T17:52:20.706192Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. returned 1 rows; processed 1 rows 2024-11-21T17:52:20.706208Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. dropping batch for read #0 2024-11-21T17:52:20.706215Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. effective maxinflight 1024 sorted 0 2024-11-21T17:52:20.706217Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2024-11-21T17:52:20.706220Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1, CA Id [4:7439791916548728657:2797]. returned async data processed rows 1 left freeSpace 8386499 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T17:52:20.706265Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728659:2798], TxId: 281474976715767, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2024-11-21T17:52:20.706266Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2024-11-21T17:52:20.706268Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:20.706273Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T17:52:20.706278Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 2. Finish input channelId: 1, from: [4:7439791916548728657:2797] 2024-11-21T17:52:20.706290Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2024-11-21T17:52:20.706291Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728659:2798], TxId: 281474976715767, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:52:20.706300Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T17:52:20.706303Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T17:52:20.706304Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1. Tasks execution finished 2024-11-21T17:52:20.706306Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728657:2797], TxId: 281474976715767, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. CustomerSuppliedId : . TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:52:20.706332Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 1. pass away 2024-11-21T17:52:20.706334Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728659:2798], TxId: 281474976715767, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:52:20.706336Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728659:2798], TxId: 281474976715767, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. 
TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:52:20.706339Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:52:20.706342Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T17:52:20.706364Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715767;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:52:20.706369Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728659:2798], TxId: 281474976715767, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2024-11-21T17:52:20.706371Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728659:2798], TxId: 281474976715767, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T17:52:20.706372Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T17:52:20.706373Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 2. Tasks execution finished 2024-11-21T17:52:20.706374Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7439791916548728659:2798], TxId: 281474976715767, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=NjdkN2I5ZTYtNjExYmNlOTItODJlMjc3OTctZTRhZTZmNGU=. TraceId : 01jd7xn0pb8vgwqaf3tc77ehnb. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T17:52:20.706383Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715767, task: 2. pass away 2024-11-21T17:52:20.706400Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715767;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:52:21.499760Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:2107: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:2107 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:21.936402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:21.936440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:21.936444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:21.936449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:21.936460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:21.936474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:21.936480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:21.936582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:21.983804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:21.983827Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:21.987042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:21.987845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:21.987887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:21.997214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:21.997518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:22.016327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.028306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:22.041941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:22.074038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:22.074069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:22.074116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:22.074129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:22.074135Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:22.074155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.075637Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:52:22.121415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:22.130740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.130847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:22.130883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:22.130889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.131723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.131769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:22.131815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.131824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:22.131837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:22.131841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:22.132163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:22.132473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:22.132489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:22.133128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { 
TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:22.133490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:22.143806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:22.144089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.144124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:22.144132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:22.144194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:22.144200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:22.144254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:22.144267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:22.145000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:22.145024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:22.145060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:22.145065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:22.145151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.145159Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:22.145170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:22.145174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:22.145179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:22.145183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:22.145187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:22.145191Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:22.145203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:22.145208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:22.145211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:22.145532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:22.145548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:22.145551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:22.145555Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:22.145558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:22.145570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 149000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 46 } } 2024-11-21T17:52:22.498278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2024-11-21T17:52:22.498301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 149000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 46 } } 2024-11-21T17:52:22.498311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 149000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 46 } } 2024-11-21T17:52:22.498315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T17:52:22.498340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.498344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2024-11-21T17:52:22.498889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.498937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.498947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#105:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:22.498960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T17:52:22.498994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:22.499468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2024-11-21T17:52:22.499500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2024-11-21T17:52:22.499598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.499619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:22.499626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2024-11-21T17:52:22.509921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2024-11-21T17:52:22.509983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T17:52:22.511632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:22.511647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:52:22.511716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:22.511723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T17:52:22.511857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.511867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2024-11-21T17:52:22.512200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 
PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:52:22.512218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:52:22.512222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:52:22.512256Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2024-11-21T17:52:22.512265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:52:22.512284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T17:52:22.512418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 257 } } 2024-11-21T17:52:22.512425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2024-11-21T17:52:22.512438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 257 } } 2024-11-21T17:52:22.512446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 257 } } 2024-11-21T17:52:22.512566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 658 RawX2: 4294969896 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2024-11-21T17:52:22.512570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2024-11-21T17:52:22.512592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 658 RawX2: 4294969896 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2024-11-21T17:52:22.512595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:52:22.512599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 658 RawX2: 4294969896 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2024-11-21T17:52:22.512607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 
72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.512610Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.512612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:52:22.512617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2024-11-21T17:52:22.513079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:52:22.513125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.513140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.513171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.513176Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T17:52:22.513184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T17:52:22.513186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:52:22.513190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2024-11-21T17:52:22.513197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 105 2024-11-21T17:52:22.513201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T17:52:22.513205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T17:52:22.513207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T17:52:22.513223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:52:22.513586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:52:22.513597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:815:2739] TestWaitNotification: OK eventTxId 105 >> TPersQueueTest::TopicServiceReadBudget [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites >> HttpRequest::AnalyzeServerless |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:21.936402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:21.936441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:21.936444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:21.936449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:21.936460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:21.936463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:21.936470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:21.936579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:21.983803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:21.983827Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:21.987042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:21.987907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:21.987937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:21.997214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:21.997490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:22.016327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.028306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:22.042016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:22.074024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:22.074069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:22.074120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:22.074132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:22.074138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:22.074158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.075590Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:52:22.121231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2024-11-21T17:52:22.130740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.130851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:22.130885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:22.130893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.131983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:22.132044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:22.132058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:22.132062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:22.132462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:22.132861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.132874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:22.132880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:22.133477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:22.133828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:22.143811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:22.144082Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.144121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:22.144128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:22.144194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:22.144200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:22.144247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:22.144260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:22.144986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:22.144997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:22.145059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:22.145064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:22.145133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.145139Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:22.145150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:22.145153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:22.145157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:22.145160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:22.145163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:22.145165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:22.145179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:22.145184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:22.145187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:22.145503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:22.145513Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:22.145517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:22.145521Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:22.145526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:22.145538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 904Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:52:22.724924Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:22.725010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T17:52:22.725039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:52:22.725072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T17:52:22.725078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T17:52:22.725083Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T17:52:22.725142Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:22.725161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T17:52:22.725168Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T17:52:22.725174Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T17:52:22.725532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T17:52:22.725544Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T17:52:22.725559Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T17:52:22.725563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T17:52:22.725585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T17:52:22.725596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710760 2024-11-21T17:52:22.725601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T17:52:22.725606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T17:52:22.725609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T17:52:22.725622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T17:52:22.725971Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T17:52:22.725986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T17:52:22.725996Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T17:52:22.726008Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:22.726336Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:52:22.726352Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:22.726360Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T17:52:22.726670Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T17:52:22.726687Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:442:2406], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T17:52:22.726691Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T17:52:22.726709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:52:22.726714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:614:2567] TestWaitNotification: OK eventTxId 102 2024-11-21T17:52:22.726845Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:22.726906Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 68us result status StatusSuccess 2024-11-21T17:52:22.727023Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> DemoTx::Scenario_4 [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] |83.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DemoTx::Scenario_5 >> PersQueueSdkReadSessionTest::SettingsValidation >> BasicStatistics::NotFullStatisticsDatashard >> HttpRequest::Analyze [GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TPersQueueTest::Cache [GOOD] >> YdbOlapStore::ManyTables [GOOD] >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::SameOffset [GOOD] >> TPersQueueTest::CacheHead >> YdbOlapStore::LogPagingBetween >> TPersQueueTest::DirectReadCleanCache >> TPersQueueTest::SchemeOperationsTest >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> TopicService::UnknownTopic [GOOD] >> SubDomainWithReboots::DropSplittedTabletInsideWithStoragePools [GOOD] >> TopicService::UseDoubleSlashInTopicPath >> DemoTx::Scenario_5 [GOOD] >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition >> YdbOlapStore::LogPagingBetween [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD] >> TFstClassSrcIdPQTest::TestTableCreated >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode >> YdbOlapStore::LogWithUnionAllAscending >> TPersQueueTest::StreamReadCommitAndStatusMsgs >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError |83.9%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2024-11-21T17:52:19.201282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:19.201333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:19.201344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00164c/r3tmp/tmpXwjLSl/pdisk_1.dat 2024-11-21T17:52:19.278282Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30297, node 1 2024-11-21T17:52:19.370200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:19.370219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:19.370222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:19.370286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:19.375364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:19.451457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:19.451492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:19.462982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19812 2024-11-21T17:52:19.865571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.626027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:20.626050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:20.659295Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:20.660139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:20.707929Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:20.715068Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:20.715091Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:20.720617Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:20.720753Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:20.720774Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:20.720779Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:20.720785Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:20.720791Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:20.720796Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:20.720802Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:20.720915Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:20.895515Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:20.895540Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:20.896584Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:52:20.898735Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:52:20.898875Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:52:20.899717Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:52:20.903728Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:20.903743Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:20.903755Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:52:20.905125Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:20.905147Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:20.906196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:20.907447Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:20.907471Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:20.910222Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:20.922167Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:20.944364Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:21.056769Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:21.223066Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:52:21.934807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.934841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.937746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:52:21.978949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:21.979008Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:52:21.979059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:52:21.979083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:52:21.979101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:52:21.979118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:52:21.979141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:52:21.979162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:52:21.979181Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:52:21.979200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:52:21.979217Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:52:21.979234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:52:21.985773Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:21.985803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:52:21.985833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:52:21.985847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:52:21.985862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:52:21.985874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... nished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:52:22.018282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:52:22.018296Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:52:22.018301Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:52:22.018313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:52:22.018319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:52:22.018335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:52:22.018340Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:52:22.018351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:52:22.018356Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:52:22.018391Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:52:22.018398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:52:22.018407Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:52:22.018413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:52:22.018429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:52:22.018435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:52:22.018444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:52:22.018450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:52:22.018459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:52:22.018466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:52:22.018473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:52:22.018478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:52:22.018503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:52:22.018508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:52:22.018524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:52:22.018530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:52:22.018542Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:52:22.018547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:52:22.018562Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:52:22.018568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:52:22.018579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:52:22.018584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:52:22.994878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2925:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:22.994934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:22.996082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2024-11-21T17:52:23.625192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3074:3165], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:23.625235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:23.628277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000014s 2024-11-21T17:52:25.391618Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3400:3607] 2024-11-21T17:52:25.392136Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3397:3255] , Record { OperationId: "\000\000\000\000\023\346:\326\337\343AS}\345T\253" Tables { PathId { OwnerId: 72075186224037889 LocalId: 4 } } } 2024-11-21T17:52:25.392147Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal operation, OperationId=:AS}T 2024-11-21T17:52:25.392151Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal table, OperationId=:AS}T , PathId [OwnerId: 72075186224037889, LocalPathId: 4] Answer: 'Analyze sent. OperationId: 00000004z67bbdzrt1adyyan5b' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; |84.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::DropSplittedTabletInsideWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:51:04.970779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:04.970802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:04.970807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:04.970812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:04.970825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:04.970829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:04.970838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:04.970923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:04.984824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:04.984853Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:51:04.987502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:04.987611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:04.987662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:04.991894Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:04.991991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:04.992117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:04.992354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:04.993085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:04.993346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:04.993354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:04.993366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:04.993372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:04.993378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:04.993422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:51:04.994797Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:51:05.014948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:05.019590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.019703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:05.019774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:05.019788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.020727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:05.020773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:05.020842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:51:05.020853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:05.020859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:05.020865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:05.021435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.021452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:05.021457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:51:05.021864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.021874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.021881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:05.021888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:05.022534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:05.023070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:05.023126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:05.023336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:05.023363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:05.023382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:05.023439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:05.023445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:05.023476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:05.023488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:51:05.023934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:05.023945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:05.023986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:05.023991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:05.024066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:05.024073Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:05.024083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:05.024087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:05.024093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:05.024097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:05.024102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:05.024106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:05.024117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:05.024123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:05.024126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ndToSchemeshard, txId 1006 2024-11-21T17:52:28.384751Z node 187 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:52:28.384763Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:52:28.384767Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [187:920:2831] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted 2024-11-21T17:52:28.384832Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:52:28.384841Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:52:28.384848Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:52:28.384855Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:52:28.384865Z node 187 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 2024-11-21T17:52:28.384947Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:28.384972Z node 187 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 35us result status StatusPathDoesNotExist 2024-11-21T17:52:28.385003Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:52:28.385052Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:28.385063Z node 187 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 13us result status StatusSuccess 2024-11-21T17:52:28.385110Z node 187 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: 
true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:28.385300Z node 187 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [187:122:2148] sender: [187:927:2058] recipient: [187:99:2134] Leader for TabletID 72057594046678944 is [187:122:2148] sender: [187:930:2058] recipient: [187:15:2062] Leader for TabletID 72057594046678944 is [187:122:2148] sender: [187:931:2058] recipient: [187:929:2838] Leader for TabletID 72057594046678944 is [187:932:2839] sender: [187:933:2058] recipient: [187:929:2838] 2024-11-21T17:52:28.394094Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:28.394121Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:28.394126Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:28.394131Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:28.394137Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:28.394145Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:28.394154Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:28.394213Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:28.395360Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:28.395701Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:28.395734Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:28.395771Z node 187 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:28.395777Z node 187 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:28.395816Z node 187 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:28.395922Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:52:28.395939Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached 
as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: DirA, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:52:28.395949Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.395957Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396011Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396039Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396053Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396067Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396082Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396095Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396124Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396158Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396170Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396215Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396224Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396280Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396294Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396307Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396332Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396342Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396371Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396398Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396413Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396419Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.396425Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:28.397862Z node 187 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:28.397877Z node 187 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:28.397936Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:28.397943Z node 187 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:28.397948Z node 187 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:28.397992Z node 187 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Waiting until shard idx 72057594046678944:4 is deleted Waiting until shard idx 72057594046678944:5 is deleted Leader for TabletID 72057594046678944 is [187:932:2839] sender: [187:990:2058] recipient: [187:15:2062] Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 Deleted shard idx 72057594046678944:4 Deleted shard idx 72057594046678944:5 >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] |84.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |84.0%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpRanges::NullInKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: 2024-11-21T17:52:23.496726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:23.496763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:23.496771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001616/r3tmp/tmpaoG5sf/pdisk_1.dat 2024-11-21T17:52:23.573824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22227, node 1 2024-11-21T17:52:23.665272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:23.665293Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:23.665297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:23.665384Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:23.669898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:23.744751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:23.744781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:23.756358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8149 2024-11-21T17:52:24.159632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:24.902873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:24.902899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:24.935693Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:24.936712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:24.984471Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:24.992270Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:24.992296Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:24.999135Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:24.999274Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:24.999299Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:24.999304Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:24.999311Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:24.999316Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:24.999321Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:24.999329Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:24.999451Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:25.173442Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:25.173468Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:25.174626Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:52:25.176353Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:52:25.176446Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:52:25.177228Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T17:52:25.180585Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:25.180598Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:25.180606Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T17:52:25.181780Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:25.181799Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:25.182795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:25.183918Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:25.183937Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:25.185792Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:25.197356Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:25.219189Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:25.328044Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:25.483272Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T17:52:26.206301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:26.722656Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:26.807755Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T17:52:26.807790Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:26.807800Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2483:2898], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:26.807966Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2484:2899] 2024-11-21T17:52:26.807996Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2484:2899], schemeshard id = 72075186224037899 2024-11-21T17:52:27.516167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2614:3190], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:27.516205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:27.518899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T17:52:27.564084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:27.564143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:52:27.564175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:52:27.564193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:52:27.564206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:52:27.564220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:52:27.564254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:52:27.564270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:52:27.564283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:52:27.564299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:52:27.564312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:52:27.564326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2767:3037];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:52:27.573433Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[2:2768:3038];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:27.573478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[2:2768: ... iption=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:52:27.607364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:52:27.607374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:52:27.607377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:52:27.607384Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:52:27.607387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:52:27.607396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:52:27.607399Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:52:27.607405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:52:27.607408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:52:27.608767Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:52:27.608779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:52:27.608787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:52:27.608791Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:52:27.608800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:52:27.608804Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:52:27.608810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:52:27.608814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:52:27.608820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:52:27.608824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:52:27.608830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:52:27.608834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:52:27.608858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:52:27.608862Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:52:27.608872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:52:27.608876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:52:27.608883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:52:27.608887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:52:27.608897Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:52:27.608901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:52:27.608908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2024-11-21T17:52:27.608911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:52:29.044243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3418:3290], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:29.044282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:29.046591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899 2024-11-21T17:52:30.328095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3576:3337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:30.328195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:30.329356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 waiting actualization: 0/0.000011s 2024-11-21T17:52:32.750468Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3914:3833] 2024-11-21T17:52:32.751023Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. ReplyToActorId [1:3910:3428] , Record { OperationId: "\000\000\000\000\031\311W\016\327F\206$\034\353\213\225" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2024-11-21T17:52:32.751035Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal operation, OperationId=WF$ 닕 2024-11-21T17:52:32.751040Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAnalyze::Execute. Create new force traversal table, OperationId=WF$ 닕 , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. OperationId: 00000006e9aw7dehm64geeq2wn' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2024-11-21T17:51:53.072370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:53.072392Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:53.072414Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:51:53.075563Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:51:53.075682Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:51:53.075735Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:53.076705Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:51:53.085535Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:53.085685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:51:53.085810Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:51:53.085824Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:51:53.085830Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:51:53.085867Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:51:53.089163Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:51:53.089218Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:51:53.089264Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:51:53.089268Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:51:53.089272Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:51:53.089276Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:53.089347Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.089353Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.089373Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:51:53.089391Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:51:53.089429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:53.089436Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:51:53.089441Z node 1 
:TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:51:53.089446Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:51:53.089449Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:51:53.089452Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:51:53.089457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:51:53.097234Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.097257Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.097266Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:51:53.097717Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:51:53.097731Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:51:53.097748Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:51:53.097776Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:51:53.097786Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:51:53.097793Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:51:53.097800Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:53.097805Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:51:53.097809Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:51:53.097813Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:53.097872Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:51:53.097876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:51:53.097880Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:51:53.097884Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:53.097893Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:51:53.097897Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:51:53.097900Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:51:53.097904Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:53.097908Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 
2024-11-21T17:51:53.118745Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:51:53.118765Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:51:53.118770Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:51:53.118779Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:51:53.118790Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:51:53.118872Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.118878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:53.118885Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:51:53.118903Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:51:53.118907Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:51:53.118962Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:51:53.118969Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.118973Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:51:53.118977Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:51:53.119626Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:51:53.119643Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:51:53.119695Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.119701Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:51:53.119707Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:51:53.119713Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:51:53.119717Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:51:53.119724Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:51:53.119729Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:51:53.119735Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.119739Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:51:53.119743Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:51:53.119747Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:51:53.119794Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:51:53.119798Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.119801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:51:53.119805Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:51:53.119810Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:51:53.119821Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:51:53.119824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:51:53.119827Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:51:53.119830Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:51:53.119842Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:51:53.119845Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:51:53.119848Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:51:53.119854Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:51:53.119856Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:51:53.119860Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907334Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907337Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:22] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907341Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 22] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907344Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907362Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907365Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907369Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907372Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907394Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907397Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907401Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907404Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907423Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907426Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907430Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907433Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907452Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907455Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907459Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907462Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907478Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907482Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907487Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907489Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907506Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907509Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907514Z 
node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907516Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907532Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907535Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907539Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907541Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907559Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907561Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907565Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907567Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907585Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907588Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907592Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907595Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907613Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907616Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907620Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907623Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907638Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907641Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907645Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907648Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907666Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907668Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907672Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907676Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907694Z node 32 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907696Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907700Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907702Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907718Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907720Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907725Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907728Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907745Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:31.907747Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T17:52:31.907751Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:31.907753Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:31.907793Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:31.907798Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:31.907803Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T17:52:31.907820Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:31.907822Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:31.907824Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T17:52:31.907831Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:31.907834Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:31.907835Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T17:52:31.907842Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:52:31.907844Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:31.907846Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T17:52:31.907852Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:31.907855Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:31.907857Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T17:52:31.907865Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:31.907867Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:31.907869Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 expect 31 24 22 20 31 29 28 31 25 22 31 22 18 28 19 18 31 20 26 18 - - 8 26 - 14 4 3 - - - - actual 31 24 22 20 31 29 28 31 25 22 31 22 18 28 19 18 31 20 26 18 - - 8 26 - 14 4 3 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> TopicService::UseDoubleSlashInTopicPath [GOOD] >> TopicService::RelativePath >> TPersQueueTest::DirectReadCleanCache [FAIL] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending |84.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2024-11-21T17:52:02.175796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:02.175814Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:02.175832Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:02.178635Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:02.178772Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:02.178823Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:02.179614Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:02.187202Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:02.187350Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:02.187496Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:02.187512Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:02.187519Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 
2024-11-21T17:52:02.187558Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:02.190549Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:02.190606Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:02.190660Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:02.190665Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:02.190670Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:02.190675Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:02.190751Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.190757Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.190779Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:02.190797Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:02.190837Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:02.190843Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:02.190849Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:02.190854Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:02.190858Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:02.190863Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:02.190867Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:02.200259Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.200280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.200289Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:02.200710Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:02.200722Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:02.200740Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:02.200765Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:02.200774Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:02.200782Z node 1 :TX_DATASHARD 
DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:02.200789Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:02.200793Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:02.200798Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:02.200801Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:02.200864Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:02.200868Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:02.200871Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:02.200875Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:02.200884Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:02.200887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:02.200890Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:02.200894Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:02.200899Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:02.221801Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:02.221819Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:02.221824Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:02.221833Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:02.221843Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:02.221920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.221925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.221930Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:02.221942Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:02.221945Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:02.221977Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:02.221984Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.221987Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:02.221990Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:02.222499Z node 1 :TX_DATASHARD 
DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:02.222512Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:02.222552Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.222555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.222560Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:02.222565Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:02.222568Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:02.222574Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:02.222576Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:02.222582Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.222585Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:02.222587Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:02.222590Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:02.222620Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:02.222623Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.222624Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:02.222627Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:02.222629Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:02.222637Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:02.222638Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:02.222640Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:02.222642Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:02.222649Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:02.222651Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:02.222653Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:02.222657Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.222658Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:02.222660Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
21T17:52:32.950521Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950537Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950539Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950544Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950546Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950568Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950571Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950575Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950577Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950589Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950592Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950596Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950599Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950612Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950615Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950619Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950623Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950638Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950641Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950647Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950650Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950670Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950673Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950677Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950679Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950695Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950698Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 
2024-11-21T17:52:32.950701Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950705Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950719Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950722Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950726Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950728Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950754Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950759Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950763Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950766Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950781Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950784Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950787Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950790Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950805Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950808Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950812Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950814Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950831Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950835Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950841Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950845Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950872Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950876Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950881Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950885Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950910Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:32.950914Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T17:52:32.950920Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:32.950924Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:32.950976Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:32.950984Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.950990Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T17:52:32.951014Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T17:52:32.951017Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.951021Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T17:52:32.951032Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:32.951035Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.951039Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T17:52:32.951050Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:32.951053Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.951056Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T17:52:32.951066Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:52:32.951069Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.951072Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T17:52:32.951082Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:32.951085Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.951088Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 
dest 9437184 consumer 9437184 txId 10 2024-11-21T17:52:32.951098Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:32.951101Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.951104Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2024-11-21T17:52:32.951115Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:780:2716], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2024-11-21T17:52:32.951118Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:32.951121Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 21 31 30 24 30 28 19 31 28 19 31 24 30 16 14 28 30 15 29 29 30 16 30 30 15 10 30 10 10 15 15 - actual 21 31 30 24 30 28 19 31 28 19 31 24 30 16 14 28 30 15 29 29 30 16 30 30 15 10 30 10 10 15 15 - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TPersQueueTest::CacheHead [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> TPersQueueTest::WriteNonExistingPartition [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite >> TPersQueueTest::StreamReadManyUpdateTokenAndRead >> CompressExecutor::TestReorderedExecutor >> TPersQueueTest::WriteNonExistingTopic >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] |84.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TFstClassSrcIdPQTest::NoMapping >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> KqpRanges::NullInKey [GOOD] >> TPersQueueTest::SchemeOperationsTest [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> LabeledDbCounters::TwoTablets [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> LabeledDbCounters::TwoTabletsKillOneTablet >> TPersQueueTest::SchemeOperationFirstClassCitizen >> 
TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> KqpRanges::LiteralOrCompisite >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::NoFullScanAtScanQuery >> IndexBuildTest::CancellationNoTable [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession >> TGRpcNewClient::CreateAlterUpsertDrop |84.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |84.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> YdbOlapStore::LogTsRangeDescending |84.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:51:58.648213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:51:58.648254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.648259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:51:58.648263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:51:58.648277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:51:58.648279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:51:58.648285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:51:58.648341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:51:58.655259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:51:58.655276Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:58.657426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:51:58.657973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:51:58.658007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:51:58.659191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:51:58.659338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:51:58.659399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.659446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T17:51:58.660123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.660375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.660383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.660411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:51:58.660415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.660420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:51:58.660430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.661300Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:51:58.671596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:51:58.671657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.671707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:51:58.671747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:51:58.671752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.672384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.672407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:51:58.672478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.672486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:51:58.672490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:51:58.672494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:51:58.672909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.672919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:51:58.672923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 3 -> 128 2024-11-21T17:51:58.673232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.673238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:51:58.673241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.673246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.673675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:58.674085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:51:58.674166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:51:58.674358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:51:58.674386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:51:58.674394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.674453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:51:58.674459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:51:58.674490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.674503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:51:58.674994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:51:58.675005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:51:58.675052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:51:58.675058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:51:58.675155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:51:58.675162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:51:58.675176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:51:58.675180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.675186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:51:58.675192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:51:58.675197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:51:58.675201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:51:58.675216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:51:58.675222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:51:58.675226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:51:58.675547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.675563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:51:58.675568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:51:58.675572Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:51:58.675577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:51:58.675590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
rlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:38.284988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:38.285007Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.286065Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:237:2058] recipient: [2:15:2062] 2024-11-21T17:52:38.287799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:38.287833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.287881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:38.287910Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:38.287915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.288295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:38.288320Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:38.288356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.288365Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:38.288370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:38.288374Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:38.288815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.288827Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:38.288832Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:38.289283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.289298Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.289305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:38.289312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2024-11-21T17:52:38.289344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:38.296835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:38.296916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:38.297152Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:38.297192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:38.297202Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:38.297283Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:38.297293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:38.297333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:38.297348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:38.298087Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:38.298099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:38.298147Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:38.298152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:38.298163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:38.298174Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:38.298200Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:38.298204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:38.298210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:38.298216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2024-11-21T17:52:38.298220Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:38.298224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:38.298238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:38.298243Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:38.298247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:38.298448Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:38.298463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:38.298466Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:38.298470Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:38.298474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:38.298486Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:52:38.299122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:52:38.299205Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:38.299310Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_batch_rows: 2 max_shards_in_flight: 2 } 2024-11-21T17:52:38.299357Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp:70" severity: 1 } SchemeStatus: 2 2024-11-21T17:52:38.299377Z node 2 :TX_PROXY DEBUG: actor# [2:267:2259] Bootstrap 2024-11-21T17:52:38.300208Z node 2 :TX_PROXY DEBUG: actor# [2:267:2259] Become StateWork (SchemeCache [2:272:2264]) 2024-11-21T17:52:38.300334Z node 2 :TX_PROXY DEBUG: actor# [2:267:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:52:38.300680Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: 
\'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp:70" severity: 1 } SchemeStatus: 2 TestWaitNotification wait txId: 101 2024-11-21T17:52:38.300743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:52:38.300748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:52:38.300783Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:52:38.300794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:52:38.300797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:279:2271] TestWaitNotification: OK eventTxId 101 2024-11-21T17:52:38.300837Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2024-11-21T17:52:38.300849Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [FAIL] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:51:09.744362Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:09.744390Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:09.749255Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:09.751984Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { 
PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2024-11-21T17:51:09.752323Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:51:09.752902Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:09.753543Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:51:09.753844Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:09.754389Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:186:2199] 2024-11-21T17:51:09.754821Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:186:2199] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:09.757829Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:09.757850Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:208:2214], now have 1 active actors on pipe 2024-11-21T17:51:09.757863Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T17:51:09.757871Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T17:51:09.757962Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T17:51:09.757967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T17:51:09.757999Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2024-11-21T17:51:09.758036Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2024-11-21T17:51:09.758041Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2024-11-21T17:51:09.758082Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:09.758086Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:210:2216], now have 1 active actors on pipe 2024-11-21T17:51:09.758091Z node 1 :PERSQUEUE DEBUG: 
Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T17:51:09.758095Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T17:51:09.758116Z node 1 :PERSQUEUE INFO: new Cookie default|9d5cf834-189280f3-483bbc3a-b18a7fe_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T17:51:09.758142Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:51:09.758155Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:51:09.758190Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:51:09.758193Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:212:2218], now have 1 active actors on pipe 2024-11-21T17:51:09.758203Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T17:51:09.758206Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T17:51:09.758212Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T17:51:09.758215Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2024-11-21T17:51:09.758241Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2024-11-21T17:51:09.758287Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2024-11-21T17:51:09.758297Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2024-11-21T17:51:09.758301Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2024-11-21T17:51:09.813191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--asdfgs--topic' partition 0 compactOffset 0,2 HeadOffset 0 endOffset 0 curOffset 2 d0000000000_00000000000000000000_00000_0000000002_00000| size 94 WTime 332 2024-11-21T17:51:09.813293Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:09.814321Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 18 2024-11-21T17:51:09.814344Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:51:09.814358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:51:09.814367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:51:09.814371Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T17:51:09.814397Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2024-11-21T17:51:09.814403Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2024-11-21T17:51:09.814409Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2024-11-21T17:51:09.814415Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 1 2024-11-21T17:51:09.814460Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:51:09.814480Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2024-11-21T17:51:09.814486Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:51:09.814548Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T17:51:09.814554Z node 1 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:51:09.814574Z node 1 :PERSQUEUE DEBUG: Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp done, result 332 queuesize 2 startOffset 0 2024-11-21T17:51:09.814579Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 0 rrg 1 2024-11 ... 
ystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:54.885961Z node 62 :PERSQUEUE INFO: new Cookie default|8b51bb14-1872f659-24e3d9c8-43c8e1c3_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:54.894974Z node 62 :PERSQUEUE INFO: new Cookie default|9bcc86d2-36b51933-6940284-3227e331_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:101:2057] recipient: [63:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:101:2057] recipient: [63:99:2133] Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:106:2057] recipient: [63:99:2133] 2024-11-21T17:51:55.120781Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:55.120800Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [63:147:2057] recipient: [63:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [63:147:2057] recipient: [63:145:2168] Leader for TabletID 72057594037927938 is [63:151:2172] sender: [63:152:2057] recipient: [63:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:177:2057] recipient: [63:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:55.124558Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:55.124678Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 66 actor [63:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 
ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 66 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 66 ReadRuleGenerations: 66 ReadRuleGenerations: 66 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 66 Important: false } Consumers { Name: "user1" Generation: 66 Important: true } Consumers { Name: "user2" Generation: 66 Important: true } 2024-11-21T17:51:55.124774Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [63:184:2197] 2024-11-21T17:51:55.125196Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [63:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:55.127142Z node 63 :PERSQUEUE INFO: new Cookie default|98fde109-34cd3941-5e57f009-f97520a1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:55.266172Z node 63 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T17:51:55.277278Z node 63 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:241:2057] recipient: [63:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:244:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [63:105:2137] sender: [63:245:2057] recipient: [63:243:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:246:2245] sender: [63:247:2057] recipient: [63:243:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:51:55.285859Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:55.285880Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:51:55.285957Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [63:297:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:51:55.290235Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [63:297:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:51:55.296081Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 2024-11-21T17:51:55.308223Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 2024-11-21T17:51:55.324709Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8296398 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:246:2245] sender: [63:328:2057] recipient: [63:14:2061] 2024-11-21T17:51:55.325109Z node 63 :PERSQUEUE ERROR: [PQ: 72057594037927937] Config has too small version 42 actual 66 actor [63:325:2307] txId 42 config: PartitionIds: 0 Version: 42 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } Consumers { Name: "user2" Important: true } 2024-11-21T17:51:55.336104Z node 63 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8296398 assertion failed at ydb/core/persqueue/ut/pq_ut.cpp:2405, auto NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()(const TString &, std::function, bool &) const: ((int)consumerDeleteResult->Record.GetStatus() == (int)NKikimrPQ::EStatus::OK) failed: (2 != 0) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x127EE7D9) ??+0 (0x1248D91D) ??+0 (0x1248C3F0) NKikimr::RunTestWithReboots(TVector> const&, std::__y1::function&)> ()>, std::__y1::function> const&, std::__y1::function, bool&)>, unsigned int, unsigned long, unsigned int, unsigned int, bool)+193 (0x1E7D2181) NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&)+114 (0x1242C6B2) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x124300E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x127F078E) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()+422 (0x1242FAA6) NUnitTest::TTestFactory::Execute()+803 (0x127F0F03) NUnitTest::RunMain(int, char**)+3005 (0x1280412D) ??+0 (0x7F61F8791D90) __libc_start_main+128 (0x7F61F8791E40) _start+41 (0x1180B029) forced failure at ydb/core/testlib/tablet_helpers.cpp:806, void NKikimr::RunTestWithReboots(const TVector &, std::function, std::function, bool &)>, ui32, ui64, ui32, ui32, bool): Failed at dispatch Trace with exception NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x127EE7D9) NKikimr::RunTestWithReboots(TVector> const&, std::__y1::function&)> ()>, std::__y1::function> const&, std::__y1::function, bool&)>, unsigned int, unsigned long, unsigned int, unsigned int, bool)+3359 (0x1E7D2DDF) NKikimr::NPQ::NTestSuiteTPQTest::TTestCaseTestReadAndDeleteConsumer::Execute_(NUnitTest::TTestContext&)+114 (0x1242C6B2) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x124300E7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x127F078E) NKikimr::NPQ::NTestSuiteTPQTest::TCurrentTest::Execute()+422 (0x1242FAA6) NUnitTest::TTestFactory::Execute()+803 (0x127F0F03) NUnitTest::RunMain(int, char**)+3005 (0x1280412D) ??+0 (0x7F61F8791D90) __libc_start_main+128 (0x7F61F8791E40) _start+41 (0x1180B029) >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> TopicService::RelativePath [GOOD] >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] |84.2%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] >> TopicService::AccessRights ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] Test command err: 2024-11-21T17:52:35.529325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791982482716746:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:35.529489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011d5/r3tmp/tmpZiLEEA/pdisk_1.dat 2024-11-21T17:52:35.571130Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19884, node 1 2024-11-21T17:52:35.587492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:35.587505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:35.587506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:35.587534Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:35.610237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.610984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:35.610997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.611630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:35.611686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:35.611694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:52:35.612012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:52:35.612062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:35.612066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:52:35.612368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.613290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211555662, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:35.613307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:52:35.613373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:52:35.613747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:35.613798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:35.613812Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:52:35.613825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:52:35.613837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:52:35.613854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:52:35.614211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:52:35.614231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:52:35.614235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:35.614258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:52:35.629368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:35.629393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:35.631108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:36.002760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:36.002813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:36.002950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 
2024-11-21T17:52:36.595554Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791983651939308:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:36.595817Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011d5/r3tmp/tmp4nfvoB/pdisk_1.dat 2024-11-21T17:52:36.606789Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18335, node 4 2024-11-21T17:52:36.624434Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:36.624445Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:36.624446Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:36.624478Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:36.695843Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:36.695896Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:36.697420Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:36.698863Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:36.698970Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:36.698981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:36.699313Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:36.699361Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:36.699370Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:36.699625Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:36.699635Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:36.699779Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:36.699899Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:36.700584Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211556747, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:36.700593Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:36.700647Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:36.700991Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:36.701038Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:36.701053Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 Progress ... teAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T17:52:39.317059Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T17:52:39.317060Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:52:39.317070Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T17:52:39.317075Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T17:52:39.317076Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T17:52:39.317085Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T17:52:39.317091Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T17:52:39.317092Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2024-11-21T17:52:39.317095Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2024-11-21T17:52:39.317357Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439791996227736228:2319], DatabaseId: /Root, PoolId: default, Scheduled 
retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:52:39.393341Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715662:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:52:39.393387Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715662:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:52:39.394134Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715662, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:52:39.427182Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xnjwf7afm188nwe2q45p7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjlmNjc2MzEtYWVlMTYyZGItMTgwZjIyZTYtOGJkNDFkNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:39.432831Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: /Root/TheDir/FooTable, pathId: 0, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.432880Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715664:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:39.433558Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715664, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/TheDir/FooTable 2024-11-21T17:52:39.433608Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:39.433663Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:39.433680Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976715664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:39.433744Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715664, at schemeshard: 72057594046644480 2024-11-21T17:52:39.433917Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:52:39.433940Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:52:39.433945Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2024-11-21T17:52:39.434029Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:52:39.434037Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:52:39.434038Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2024-11-21T17:52:39.435302Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976715664:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:39.435311Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:39.435325Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 4 -> 128 2024-11-21T17:52:39.435737Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976715664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:39.436550Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211559484, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:39.436563Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976715664:0 HandleReply TEvOperationPlan, step: 1732211559484, at schemeshard: 72057594046644480 2024-11-21T17:52:39.436573Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 128 -> 136 2024-11-21T17:52:39.436899Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976715664:0 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T17:52:39.436911Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976715664:0 ProgressState, no renaming has been detected for this operation 2024-11-21T17:52:39.436915Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 136 -> 137 2024-11-21T17:52:39.437184Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715664:0 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T17:52:39.437195Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715664, done: 0, blocked: 1 2024-11-21T17:52:39.437211Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715664:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715664 Name: RenamePathBarrier }, at tablet72057594046644480 2024-11-21T17:52:39.437230Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 137 -> 129 2024-11-21T17:52:39.437302Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715664 Step: 1732211559484 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 176 } } 2024-11-21T17:52:39.437360Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:39.437643Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 
2024-11-21T17:52:39.437684Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:39.437701Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715664:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:52:39.437711Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.437714Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 129 -> 240 2024-11-21T17:52:39.437842Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:52:39.437854Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:52:39.437856Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T17:52:39.437885Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:52:39.437891Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:52:39.437892Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:52:39.438065Z node 13 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:39.438177Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2024-11-21T17:52:39.438188Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2024-11-21T17:52:39.439790Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2024-11-21T17:52:39.441565Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037888 not found 2024-11-21T17:52:39.441950Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |84.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2024-11-21T17:52:12.402338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:12.402352Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:12.402364Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:12.405758Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:12.405947Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:12.406031Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:12.406915Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:12.412903Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:12.413038Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:12.413213Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:12.413235Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:12.413242Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:12.413287Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:12.415684Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:12.415728Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:12.415778Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:12.415782Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:12.415786Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:12.415790Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.415849Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.415854Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.415875Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:12.415890Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:12.415927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.415934Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:12.415940Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:12.415945Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:12.415948Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:12.415954Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:12.415958Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
9437184 2024-11-21T17:52:12.422202Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.422219Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.422226Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:12.422537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:12.422544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:12.422564Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:12.422586Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:12.422596Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:12.422603Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:12.422608Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.422612Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:12.422616Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:12.422618Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.422665Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:12.422668Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:12.422670Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:12.422672Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.422680Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:12.422682Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:12.422684Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:12.422687Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.422690Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:12.443678Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:12.443698Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:12.443713Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:12.443723Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 
0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:12.443735Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:12.443841Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:212:2212], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.443847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:12.443853Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:211:2211], serverId# [1:212:2212], sessionId# [0:0:0] 2024-11-21T17:52:12.443866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:12.443869Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:12.443904Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:12.443909Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.443912Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:12.443915Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:12.444480Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:12.444491Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:12.444525Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.444528Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:12.444535Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:12.444540Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:12.444543Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:12.444549Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:12.444552Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:12.444557Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.444559Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:12.444562Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:12.444564Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:12.444595Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:12.444597Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.444599Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:12.444602Z node 1 
:TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:12.444604Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:12.444612Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:12.444614Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:12.444616Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:12.444619Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:12.444626Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:12.444629Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:12.444631Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:12.444635Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:12.444637Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:12.444639Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725159Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725165Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:22] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725172Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 22] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725177Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725202Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725206Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725212Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725217Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725240Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725244Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725252Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725256Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725286Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725291Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725297Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 
1 ms 2024-11-21T17:52:39.725301Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725326Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725330Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725336Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725339Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725363Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725366Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725372Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725376Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725400Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725404Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725411Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725416Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725437Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725442Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725448Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725453Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725477Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725484Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725490Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725494Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725524Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725529Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725536Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725540Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725594Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725599Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit 
CompleteOperation 2024-11-21T17:52:39.725607Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725611Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725638Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725642Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725649Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725653Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725674Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725679Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725686Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725690Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725724Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725728Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725735Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725739Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725762Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725766Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725773Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725777Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725798Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:39.725802Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T17:52:39.725809Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:39.725816Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:39.725878Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T17:52:39.725884Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:39.725889Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2024-11-21T17:52:39.725909Z node 32 
:TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T17:52:39.725913Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:39.725917Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T17:52:39.725932Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T17:52:39.725936Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:39.725939Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T17:52:39.725948Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T17:52:39.725951Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:39.725955Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T17:52:39.725966Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:52:39.725970Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:39.725973Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T17:52:39.725985Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:229:2224], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:52:39.725988Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:39.725991Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 expect 25 30 29 30 30 31 28 30 28 24 27 27 26 25 29 19 29 26 - 16 0 29 29 26 - 29 - 29 - - - - actual 25 30 29 30 30 31 28 30 28 24 27 27 26 25 29 19 29 26 - 16 0 29 29 26 - 29 - 29 - - - - interm 4 5 6 5 4 1 - 4 4 6 4 6 4 - 4 6 4 6 - - 0 - 1 - - - - - - - - - >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |84.2%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |84.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans >> YdbOlapStore::LogTsRangeDescending [GOOD] |84.3%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete |84.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> ExternalIndex::Simple |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> YdbImport::Simple |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |84.3%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::CheckACLForGrpcRead |84.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogTsRangeDescending [GOOD] Test command err: 2024-11-21T17:52:17.462552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791903538668490:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:17.462573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011df/r3tmp/tmpnmlOT9/pdisk_1.dat 2024-11-21T17:52:17.515097Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28456, node 1 2024-11-21T17:52:17.535564Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:17.535580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:17.535582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:17.535612Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:17.562769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.562792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.564315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.591358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.592169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:17.592192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.592819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:17.592887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:17.592897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:52:17.593245Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:17.593255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:17.593411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:17.593587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.594511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211537637, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:17.594525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:17.594605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:17.595123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:17.595184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:17.595203Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:17.595223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:17.595238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:17.595261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:17.595766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:52:17.595788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:17.595793Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:17.595809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TClient is connected to server localhost:20103 2024-11-21T17:52:17.637826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:52:17.637909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.662794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:52:17.662835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T17:52:17.662849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T17:52:17.662857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T17:52:17.662874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T17:52:17.662880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T17:52:17.662963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T17:52:17.663289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:52:17.663351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:17.663363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, 
opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.663395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:52:17.663406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T17:52:17.664211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T17:52:17.664262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2024-11-21T17:52:17.664333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:17.664339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:52:17.664380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:52:17.664398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:17.664404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439791903538669061:2372], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2024-11-21T17:52:17.664410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439791903538669061:2372], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2024-11-21T17:52:17.664416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.664426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 2024-11-21T17:52:17.664551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { Sto ... livery in input channelId: 28, seqNo: [1] 2024-11-21T17:52:40.634371Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 29, seqNo: [1] 2024-11-21T17:52:40.634373Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 30, seqNo: [1] 2024-11-21T17:52:40.634375Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 31, seqNo: [1] 2024-11-21T17:52:40.634377Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 32, seqNo: [1] 2024-11-21T17:52:40.634379Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 33, seqNo: [1] 2024-11-21T17:52:40.634381Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 34, seqNo: [1] 2024-11-21T17:52:40.634383Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 35, seqNo: [1] 2024-11-21T17:52:40.634385Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 36, seqNo: [1] 2024-11-21T17:52:40.634387Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 37, seqNo: [1] 2024-11-21T17:52:40.634389Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 38, seqNo: [1] 2024-11-21T17:52:40.634391Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 39, seqNo: [1] 2024-11-21T17:52:40.634393Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1] 2024-11-21T17:52:40.634395Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1] 2024-11-21T17:52:40.634397Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1] 2024-11-21T17:52:40.634399Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1] 2024-11-21T17:52:40.634401Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1] 2024-11-21T17:52:40.634404Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1] 2024-11-21T17:52:40.634406Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2024-11-21T17:52:40.634407Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2024-11-21T17:52:40.634410Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2024-11-21T17:52:40.634412Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2024-11-21T17:52:40.634413Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2024-11-21T17:52:40.634415Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2024-11-21T17:52:40.634417Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2024-11-21T17:52:40.634419Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2024-11-21T17:52:40.634421Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2024-11-21T17:52:40.634423Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2024-11-21T17:52:40.634425Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2024-11-21T17:52:40.634427Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2024-11-21T17:52:40.634430Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2024-11-21T17:52:40.634432Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2024-11-21T17:52:40.634435Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2024-11-21T17:52:40.634438Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2024-11-21T17:52:40.634440Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2024-11-21T17:52:40.634442Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2024-11-21T17:52:40.634444Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2024-11-21T17:52:40.634446Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished 2024-11-21T17:52:40.634448Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439792000633998451:3608], TxId: 281474976715770, task: 65. Ctx: { SessionId : ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=. CustomerSuppliedId : . TraceId : 01jd7xnm3x5r4ygts28xwqvzc3. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T17:52:40.634473Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. pass away 2024-11-21T17:52:40.634493Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715770;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T17:52:40.634515Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439792000633998363:3538] TxId: 281474976715770. Ctx: { TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439792000633998451:3608], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 923 DurationUs: 10000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 80 FinishTimeMs: 1732211560634 ComputeCpuTimeUs: 26 BuildCpuTimeUs: 54 WaitInputTimeUs: 5325 HostName: "ghrun-2rqap2spfm" NodeId: 28 StartTimeMs: 1732211560624 } MaxMemoryUsage: 1048576 } 2024-11-21T17:52:40.634522Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439792000633998451:3608] 2024-11-21T17:52:40.634544Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439792000633998363:3538] TxId: 281474976715770. Ctx: { TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T17:52:40.634554Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439792000633998363:3538] TxId: 281474976715770. Ctx: { TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.011543s ReadRows: 0 ReadBytes: 0 ru: 7 rate limiter was not found force flag: 1 2024-11-21T17:52:40.634570Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: ExecuteState, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T17:52:40.634645Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: ExecuteState, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 22.976 QueriesCount: 1 2024-11-21T17:52:40.634661Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: ExecuteState, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T17:52:40.634680Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: ExecuteState, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:52:40.634682Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: ExecuteState, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, EndCleanup, isFinal: 1 2024-11-21T17:52:40.634693Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: ExecuteState, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7439791996339027200:2256] 2024-11-21T17:52:40.634695Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: unknown state, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Cleanup temp tables: 0 2024-11-21T17:52:40.635170Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211560625, txId: 18446744073709551615] shutting down 2024-11-21T17:52:40.635205Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ODUzZTRiNzMtYmViMGNjZjMtODgwNWQwOTYtZTBlNmVjMWU=, ActorId: [28:7439792000633998342:3538], ActorState: unknown state, TraceId: 01jd7xnm3x5r4ygts28xwqvzc3, Session actor destroyed |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for 
TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:49:12.737954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:49:12.737981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:12.737986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:49:12.737991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:49:12.738009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:49:12.738013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:49:12.738023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:49:12.738127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:49:12.750981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:49:12.751002Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:49:12.753362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:49:12.753461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:49:12.753495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:49:12.756353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:49:12.756460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:49:12.756587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:12.756797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:12.757514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:12.757831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:12.757842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:12.757855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2024-11-21T17:49:12.757863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:12.757869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:49:12.757918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:49:12.759271Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:49:12.776791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:49:12.776884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.776948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:49:12.776997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:49:12.777006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.777665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:12.777696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:49:12.777749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.777760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:49:12.777765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:49:12.777770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:49:12.778179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.778190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:49:12.778195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:49:12.778538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.778548Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.778554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:12.778562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:49:12.779218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:49:12.779732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:49:12.779789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:49:12.780000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:49:12.780030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:49:12.780038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:12.780110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:49:12.780117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:49:12.780155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:49:12.780169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:49:12.785397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:49:12.785416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:49:12.785462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:49:12.785466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:49:12.785533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:49:12.785542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2024-11-21T17:49:12.785554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:49:12.785557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:12.785564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:49:12.785569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:49:12.785574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:49:12.785577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:49:12.785596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:49:12.785600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:49:12.785603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... rQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:38.099984Z node 176 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T17:52:38.100020Z node 176 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 43us result status StatusSuccess 2024-11-21T17:52:38.100120Z node 176 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:38.110499Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:52:38.110534Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T17:52:38.110549Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T17:52:38.110564Z node 176 :CHANGE_EXCHANGE DEBUG: 
[AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2024-11-21T17:52:38.110589Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732211558097600 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732211558097600 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:52:38.110786Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1732211558097600 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T17:52:38.111959Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2024-11-21T17:52:38.111979Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T17:52:38.112011Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T17:52:38.112064Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> YdbImport::Simple [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> TBlobStorageBlocksCacheTest::Repeat [GOOD] >> KqpRanges::MergeRanges >> TPersQueueTest::WriteAfterAlter >> TFstClassSrcIdPQTest::NoMapping [GOOD] >> TFstClassSrcIdPQTest::ProperPartitionSelected >> YdbIndexTable::AlterIndexImplBySuperUser >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::Repeat [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |84.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> TPersQueueTest::SetupWriteSession ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] Test command err: 2024-11-21T17:52:38.964149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791993241059459:2215];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:38.964171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011d4/r3tmp/tmpCwXT6O/pdisk_1.dat 2024-11-21T17:52:39.016377Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10017, node 1 2024-11-21T17:52:39.032406Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:39.032423Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:39.032425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:39.032476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:39.064184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:39.064213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:39.065971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:39.095513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.096760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:39.096779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.097459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:39.097524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:39.097538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:52:39.097910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:39.097921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:39.098114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:39.098305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.099174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211559148, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:39.099187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:39.099260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:39.099603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:39.099660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:39.099677Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:39.099694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:39.099709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:39.099730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:39.100252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:52:39.100269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:39.100275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:39.100303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:39.818926Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791999342687212:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:39.819076Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011d4/r3tmp/tmpC42NWk/pdisk_1.dat 2024-11-21T17:52:39.837588Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25677, node 4 2024-11-21T17:52:39.855410Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:39.855427Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:39.855431Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:39.855478Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:39.919222Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:39.919269Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:39.920912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:39.924047Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.924170Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:39.924184Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.924684Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:39.924744Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:39.924752Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:52:39.925143Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:39.925153Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:39.925291Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:39.925536Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:39.926522Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211559974, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:39.926536Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:39.926601Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:39.927082Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:39.927138Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:39.927155Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:39.927188Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:39.927198Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:39.927211Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:39.927331Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594 ... 
:52:43.049270Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:43.049295Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T17:52:43.049332Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:43.049346Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:43.049370Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:43.049395Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:52:43.049690Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.049712Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.049720Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:43.049802Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.049808Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.049810Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:52:43.049822Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.049827Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.049828Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:52:43.049839Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.049847Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.049848Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:52:43.049857Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.049859Z node 13 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.049860Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T17:52:43.050539Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211563096, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:43.050553Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211563096, at schemeshard: 72057594046644480 2024-11-21T17:52:43.050582Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:52:43.050607Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211563096, at schemeshard: 72057594046644480 2024-11-21T17:52:43.050621Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T17:52:43.050635Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211563096, at schemeshard: 72057594046644480 2024-11-21T17:52:43.050647Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T17:52:43.050661Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732211563096 2024-11-21T17:52:43.050673Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T17:52:43.051224Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:43.051334Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:43.051354Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T17:52:43.051374Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T17:52:43.051407Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T17:52:43.051424Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T17:52:43.051434Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:43.051440Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T17:52:43.051449Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T17:52:43.051455Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T17:52:43.051460Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:52:43.051468Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T17:52:43.051472Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T17:52:43.051474Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976715658:3 2024-11-21T17:52:43.051481Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T17:52:43.051743Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.051758Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.051761Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:43.051791Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.051799Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.051800Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T17:52:43.051814Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.051821Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.051822Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:52:43.051834Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.051841Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.051842Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:52:43.051855Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:43.051857Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:43.051859Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T17:52:43.051864Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:52:43.052417Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439792014468665943:2298], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:52:43.120133Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:52:43.120181Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:52:43.120934Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> KqpRanges::MergeRanges [GOOD] >> YdbIndexTable::AlterIndexImplBySuperUser [GOOD] >> YdbIndexTable::CreateTableAddIndex >> YdbMonitoring::SelfCheck >> YdbImport::EmptyData |84.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::MergeRanges [GOOD] Test command err: Trying to start YDB, gRPC: 27365, MsgBus: 21562 2024-11-21T17:52:33.827142Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791971035048694:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:33.827211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001084/r3tmp/tmpQoyKX8/pdisk_1.dat 2024-11-21T17:52:33.927030Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:33.932846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:33.932878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:33.933753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27365, node 1 2024-11-21T17:52:34.064279Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:34.064292Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:34.064294Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:34.064328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21562 TClient is connected to server localhost:21562 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:34.286549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:34.303262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:34.326259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:34.337016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:34.347945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:34.411067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791975330017328:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:34.411103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:35.100329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.108576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.163782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.173411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.185332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.194457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.224294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791979624985246:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:35.224320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791979624985251:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:35.224325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:35.231163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:52:35.232832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791979624985253:2430], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:52:35.876295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.900996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.979030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T17:52:35.996624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T17:52:36.040811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16338, MsgBus: 10324 2024-11-21T17:52:36.689177Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791986977887372:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:36.689377Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001084/r3tmp/tmpz2RVsA/pdisk_1.dat 2024-11-21T17:52:36.696923Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16338, node 2 2024-11-21T17:52:36.706121Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:36.706133Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:36.706134Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:36.706165Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10324 TClient is connected to server localhost:10324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:36.789671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:36.789707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:36.790785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:36.791446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:36.799418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:36.807267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:36.820152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660 ... vice] [TPoolFetcherActor] ActorId: [8:7439792010860709790:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:42.461042Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:42.464455Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.470740Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.481255Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.536717Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.543917Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.552059Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.560029Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439792010860710304:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:42.560056Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:42.560072Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439792010860710309:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:42.560783Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:52:42.564094Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439792010860710311:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:52:42.753998Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.830083Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211562865, txId: 281474976715673] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2024-11-21T17:52:42.865181Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211562907, txId: 281474976715675] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ Trying to start YDB, gRPC: 29523, MsgBus: 1901 2024-11-21T17:52:43.042658Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439792016894751040:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:43.042901Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001084/r3tmp/tmpdjS2oY/pdisk_1.dat 2024-11-21T17:52:43.051654Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29523, node 9 2024-11-21T17:52:43.060879Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:43.060891Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:43.060893Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:43.060930Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1901 TClient is connected to server localhost:1901 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:43.143078Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:43.143116Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:43.144180Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:43.144808Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:43.146612Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:43.161132Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:43.177569Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:43.189674Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:43.320846Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439792016894752592:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:43.320889Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:43.325318Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:52:43.331093Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:52:43.343424Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:52:43.356835Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:52:43.370902Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:52:43.384769Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:52:43.399934Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439792016894753097:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:43.399963Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:43.399964Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7439792016894753102:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:43.400634Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:52:43.404362Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439792016894753104:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:52:43.553765Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:52:43.609651Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211563656, txId: 281474976715673] shutting down |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay |84.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |84.4%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> YdbIndexTable::CreateTableAddIndex [GOOD] >> YdbIndexTable::AlterTableAddIndex >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> YdbMonitoring::SelfCheck [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying >> YdbImport::EmptyData [GOOD] >> YdbIndexTable::AlterTableAddIndex [GOOD] >> TYqlDateTimeTests::TimestampKey >> YdbLogStore::AlterLogStore >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TopicService::AccessRights [GOOD] >> TopicService::ThereAreGapsInTheOffsetRanges >> YdbLogStore::AlterLogStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T17:52:15.230142Z :ReadSession INFO: Random seed for debugging is 1732211535230136 2024-11-21T17:52:15.330884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791896673738545:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:15.331197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:15.333693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791893680549027:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:15.334068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:15.362479Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:52:15.365767Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001821/r3tmp/tmpTAhtv5/pdisk_1.dat 2024-11-21T17:52:15.393180Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2186, node 1 2024-11-21T17:52:15.406160Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/001821/r3tmp/yandexbGHcBE.tmp 2024-11-21T17:52:15.406178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001821/r3tmp/yandexbGHcBE.tmp 2024-11-21T17:52:15.406242Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001821/r3tmp/yandexbGHcBE.tmp 2024-11-21T17:52:15.406291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:15.410287Z INFO: TTestServer started on Port 10230 GrpcPort 2186 TClient is connected to server localhost:10230 PQClient connected to localhost:2186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:15.431041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:15.431065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:15.432621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:15.470300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:15.470354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:15.471093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:15.471629Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:15.471945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T17:52:15.646472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791893680549319:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.646515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791893680549283:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.646601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.648259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:52:15.666516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791893680549335:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:52:15.706632Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791896673739506:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:52:15.706712Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjJiNWMyYTQtMTE3N2U3ZGItYTM2OTY0MmYtYWRhNzcwYQ==, ActorId: [1:7439791896673739465:2299], ActorState: ExecuteState, TraceId: 01jd7xmvsqd8gzgc1rp6t7j3nv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:52:15.707171Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:52:15.708002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2024-11-21T17:52:15.742079Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791893680549415:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:52:15.742162Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTMwMzU1MzItMjRlZGZlN2MtZjRkM2Y1OGUtNDQ0YzA4N2E=, ActorId: [2:7439791893680549281:2277], ActorState: ExecuteState, TraceId: 01jd7xmvrx1qne73y970qfxv3e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:52:15.742342Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:52:15.778913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:52:15.843213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2186", true, true, 1000); 2024-11-21T17:52:15.871125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jd7xmvzg826q23rmv0ka95nq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjU2YmEwZmItNDRiMGYzZDEtZmEwYjRlOTQtOTY1ZjYxMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791896673739886:2920] 2024-11-21T17:52:20.331269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791896673738545:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:20.331303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:52:20.333842Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791893680549027:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:20.333876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:52:21.872960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720679:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:2186 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:52:21.884248Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:2186 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lz ... hared/user session shared/user_7_1_16247076335051052177_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2024-11-21T17:52:45.450445Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 4d28f552-3c50596e-e5813eb5-2b602e62 has messages 1 2024-11-21T17:52:45.450511Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 read done: guid# 4d28f552-3c50596e-e5813eb5-2b602e62, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2024-11-21T17:52:45.450531Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 response to read: guid# 4d28f552-3c50596e-e5813eb5-2b602e62 2024-11-21T17:52:45.450664Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 Process answer. Aval parts: 0 2024-11-21T17:52:45.450870Z :DEBUG: [/Root] [/Root] [8b9af467-492a2142-e52bc8c6-fbf0746f] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.450928Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T17:52:45.450979Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T17:52:45.450972Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T17:52:45.450999Z :DEBUG: [/Root] [/Root] [8b9af467-492a2142-e52bc8c6-fbf0746f] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:52:45.451013Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 got read request: guid# a7b65385-e2a55b4f-e11543f6-642b2ab5 DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". 
Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T17:52:44.344000Z WriteTime: 2024-11-21T17:52:44.345000Z Ip: "ipv6:[::1]:36262" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:36262" } } } } 2024-11-21T17:52:45.451039Z :INFO: [/Root] [/Root] [8b9af467-492a2142-e52bc8c6-fbf0746f] Closing read session. Close timeout: 3.000000s 2024-11-21T17:52:45.451048Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T17:52:45.451056Z :INFO: [/Root] [/Root] [8b9af467-492a2142-e52bc8c6-fbf0746f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1229 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:52:45.451205Z :INFO: [/Root] [/Root] [8b9af467-492a2142-e52bc8c6-fbf0746f] Closing read session. Close timeout: 0.000000s 2024-11-21T17:52:45.451211Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T17:52:45.451215Z :INFO: [/Root] [/Root] [8b9af467-492a2142-e52bc8c6-fbf0746f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1229 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:52:45.451232Z :NOTICE: [/Root] [/Root] [8b9af467-492a2142-e52bc8c6-fbf0746f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T17:52:45.451449Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 grpc read done: success# 0, data# { } 2024-11-21T17:52:45.451456Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 grpc read failed 2024-11-21T17:52:45.451461Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 grpc closed 2024-11-21T17:52:45.451470Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_16247076335051052177_v1 is DEAD 2024-11-21T17:52:45.451788Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439792020588628531:2494] disconnected; active server actors: 1 2024-11-21T17:52:45.451799Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439792020588628531:2494] client user disconnected session shared/user_7_1_16247076335051052177_v1 2024-11-21T17:52:45.451869Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:52:45.451885Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_16247076335051052177_v1 2024-11-21T17:52:45.451892Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7439792020588628534:2497] destroyed 2024-11-21T17:52:45.451909Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_16247076335051052177_v1 2024-11-21T17:52:45.858913Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.858921Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.858925Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:52:45.859007Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:52:45.859139Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:52:45.859210Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.859293Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T17:52:45.859498Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.859501Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.859502Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:52:45.859566Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:52:45.859652Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:52:45.859695Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.859751Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T17:52:45.859958Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T17:52:45.860135Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T17:52:45.860153Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T17:52:45.860195Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:52:45.860201Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T17:52:45.860204Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T17:52:45.860210Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T17:52:45.860673Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.860677Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.860680Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:52:45.860738Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:52:45.860818Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:52:45.860855Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.860881Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T17:52:45.860948Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.860985Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:52:45.861014Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:52:45.861019Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T17:52:45.861027Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2024-11-21T17:52:45.861277Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.861279Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.861280Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:52:45.861323Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T17:52:45.861376Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T17:52:45.861412Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.861472Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:45.861493Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T17:52:45.861507Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T17:52:45.861522Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogStore [GOOD] Test command err: 2024-11-21T17:52:42.056722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792009083445027:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:42.056881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011c8/r3tmp/tmpvzZfQm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31452, node 1 2024-11-21T17:52:42.110993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T17:52:42.111009Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:42.120030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:42.120041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:42.120042Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:42.120070Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:42.141905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.142792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:42.142805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.143507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:42.143553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:42.143560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T17:52:42.143943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:52:42.143979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:42.143985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:52:42.144324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.144949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211562193, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:42.144956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:52:42.145006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:52:42.145294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:42.145324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:42.145335Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:52:42.145341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:52:42.145350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:52:42.145361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:52:42.145702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:52:42.145727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:52:42.145731Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T17:52:42.145741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:52:42.157121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:42.157140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:42.158585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:42.287076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.287164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:52:42.287313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:42.287321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:42.287899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2024-11-21T17:52:42.287936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:42.287975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:42.287995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:42.288035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:52:42.288130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:42.288146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:42.288149Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:42.288206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:42.288211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:42.288213Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:42.289404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:42.289464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:52:42.289732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts 
operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:52:42.342090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:42.342100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:42.342129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T17:52:42.342735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:52:42.343621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211562389, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:42.343635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211562389 2024-11-21T17:52:42.343653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:52:42.344041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:42.344119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:42.344143Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:52:42.344398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:42.344411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:42.344414Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:42.344439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:42.344441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:42.344442Z node 1 :FLAT_TX_S ... 
72075186224037889 CpuTimeUsec: 150 } } 2024-11-21T17:52:45.031647Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:45.031684Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:45.031692Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T17:52:45.031697Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T17:52:45.031730Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:45.031771Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710759 Step: 1732211565077 OrderId: 281474976710759 ExecLatency: 2 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 110 } } 2024-11-21T17:52:45.031787Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:45.031879Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:45.032125Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976710759 Step: 1732211565077 OrderId: 281474976710759 ExecLatency: 2 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 92 } } 2024-11-21T17:52:45.032224Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:52:45.032246Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:2, at schemeshard: 72057594046644480 2024-11-21T17:52:45.032249Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 129 -> 240 2024-11-21T17:52:45.032300Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:0 ProgressState 2024-11-21T17:52:45.032313Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 2/3 2024-11-21T17:52:45.032516Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:2 ProgressState 2024-11-21T17:52:45.032525Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:2 progress is 3/3 2024-11-21T17:52:45.032532Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T17:52:45.032561Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:1 2024-11-21T17:52:45.032567Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:2 2024-11-21T17:52:45.032848Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T17:52:45.033325Z 
node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:52:45.033666Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710760:0, path# /Root/TheTable 2024-11-21T17:52:45.033712Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710760:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:45.033974Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/TheTable 2024-11-21T17:52:45.033989Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2024-11-21T17:52:45.034021Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 ProgressState 2024-11-21T17:52:45.034265Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046644480 2024-11-21T17:52:45.034718Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211565084, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:45.034729Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 1732211565084 2024-11-21T17:52:45.034731Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T17:52:45.035004Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710760:0 ProgressState 2024-11-21T17:52:45.035020Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T17:52:45.035032Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T17:52:45.035267Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T17:52:45.682247Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439792024226123261:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:45.682410Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011c8/r3tmp/tmpxJEgsk/pdisk_1.dat 2024-11-21T17:52:45.693924Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12080, node 13 2024-11-21T17:52:45.712570Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:45.712584Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:45.712588Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:45.712636Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25588 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:45.782672Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:45.782728Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:45.784407Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:45.785941Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:45.786077Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:45.786091Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:45.786565Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:45.786624Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:45.786633Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:52:45.786996Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:45.787013Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2024-11-21T17:52:45.787400Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:45.787656Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:45.788182Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211565833, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:45.788197Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:45.788288Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:45.788738Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:45.788790Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:45.788812Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:45.788830Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:45.788842Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:45.788862Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:45.788983Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:45.789006Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:45.789010Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:45.789024Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2024-11-21T17:52:02.625542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:02.625572Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:02.625597Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:02.628350Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:02.628488Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:02.628536Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:02.629448Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:02.637513Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:02.637684Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:02.637827Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:02.637848Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:02.637856Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:02.637900Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T17:52:02.640857Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:02.640901Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:02.640948Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:02.640953Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:02.640958Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:02.640962Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:02.641024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.641030Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.641047Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:02.641061Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:02.641094Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:02.641099Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:02.641104Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:02.641108Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:02.641112Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:02.641116Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:02.641121Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:02.646337Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.646355Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.646361Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:02.646697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 
2024-11-21T17:52:02.646707Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:02.646720Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:02.646743Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:02.646751Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:02.646758Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T17:52:02.646763Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:02.646766Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:02.646769Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:02.646772Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:02.646822Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:02.646825Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:02.646827Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:02.646829Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:02.646837Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:02.646839Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:02.646842Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:02.646844Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:02.646847Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:02.667787Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:02.667813Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:02.667819Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:02.667830Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:02.667844Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:02.667970Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.667979Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:02.667987Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:02.668007Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:02.668028Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 
2024-11-21T17:52:02.668069Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:02.668081Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.668095Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:02.668100Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:02.668750Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:02.668767Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:02.668824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.668830Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:02.668837Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:02.668845Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:02.668849Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:02.668860Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:02.668865Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:02.668871Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.668875Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:02.668879Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:02.668883Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:02.668933Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:02.668938Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.668941Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:02.668944Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:02.668947Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:02.668959Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:02.668962Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:02.668965Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:02.668968Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:02.668982Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:02.668985Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new 
logically incomplete end at 9437184 2024-11-21T17:52:02.668988Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:02.668994Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:02.668997Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:02.669001Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2024-11-21T17:52:44.657425Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2024-11-21T17:52:44.657429Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2024-11-21T17:52:44.657432Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T17:52:44.657435Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2024-11-21T17:52:44.657441Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit ExecuteDataTx 2024-11-21T17:52:44.657702Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2024-11-21T17:52:44.657710Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 6, NSelectRange: 5, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 5, SelectRowBytes: 40, SelectRangeRows: 131, SelectRangeBytes: 1048, UpdateRowBytes: 13, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:44.657718Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:44.657721Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2024-11-21T17:52:44.657724Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2024-11-21T17:52:44.657728Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2024-11-21T17:52:44.657771Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is DelayComplete 2024-11-21T17:52:44.657775Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2024-11-21T17:52:44.657778Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2024-11-21T17:52:44.657781Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2024-11-21T17:52:44.657786Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2024-11-21T17:52:44.657789Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2024-11-21T17:52:44.657792Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2024-11-21T17:52:44.657795Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:44.657799Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:44.657802Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:44.657805Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 
2024-11-21T17:52:44.669790Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:44.669816Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437184 on unit CompleteOperation 2024-11-21T17:52:44.669836Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437184 at tablet 9437184 send result to client [4:97:2132], exec latency: 4 ms, propose latency: 6 ms 2024-11-21T17:52:44.669850Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2024-11-21T17:52:44.669858Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:44.670155Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:44.670160Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit StoreAndSendOutRS 2024-11-21T17:52:44.670166Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437184 from 9437184 to 9437185 txId 403 2024-11-21T17:52:44.670175Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:44.670180Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T17:52:44.670182Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:44.670185Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2024-11-21T17:52:44.670190Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:52:44.670198Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2024-11-21T17:52:44.670202Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:44.670262Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:227:2222], Recipient [4:434:2384]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2024-11-21T17:52:44.670269Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:44.670274Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 402 2024-11-21T17:52:44.670368Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:227:2222], Recipient [4:434:2384]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2024-11-21T17:52:44.670371Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:44.670374Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 2024-11-21T17:52:44.670421Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:227:2222], Recipient [4:329:2302]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2024-11-21T17:52:44.670425Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T17:52:44.670439Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437185 source 9437184 dest 
9437185 producer 9437184 txId 403 2024-11-21T17:52:44.670450Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437185 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2024-11-21T17:52:44.670459Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437184 to=9437185origin=9437184 2024-11-21T17:52:44.670476Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437185 2024-11-21T17:52:44.670528Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:329:2302], Recipient [4:329:2302]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:44.670533Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:44.670537Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2024-11-21T17:52:44.670542Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2024-11-21T17:52:44.670547Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437185 for LoadAndWaitInRS 2024-11-21T17:52:44.670551Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit LoadAndWaitInRS 2024-11-21T17:52:44.670556Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is Executed 2024-11-21T17:52:44.670559Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T17:52:44.670562Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit ExecuteDataTx 2024-11-21T17:52:44.670566Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit ExecuteDataTx 2024-11-21T17:52:44.670920Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437185 with status COMPLETE 2024-11-21T17:52:44.670930Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437185: {NSelectRow: 3, NSelectRange: 4, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 120, SelectRangeBytes: 960, UpdateRowBytes: 13, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T17:52:44.670942Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2024-11-21T17:52:44.670944Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2024-11-21T17:52:44.670947Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2024-11-21T17:52:44.670949Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2024-11-21T17:52:44.671010Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is DelayComplete 2024-11-21T17:52:44.671013Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2024-11-21T17:52:44.671015Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2024-11-21T17:52:44.671017Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2024-11-21T17:52:44.671021Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is Executed 2024-11-21T17:52:44.671023Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit 
CompletedOperations 2024-11-21T17:52:44.671026Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437185 has finished 2024-11-21T17:52:44.671030Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:44.671033Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T17:52:44.671037Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T17:52:44.671040Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T17:52:44.683109Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T17:52:44.683134Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2024-11-21T17:52:44.683153Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:97:2132], exec latency: 4 ms, propose latency: 6 ms 2024-11-21T17:52:44.683167Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2024-11-21T17:52:44.683173Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2024-11-21T17:52:44.683277Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:329:2302], Recipient [4:227:2222]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2024-11-21T17:52:44.683284Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:52:44.683290Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T17:51:45.264686Z :WriteRAW INFO: Random seed for debugging is 1732211505264678 2024-11-21T17:51:45.401959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791766548021402:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:45.402588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:45.404373Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791767277592878:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:45.404388Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001840/r3tmp/tmpUwMveR/pdisk_1.dat 2024-11-21T17:51:45.434586Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:45.446425Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:45.466976Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27481, node 1 2024-11-21T17:51:45.491512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/6fwh/001840/r3tmp/yandexT7lbPo.tmp 2024-11-21T17:51:45.491523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/001840/r3tmp/yandexT7lbPo.tmp 2024-11-21T17:51:45.491579Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/001840/r3tmp/yandexT7lbPo.tmp 2024-11-21T17:51:45.491622Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:45.496947Z INFO: TTestServer started on Port 29240 GrpcPort 27481 2024-11-21T17:51:45.504487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:45.504518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:29240 2024-11-21T17:51:45.509154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:27481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:45.538471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:45.548067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:45.548093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:45.548600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:51:45.550366Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:45.553135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:45.564560Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:51:45.849894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791766548022126:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.849934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.850152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791766548022161:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.851104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T17:51:45.876274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791766548022163:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T17:51:45.902507Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791767277593202:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:45.902990Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTAwZWY4YzItMjc5MGY2ZDYtY2E3YWZhZDQtMzRlNWY5MTc=, ActorId: [2:7439791767277593185:2277], ActorState: ExecuteState, TraceId: 01jd7xkypn4m7grr5btqa3w5sb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:45.903498Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:45.901859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:45.969741Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791766548022353:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:45.970134Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGIzNzI4ODItMTNiNTFmM2QtN2NmMWM0ZjEtZjUwZTk2NzQ=, ActorId: [1:7439791766548022122:2298], ActorState: ExecuteState, TraceId: 01jd7xkynk5fgdqad8attxa9fj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:45.970263Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:45.973446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:46.061315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27481", true, true, 1000); 2024-11-21T17:51:46.147254Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd7xkyyeb1j58fjspw28s7gs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E1NmZhNDQtYjBkZTc1MmItZGM2NTdlMmYtNzZmZWQ4YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791770842989924:2918] 2024-11-21T17:51:50.401954Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791766548021402:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:50.401989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:51:50.404509Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791767277592878:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:50.404570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:51:51.269184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:27481 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: 2024-11-21T17:51:51.309018Z node 1 :PERSQUEUE INFO: proxy answer Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:27481 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 ... WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:46250 2024-11-21T17:52:46.206075Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:46250 proto=v1 topic=test-topic durationSec=0 2024-11-21T17:52:46.206077Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:52:46.206458Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T17:52:46.206488Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:52:46.206494Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:52:46.206496Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:52:46.206499Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:52:46.206929Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:52:46.221321Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:52:46.221408Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439792028460212861:2481] connected; active server actors: 1 2024-11-21T17:52:46.221425Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T17:52:46.221428Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T17:52:46.221488Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] 
pipe [15:7439792028460212861:2481] disconnected; active server actors: 1 2024-11-21T17:52:46.221499Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439792028460212861:2481] disconnected no session 2024-11-21T17:52:46.236393Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:52:46.236410Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:52:46.236414Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792028460212831:2481] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T17:52:46.236422Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:52:46.236696Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:46.236720Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7439792028460212879:2481], now have 1 active actors on pipe 2024-11-21T17:52:46.236761Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2024-11-21T17:52:46.236860Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:52:46.236877Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:52:46.236921Z node 16 :PERSQUEUE INFO: new Cookie src|e1e35601-27b033d6-d2f1eb27-2845253a_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T17:52:46.236968Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:52:46.236996Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:52:46.237236Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T17:52:46.237247Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T17:52:46.237270Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:52:46.237360Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|e1e35601-27b033d6-d2f1eb27-2845253a_0 2024-11-21T17:52:46.237729Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211566237 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:52:46.237778Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|e1e35601-27b033d6-d2f1eb27-2845253a_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:52:46.237866Z :INFO: [] MessageGroupId [src] SessionId [src|e1e35601-27b033d6-d2f1eb27-2845253a_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:52:46.237874Z :INFO: [] MessageGroupId [src] SessionId [src|e1e35601-27b033d6-d2f1eb27-2845253a_0] Write session will now close 2024-11-21T17:52:46.237880Z :DEBUG: [] MessageGroupId [src] SessionId [src|e1e35601-27b033d6-d2f1eb27-2845253a_0] Write session: aborting 2024-11-21T17:52:46.238044Z :INFO: [] MessageGroupId [src] SessionId [src|e1e35601-27b033d6-d2f1eb27-2845253a_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:52:46.238050Z :DEBUG: [] MessageGroupId [src] SessionId [src|e1e35601-27b033d6-d2f1eb27-2845253a_0] Write session: destroy 2024-11-21T17:52:46.238156Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|e1e35601-27b033d6-d2f1eb27-2845253a_0 grpc read done: success: 0 data: 2024-11-21T17:52:46.238165Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e1e35601-27b033d6-d2f1eb27-2845253a_0 grpc read failed 2024-11-21T17:52:46.238170Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e1e35601-27b033d6-d2f1eb27-2845253a_0 grpc closed 2024-11-21T17:52:46.238175Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e1e35601-27b033d6-d2f1eb27-2845253a_0 is DEAD 2024-11-21T17:52:46.238378Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:52:46.238648Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:52:46.238665Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7439792028460212879:2481] destroyed 2024-11-21T17:52:46.238678Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T17:52:46.252636Z :INFO: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Starting read session 2024-11-21T17:52:46.252651Z :DEBUG: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Starting cluster discovery 2024-11-21T17:52:46.252692Z :INFO: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4503: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4503
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4503. " 2024-11-21T17:52:46.252696Z :DEBUG: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Restart cluster discovery in 0.006293s 2024-11-21T17:52:46.259377Z :DEBUG: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Starting cluster discovery 2024-11-21T17:52:46.259459Z :INFO: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4503: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4503
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4503. " 2024-11-21T17:52:46.259467Z :DEBUG: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Restart cluster discovery in 0.015769s 2024-11-21T17:52:46.276337Z :DEBUG: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Starting cluster discovery 2024-11-21T17:52:46.276400Z :INFO: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4503: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4503
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4503. " 2024-11-21T17:52:46.276404Z :DEBUG: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Restart cluster discovery in 0.023950s 2024-11-21T17:52:46.301376Z :DEBUG: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Starting cluster discovery 2024-11-21T17:52:46.301457Z :NOTICE: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4503: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4503
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4503. " } 2024-11-21T17:52:46.301544Z :NOTICE: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4503: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4503
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4503. " } 2024-11-21T17:52:46.301586Z :INFO: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Closing read session. Close timeout: 0.000000s 2024-11-21T17:52:46.301596Z :NOTICE: [/Root] [/Root] [ad3e565-7f222c43-c6817e26-a113986] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } |84.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... results_accumulator.log} >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TYqlDecimalTests::NegativeValues |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate >> TBSV::ShardsNotLeftInShardsToDelete >> TBSV::CleanupDroppedVolumesOnRestart |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TYqlDecimalTests::NegativeValues [GOOD] >> TYqlDecimalTests::DecimalKey >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD] >> TPersQueueTest::TestWriteStat >> TGRpcNewCoordinationClient::SessionDescribeWatchData >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TYqlDecimalTests::DecimalKey [GOOD] >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType >> TUserAttrsTestWithReboots::InSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:49.472570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:49.472614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:49.472620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:49.472626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:49.484309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:49.484354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:49.484398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T17:52:49.484541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:49.591063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:49.591086Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:49.608642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:49.609782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:49.616472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:49.644508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:49.644888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:49.645042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.648025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:49.673294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.673685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:49.673698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.673742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:49.673752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:49.673758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:49.673782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.675343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:52:49.698390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:49.698507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.698587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:49.698645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:49.698655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.701877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2024-11-21T17:52:49.701925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:49.701983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.701999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:49.702003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:49.702007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:49.702544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.702555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:49.702559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:49.702827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.702836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.702840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.702853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.703378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:49.703673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:49.703710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:49.703846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.703866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:49.703870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.703912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:49.703932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.703952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.703964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:49.712359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:49.712437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:49.712541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:49.712564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:49.712569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.712575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:49.712583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.712590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:49.712594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:49.712629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:49.712634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:49.712637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:49.720665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:49.720719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:49.720724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:49.720730Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:49.720735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.720755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
HARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:50.146496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2024-11-21T17:52:50.146524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:50.146782Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 Forgetting tablet 72075186233409569 2024-11-21T17:52:50.146958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2024-11-21T17:52:50.147024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2024-11-21T17:52:50.147077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T17:52:50.147134Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 2024-11-21T17:52:50.147235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2024-11-21T17:52:50.147268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 Forgetting tablet 72075186233409568 2024-11-21T17:52:50.147372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T17:52:50.147561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2024-11-21T17:52:50.147592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2024-11-21T17:52:50.147654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:50.147673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:50.147680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2024-11-21T17:52:50.147707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2024-11-21T17:52:50.147733Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2024-11-21T17:52:50.147736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T17:52:50.147745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:50.147752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T17:52:50.147757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2024-11-21T17:52:50.147763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T17:52:50.147766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2024-11-21T17:52:50.147769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2024-11-21T17:52:50.147798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2024-11-21T17:52:50.147803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2024-11-21T17:52:50.147807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2024-11-21T17:52:50.147810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2024-11-21T17:52:50.148356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T17:52:50.148370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2024-11-21T17:52:50.148390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2024-11-21T17:52:50.148395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2024-11-21T17:52:50.148509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:52:50.148515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:52:50.148547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:50.148553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:50.148591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2024-11-21T17:52:50.148620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:50.148624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 129, path id: 1 2024-11-21T17:52:50.148628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2024-11-21T17:52:50.148742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T17:52:50.148751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T17:52:50.148755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2024-11-21T17:52:50.148759Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2024-11-21T17:52:50.148763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T17:52:50.148810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:50.148814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2024-11-21T17:52:50.148823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:50.148859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T17:52:50.148866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T17:52:50.148868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2024-11-21T17:52:50.148871Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2024-11-21T17:52:50.148875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:50.148881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2024-11-21T17:52:50.148908Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2024-11-21T17:52:50.148936Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2024-11-21T17:52:50.148975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2024-11-21T17:52:50.149036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2024-11-21T17:52:50.149266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 
2024-11-21T17:52:50.149633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:50.149738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T17:52:50.149770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T17:52:50.149803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2024-11-21T17:52:50.149940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2024-11-21T17:52:50.149946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2024-11-21T17:52:50.150029Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2024-11-21T17:52:50.150045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2024-11-21T17:52:50.150050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1676:3547] TestWaitNotification: OK eventTxId 129 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:49.473023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:49.473046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:49.473050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:49.473056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:49.484833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:49.484858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:49.484892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:49.484995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:49.592017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:49.592037Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:49.610010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:49.610913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:49.616766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:49.639141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:49.639476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:49.643931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.646162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:49.649548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.653912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:49.653939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.653989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:49.654003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:49.654009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:49.654031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.655937Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:52:49.678051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:49.678159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.678231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:49.678281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:49.678290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.686224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.688875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:49.689011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.689440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:49.689452Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:49.689458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:49.690417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:49.690907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.693535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.694188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:49.694796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:49.694849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:49.703205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.703250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:49.703263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.703326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:49.703337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.703361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.703380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:49.712359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:49.712438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:49.712537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:49.712564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:49.712568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.712576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:49.712583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.712594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:49.712598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:49.712618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:49.712624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:49.712628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:49.719763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:49.719821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:49.719829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:49.719839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:49.719847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.719884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
erId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:52:49.853900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409546 2024-11-21T17:52:49.853919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.854005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:52:49.877754Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:52:49.877904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:52:49.877959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2024-11-21T17:52:49.883534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:52:49.883573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T17:52:49.884752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.884800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:49.884810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:52:49.884855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:52:49.884878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:52:49.884882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:52:49.884892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.884904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:52:49.884910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:52:49.884916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:52:49.884921Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:52:49.884924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:52:49.884960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:52:49.884966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:52:49.884970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:52:49.884973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:52:49.886035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:52:49.886050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:52:49.886104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:52:49.886110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:52:49.886233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:52:49.886240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:52:49.886278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:49.886284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:49.886312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:52:49.886348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.886353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:52:49.886357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:52:49.886473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:52:49.886484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:52:49.886489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:52:49.886495Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:52:49.886500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:52:49.886564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:49.886569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:52:49.886578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:49.886631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:52:49.886643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:52:49.886647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:52:49.886651Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:52:49.886654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.886662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:52:49.886713Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:52:49.886769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.886831Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T17:52:49.886857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:52:49.896443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:52:49.900640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:52:49.900679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:52:49.902179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:52:49.902211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:52:49.902295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:52:49.902302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:52:49.902441Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:52:49.902463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:52:49.902468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:389:2370] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:52:49.907138Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:52:49.907221Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:52:49.472861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:49.472890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:49.472896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:49.472902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:49.485037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:49.485065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:49.485088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:49.485186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:49.590711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T17:52:49.590742Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:49.616830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:49.617758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:49.617808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:49.642144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:49.645651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:49.645791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.646443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:49.652596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.657452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:49.657496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.657574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:49.657599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:49.657607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:49.657631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.660700Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:52:49.680930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:49.681081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.681172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:49.681228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:49.681237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.684709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.688865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T17:52:49.689001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.689448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:49.689463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:49.689469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:49.690376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:49.690813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.690830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.692281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.693029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:49.694622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:49.694683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:49.703020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:49.703077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:49.703092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.703165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:49.703174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:49.703209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.703233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:49.712376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:49.712437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:52:49.712536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:49.712546Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:49.712564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:49.712569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.712576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:49.712583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:49.712590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:49.712594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:49.712618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:49.712624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:49.712628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:52:49.720724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:49.720770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:52:49.720776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:52:49.720783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:52:49.720791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:49.720818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
nit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.064418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.064463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.064487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.076476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.076521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.108855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:52:50.109597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:50.109685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:50.109699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:50.109724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.109782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:52:50.109787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:52:50.109806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:50.116385Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.116412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:50.116421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:50.116505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:465:2058] recipient: [1:15:2062] 2024-11-21T17:52:50.164633Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:50.164705Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 100us result status StatusPathDoesNotExist 2024-11-21T17:52:50.164752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: 
ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:52:50.164943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:466:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:469:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:470:2058] recipient: [1:468:2430] Leader for TabletID 72057594046678944 is [1:471:2431] sender: [1:472:2058] recipient: [1:468:2430] 2024-11-21T17:52:50.198462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:50.198505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:50.198511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:50.198516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:50.198522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:50.198525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:50.198535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:50.198611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:50.199949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:50.212555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:50.212647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:50.212691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:50.212698Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:50.212721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:50.212854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:50.212873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.212882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.212939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.212963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at 
schemeshard: 72057594046678944 2024-11-21T17:52:50.212974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.212988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.213370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:52:50.235442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:50.235493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:50.235671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:50.235698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:50.235708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:50.236174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is 
[1:471:2431] sender: [1:531:2058] recipient: [1:15:2062] 2024-11-21T17:52:50.276669Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:52:50.276742Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 101us result status StatusPathDoesNotExist 2024-11-21T17:52:50.276788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2024-11-21T17:52:44.701436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792017226550344:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:44.701631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011bf/r3tmp/tmpiPUUmJ/pdisk_1.dat 2024-11-21T17:52:44.759362Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1069, node 1 2024-11-21T17:52:44.775477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:44.775491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:44.775493Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:44.775538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28101 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:44.799938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.801035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:44.801058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.801834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:44.801838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:44.801861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:44.801883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:44.801886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:52:44.802328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:44.802341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:52:44.802401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:52:44.802665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.803150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:44.803534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211564853, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:44.803543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:52:44.803591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:52:44.803987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:44.804029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:44.804043Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:52:44.804057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:52:44.804067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:52:44.804078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:52:44.804462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:52:44.804476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:52:44.804479Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:44.804492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:52:44.972126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.972289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:52:44.972460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:44.972473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.973175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, 
database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table 2024-11-21T17:52:44.973238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:44.973288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:44.973309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:52:44.973382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:52:44.973540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:44.973550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:44.973553Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:44.973596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:44.973607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:44.973608Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:44.975294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:44.975327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:52:44.975789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:52:45.028217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:52:45.028246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:52:45.028275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T17:52:45.028781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:52:45.029794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211565077, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:45.029810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211565077 2024-11-21T17:52:45.029839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:52:45.030230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:45.030302Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:45.030326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:52:45.030446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:45.030457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:45.030460Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:45.030485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:45.030496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:45.030499Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 28147497 ... 74646Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:49.374647Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T17:52:49.380941Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211569424, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:49.380962Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211569424, at schemeshard: 72057594046644480 2024-11-21T17:52:49.381008Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T17:52:49.381038Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211569424, at schemeshard: 72057594046644480 2024-11-21T17:52:49.381047Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T17:52:49.381057Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211569424, at schemeshard: 72057594046644480 2024-11-21T17:52:49.381064Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T17:52:49.381080Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732211569424 2024-11-21T17:52:49.381087Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T17:52:49.381764Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:49.381916Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:49.381934Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T17:52:49.381951Z node 
13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T17:52:49.381996Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T17:52:49.382004Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T17:52:49.382015Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:52:49.382021Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T17:52:49.382031Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T17:52:49.382038Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T17:52:49.382046Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:52:49.382059Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T17:52:49.382062Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T17:52:49.382065Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T17:52:49.382070Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 0 2024-11-21T17:52:49.383185Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:49.383198Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:49.383204Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:52:49.383257Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:49.383260Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:49.383262Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:52:49.383275Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:49.383277Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:49.383280Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:52:49.383294Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 
281474976715659 2024-11-21T17:52:49.383296Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:49.383298Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:52:49.383313Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:52:49.383315Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:52:49.383317Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T17:52:49.383323Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 2024-11-21T17:52:49.384602Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:52:49.384730Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439792039424694655:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:49.445192Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:52:49.445249Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:52:49.446416Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:52:49.464306Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xnwpt7ta2erhgbsw92xv0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.498367Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xnwt35b4ntv109ne1cczk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.525225Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xnwtx76mfygmjdvc6x52z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.550951Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xnwvx8crg72gnyevs3mh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.568551Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xnwwges6rxzv12qp1ssj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.595836Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xnwx2cyq1a66j4222h0wd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.613746Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd7xnwxy26vntm9y9yaf4p87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.652601Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd7xnwygd9wffaryncdpy80b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.711778Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xnwzse4x0sf4ezks43sqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:49.759487Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd7xnx1j2fr3syek4c4p07g2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzU2YzA0OTUtZjI4MjdmNGMtYjRmODk5ZWMtYTliM2NhNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::AllowedSymbolsReboots |84.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueTest::CheckACLForGrpcRead [GOOD] >> TPersQueueTest::CheckKillBalancer |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueTest::Delete [GOOD] >> TPersQueueTest::FetchRequest |84.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicService::ThereAreGapsInTheOffsetRanges [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace |84.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot >> TopicService::OnePartitionAndNoGapsInTheOffsets >> TUserAttrsTestWithReboots::Reboots >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::PartitionStatsFields >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TPersQueueTest::SetupWriteSession [GOOD] >> TPersQueueTest::WriteAfterAlter [GOOD] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPQCompatTest::DiscoverTopics [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::StoreNoMoreThanXSourceIDs >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> SystemView::PartitionStatsFields [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> LabeledDbCounters::OneTablet [GOOD] |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueTest::FetchRequest [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> TUserAttrsTestWithReboots::Reboots [GOOD] >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets >> TPQCompatTest::SetupLockSession >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay >> TPersQueueTest::ReadRuleServiceTypeLimit >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueTest::EventBatching |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> YdbOlapStore::BulkUpsert |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |84.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> TPQCompatTest::SetupLockSession [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed >> TTopicYqlTest::DropTopicYql >> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD] >> TPQCompatTest::BadTopics |84.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TPersQueueTest::ReadRuleDisallowDefaultServiceType >> TPQCompatTest::BadTopics [GOOD] >> TPQCompatTest::CommitOffsets >> TTopicYqlTest::DropTopicYql [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration >> TPersQueueTest::CheckKillBalancer [GOOD] >> TPersQueueTest::CheckDeleteTopic |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |84.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/test-results/unittest/{meta.json ... results_accumulator.log} |84.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |84.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::Reboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:52:52.825997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:52.826035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:52.826044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:52.826049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:52.826065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:52.826069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:52.826079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:52.826173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:52.980518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:52.980542Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:52:52.992287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2024-11-21T17:52:52.992471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:53.000318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:53.023890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:53.024021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:53.024150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.024391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:53.038179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.086589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:53.086612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.094738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:53.094789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:53.094825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:53.094932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:52:53.097285Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:52:53.138154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:53.138230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.138286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:53.146006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:53.146050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.148853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.148916Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:53.148990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.149008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:53.159224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:53.159267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:53.160713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.160740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:53.160748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:53.169496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.169527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.169536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.184151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.184842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:53.185778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:53.188515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:53.197707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.197762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:53.197777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.197855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:53.197862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.197897Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:53.197917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:53.199488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:53.199498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:53.199528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.199532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:53.199585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.199590Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:53.199601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:53.199604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.199607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:53.199610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.199613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:53.199615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:53.199625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:53.199629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:53.199631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
-- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2024-11-21T17:53:07.357356Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2024-11-21T17:53:07.357393Z node 60 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2024-11-21T17:53:07.357405Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:53:07.357409Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [60:363:2355] TestWaitNotification: OK eventTxId 1005 2024-11-21T17:53:07.357447Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:53:07.357460Z node 60 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 17us result status StatusSuccess 2024-11-21T17:53:07.357490Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 1006 2024-11-21T17:53:07.357746Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirB" UserAttributes { Key: "AttrA3" } UserAttributes { Key: "AttrA1" } } } TxId: 1006 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:53:07.357755Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirB, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:53:07.357763Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T17:53:07.357772Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:53:07.357774Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 1006:0, at 
schemeshard: 72057594046678944 2024-11-21T17:53:07.357983Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:07.357995Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1006, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirB 2024-11-21T17:53:07.358008Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358010Z node 60 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 1006:0, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358013Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2024-11-21T17:53:07.358022Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:53:07.358193Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2024-11-21T17:53:07.358204Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000006 2024-11-21T17:53:07.358238Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358247Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 257698039914 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358249Z node 60 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 1006:0, stepId:5000006, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358263Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T17:53:07.358266Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:53:07.358270Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:53:07.358274Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T17:53:07.358278Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:53:07.358280Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:53:07.358282Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T17:53:07.358284Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T17:53:07.358287Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:53:07.358289Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 1, subscribers: 0 2024-11-21T17:53:07.358291Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 6 2024-11-21T17:53:07.358481Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358486Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:53:07.358499Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358501Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:201:2204], at schemeshard: 72057594046678944, txId: 1006, path id: 3 FAKE_COORDINATOR: Erasing txId 1006 2024-11-21T17:53:07.358542Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:53:07.358547Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:53:07.358549Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:53:07.358551Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2024-11-21T17:53:07.358553Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:53:07.358560Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T17:53:07.358746Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T17:53:07.358772Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:53:07.358775Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:53:07.358805Z node 60 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:53:07.358812Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:53:07.358815Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [60:380:2372] TestWaitNotification: OK eventTxId 1006 2024-11-21T17:53:07.358849Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T17:53:07.358861Z node 60 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 16us result status StatusSuccess 2024-11-21T17:53:07.358886Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2024-11-21T17:52:49.641189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792040021937334:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:49.641229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011bb/r3tmp/tmpwSaIBv/pdisk_1.dat 2024-11-21T17:52:49.744331Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:49.749860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:49.749887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:49.750533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18909, node 1 2024-11-21T17:52:49.775867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:49.775878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:49.775880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:49.775917Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:49.816729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:49.817844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:49.817860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:49.818268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:49.818317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:49.818325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:52:49.818642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:49.818649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:52:49.818961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:49.819221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:52:49.819674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211569865, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:49.819688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:52:49.819743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:52:49.820150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:49.820187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:49.820195Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:52:49.820203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:52:49.820210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:52:49.820220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:52:49.820654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:52:49.820672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:52:49.820680Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:49.820693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:52:49.858622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:49.858680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:52:49.858814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:49.858821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:49.864639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T17:52:49.864730Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:49.864807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:49.864827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T17:52:49.865286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:49.865292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:49.865296Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:49.865337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:49.865341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:49.865342Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:49.866773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:52:49.867753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:49.867782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:52:49.896288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T17:52:49.896790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:49.897592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211569942, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:49.897598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T17:52:49.897621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T17:52:49.897996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:49.898053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:49.898063Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T17:52:49.898072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:52:49.898080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:52:49.898138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 
2024-11-21T17:52:49.898289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:49.898294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:49.898298Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:49.898319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:52:49.898323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:52:49.898324Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:52:49.898328Z node 1 :FLAT ... 024-11-21T17:52:54.126936Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011bb/r3tmp/tmpcMpOSI/pdisk_1.dat 2024-11-21T17:52:54.138234Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20445, node 13 2024-11-21T17:52:54.155839Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:54.155850Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:54.155852Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:54.155884Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:54.227072Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:54.227096Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:54.228562Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:54.230571Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:54.230652Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:54.230660Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:54.231070Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:54.231115Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:54.231123Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:52:54.231453Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:54.231462Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:54.231518Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:54.231693Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:54.232291Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211574275, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:54.232303Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:54.232357Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:54.232658Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:54.232696Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:54.232707Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:54.232716Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:54.232730Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:54.232741Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:54.232881Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:52:54.232894Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:54.232898Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:54.232910Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:52:54.242670Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:54.242710Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:52:54.242783Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:54.242791Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:52:54.243300Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T17:52:54.243335Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:54.243370Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:54.243383Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T17:52:54.243413Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:52:54.243539Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:54.243547Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:54.243549Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:52:54.243578Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:54.243585Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:54.243586Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:52:54.245187Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:52:54.245208Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:52:54.297699Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 281474976715658:0 3 -> 128 2024-11-21T17:52:54.298161Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:54.299072Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211574345, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:54.299083Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T17:52:54.299104Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:52:54.299489Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:54.299551Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:54.299567Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:52:54.299584Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:52:54.299598Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:52:54.299632Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:52:54.299740Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:54.299751Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:54.299755Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:52:54.299789Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:52:54.299795Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:52:54.299796Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:52:54.299800Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: 2024-11-21T17:51:36.052621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791726113179596:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.052899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00197b/r3tmp/tmp7SJqfm/pdisk_1.dat 2024-11-21T17:51:36.109450Z node 1 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1643, node 1 2024-11-21T17:51:36.128406Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:36.128419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:36.128421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:36.128459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:36.152998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.153032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:9202 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:51:36.197178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:36.218846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.228874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:36.234739Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439791726601587752:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.241930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.241960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.243626Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T17:51:36.244281Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:36.244451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:36.244654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:36.253308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.253330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.254659Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T17:51:36.255030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.312440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.318623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:36.318290Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791725103311260:2205];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.320413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.320433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.318359Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791726360716950:2197];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.320471Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:36.321570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.321587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.323956Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:36.323966Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:51:36.324170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.324328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.326638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:36.413177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:36.497097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726113180827:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.497119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.497213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791726113180839:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:36.497756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2024-11-21T17:51:36.501663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791726113180841:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2024-11-21T17:51:36.648069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd7xknhge25xtenw206k90wr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg2NGY4ODktZWI2MGY0MmQtZDFhMzQ3YWUtN2E1M2E0MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.653531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:51:36.726804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd7xknrbfxx54xwtrav404dr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg2NGY4ODktZWI2MGY0MmQtZDFhMzQ3YWUtN2E1M2E0MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.734204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2024-11-21T17:51:36.805277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jd7xkntv8q04q2bcpvsq1q6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg2NGY4ODktZWI2MGY0MmQtZDFhMzQ3YWUtN2E1M2E0MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.820322Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439791726113181173:2337] TxId: 281474976710671. Ctx: { TraceId: 01jd7xknv98qsqxcx4gqwn7w6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk3ZWU0OTctNmM0ZWExMTUtOGQ1YjkxNGItODZlZThhZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Can not find default state storage group for database 2024-11-21T17:51:36.820394Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jd7xknv98qsqxcx4gqwn7w6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk3ZWU0OTctNmM0ZWExMTUtOGQ1YjkxNGItODZlZThhZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:51:36.821079Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439791726113181180:2342], owner: [1:7439791726113181176:2340], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:51:36.821222Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439791726113181180:2342], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:51:36.821319Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439791726113181180:2342], row count: 1, finished: 1 2024-11-21T17:51:36.821356Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439791726113181180:2342], owner: [1:7439791726113181176:2340], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:51:36.821917Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211496819, txId: 281474976710670] shutt ... pId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:52:53.074965Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:53.076752Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:52:53.296146Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439792057267378162:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:53.296177Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439792057267378173:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:53.296186Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:53.296899Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:52:53.301236Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439792057267378176:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:52:53.392331Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xp0hfam5fwvsye2r47fad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ODdhMGU2OTMtY2VkOTQyOWEtZTM0YWRjNjUtYzQyZWFmZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:53.403983Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xp0mk0a0sv2gbwg3fwt59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NDQ2NjYwMC0xY2Y3MDFkOS04ZGY0ODlmOC1mMmMxYTgxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:53.404448Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439792057267378316:2322], owner: [16:7439792057267378312:2320], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:53.404582Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439792057267378316:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:52:53.404645Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439792057267378316:2322], row count: 1, finished: 1 2024-11-21T17:52:53.404656Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439792057267378316:2322], owner: [16:7439792057267378312:2320], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:53.405107Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211573403, txId: 281474976715662] shutting down 2024-11-21T17:52:54.416913Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xp1m641s393epkqqtk9sn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=M2U4ZGRkZmUtMzhkZTlmYzUtZWFiNTc1OTUtZDZhZWM1MTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:54.417405Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439792061562345684:2333], owner: [16:7439792061562345680:2331], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:54.417562Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439792061562345684:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:52:54.417654Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439792061562345684:2333], row count: 1, finished: 1 2024-11-21T17:52:54.417664Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439792061562345684:2333], owner: [16:7439792061562345680:2331], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:54.418193Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211574416, txId: 281474976715664] shutting down 2024-11-21T17:52:55.428215Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xp2kv6xy09eggmef4k8d0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NWFmNzg5Zi1hYmEwOGRmZC0xODJlYjAyYy1lMjExODNiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:55.428683Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439792065857313025:2344], owner: [16:7439792065857313021:2342], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:55.428853Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439792065857313025:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:52:55.428915Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439792065857313025:2344], row count: 1, finished: 1 2024-11-21T17:52:55.428924Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439792065857313025:2344], owner: [16:7439792065857313021:2342], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:55.429407Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211575427, txId: 281474976715666] shutting down 2024-11-21T17:52:56.438069Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xp3kefh6v05026wd2agkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YjQxOWJjOTAtYzNhNTJlZWQtM2Y5NjgzYi01NjI3NDFiNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:56.438496Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439792070152280366:2355], owner: [16:7439792070152280362:2353], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:56.438626Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439792070152280366:2355], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:52:56.438701Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439792070152280366:2355], row count: 1, finished: 1 2024-11-21T17:52:56.438715Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439792070152280366:2355], owner: [16:7439792070152280362:2353], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:56.439213Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211576437, txId: 281474976715668] shutting down 2024-11-21T17:52:57.448174Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jd7xp4k0d3k9sztx1cjh9f9q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YjY3YzhkNjctNDU1YTk5NTUtNGFlNDY1ZDItYThlM2MzNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:57.448602Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439792074447247707:2366], owner: [16:7439792074447247703:2364], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:57.448734Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439792074447247707:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:52:57.448784Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439792074447247707:2366], row count: 1, finished: 1 2024-11-21T17:52:57.448793Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439792074447247707:2366], owner: [16:7439792074447247703:2364], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:57.449244Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211577447, txId: 281474976715670] shutting down 2024-11-21T17:52:57.909502Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7439792052972410120:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:57.909528Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:52:58.458882Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jd7xp5jjfvvkge841gs9v158, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MmUyYmRkY2MtYTFhMDc4MzctOGFlODg2NDgtM2QyZTViYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:58.459317Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439792078742215058:2380], owner: [16:7439792078742215054:2378], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:58.459440Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439792078742215058:2380], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:52:58.459516Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439792078742215058:2380], row count: 1, finished: 1 2024-11-21T17:52:58.459538Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439792078742215058:2380], owner: [16:7439792078742215054:2378], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:58.460017Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211578458, txId: 281474976715672] shutting down 2024-11-21T17:52:58.475834Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jd7xp5jw6f9dtrkshqqza5sp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZWFlYTFhY2ItN2UyNjhlZDUtYjFkMzMxNzctMzQxNzQ5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:52:58.476430Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439792078742215089:2389], owner: [16:7439792078742215086:2387], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:58.476635Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439792078742215089:2389], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:52:58.476705Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439792078742215089:2389], row count: 1, finished: 1 2024-11-21T17:52:58.476716Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439792078742215089:2389], owner: [16:7439792078742215086:2387], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T17:52:58.477278Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211578475, txId: 281474976715674] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T17:51:44.413499Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732211504413493 2024-11-21T17:51:44.542776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791759922693713:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:44.542841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:44.560118Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791759908931569:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:44.631410Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:44.632992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:44.636513Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00185e/r3tmp/tmpnaApLz/pdisk_1.dat 2024-11-21T17:51:44.727212Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:44.743146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:44.743168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:44.743693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:44.743701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:44.745698Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:44.745728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:44.746056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28618, node 1 2024-11-21T17:51:44.830627Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00185e/r3tmp/yandexilXRdv.tmp 2024-11-21T17:51:44.830644Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00185e/r3tmp/yandexilXRdv.tmp 2024-11-21T17:51:44.830724Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00185e/r3tmp/yandexilXRdv.tmp 2024-11-21T17:51:44.830744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:44.844336Z INFO: TTestServer started on Port 11241 GrpcPort 28618 TClient is connected to server localhost:11241 PQClient connected to localhost:28618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:44.901298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:44.972039Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:45.143679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791764203898972:2278], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.143725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791764203899015:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.143736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.152856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T17:51:45.162392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791764217661758:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.162415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791764217661770:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.162478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:45.173653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791764203899019:2282], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T17:51:45.180385Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:51:45.180537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791764217661819:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T17:51:45.232632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:45.259397Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791764217661961:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:45.266934Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODFjZDA3NzAtNDk1YTNhZmQtMjE1YWUyNDQtMzVkM2NmYWQ=, ActorId: [1:7439791764217661755:2298], ActorState: ExecuteState, TraceId: 01jd7xky072kyhqrhjkwscywzj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:45.267541Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:45.280421Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791764203899090:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:45.280846Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQ5YTRhMjgtNzc5YmJkOTUtNGY4MjBlNWUtYWNkMjliZWI=, ActorId: [2:7439791764203898970:2277], ActorState: ExecuteState, TraceId: 01jd7xkxzpdhdv7050sfzdcvhf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:45.281005Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:45.337164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:45.378700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:28618", true, true, 1000); 2024-11-21T17:51:45.477539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xky9310s0r8p7yjrgje6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA4MmYyOGEtNTliZDVjNTktNDhkNzJiYTctM2MyZWZmNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791764217662269:2938] 2024-11-21T17:51:49.542074Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791759922693713:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:49.542122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:51:49.556979Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791759908931569:2200];send_to=[0:7307199536658146131:7762515]; 2024 ... 
HOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:52:57.288102Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:52:57.288103Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:52:57.288108Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792075280266430:2567] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:52:57.288526Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439792075280266430:2567] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:52:57.383221Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720699. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:52:57.383262Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439792075280266394:2560] TxId: 281474976720699. Ctx: { TraceId: 01jd7xp4bf9qywadrgc08fw81z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NjhkNjNiY2ItZGU1N2ZmYmItOGJmN2I4MTQtZGRjNDllZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:52:57.383368Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NjhkNjNiY2ItZGU1N2ZmYmItOGJmN2I4MTQtZGRjNDllZDQ=, ActorId: [15:7439792075280266381:2560], ActorState: ExecuteState, TraceId: 01jd7xp4bf9qywadrgc08fw81z, Create QueryResponse for error on request, msg: 2024-11-21T17:52:57.383572Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xp4c53g9hwwa2524cs1ed" } } YdbStatus: UNAVAILABLE ConsumedRu: 14 } 2024-11-21T17:52:57.453012Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720700. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T17:52:57.453057Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439792075280266448:2569] TxId: 281474976720700. Ctx: { TraceId: 01jd7xp4e85ckgcjwmb5x368kh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=Mjc1M2NiNjctYjgyMDZkNWMtODM2NGUxYTItYjVhMTRlNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T17:52:57.453166Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=Mjc1M2NiNjctYjgyMDZkNWMtODM2NGUxYTItYjVhMTRlNDk=, ActorId: [15:7439792075280266435:2569], ActorState: ExecuteState, TraceId: 01jd7xp4e85ckgcjwmb5x368kh, Create QueryResponse for error on request, msg: 2024-11-21T17:52:57.453449Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7439792075280266430:2567] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=Mjc1M2NiNjctYjgyMDZkNWMtODM2NGUxYTItYjVhMTRlNDk=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xp4e85ckgcjwmb6wc2af4" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T17:52:57.453471Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=Mjc1M2NiNjctYjgyMDZkNWMtODM2NGUxYTItYjVhMTRlNDk=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xp4e85ckgcjwmb6wc2af4" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T17:52:57.453658Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2024-11-21T17:52:57.453861Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=Mjc1M2NiNjctYjgyMDZkNWMtODM2NGUxYTItYjVhMTRlNDk=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd7xp4e85ckgcjwmb6wc2af4" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T17:52:57.453867Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Write session will restart in 2.000000s 2024-11-21T17:52:57.453886Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Write session: Do CDS request 2024-11-21T17:52:57.453890Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Do schedule cds request after 2000 ms 2024-11-21T17:52:57.519097Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715684. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T17:52:57.519148Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439792074689573128:2450] TxId: 281474976715684. Ctx: { TraceId: 01jd7xp4ft6wz00jyvwy4a4hzq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=Yjk3Yzc4ZjEtMTc2MmUwOWMtZDQ0YTMzMzMtOGY1NzA5MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T17:52:57.519269Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=Yjk3Yzc4ZjEtMTc2MmUwOWMtZDQ0YTMzMzMtOGY1NzA5MDE=, ActorId: [16:7439792074689573111:2450], ActorState: ExecuteState, TraceId: 01jd7xp4ft6wz00jyvwy4a4hzq, Create QueryResponse for error on request, msg: 2024-11-21T17:52:57.519563Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd7xp4gg0yk28htmtz45hgje" } } YdbStatus: UNAVAILABLE ConsumedRu: 13 } 2024-11-21T17:52:58.096487Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720703. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:52:58.096564Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439792075280266549:2573] TxId: 281474976720703. Ctx: { TraceId: 01jd7xp52gfr8pamq5b59y4cqa, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MzA1YjllMzAtNWRjZmI1NDktMTZkMzE5YzQtNmE5MjFkNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:52:58.096706Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MzA1YjllMzAtNWRjZmI1NDktMTZkMzE5YzQtNmE5MjFkNWE=, ActorId: [15:7439792075280266546:2573], ActorState: ExecuteState, TraceId: 01jd7xp52gfr8pamq5b59y4cqa, Create QueryResponse for error on request, msg: 2024-11-21T17:52:58.097002Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xp52gfr8pamq5b7jy2bt0" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T17:52:58.194464Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715686. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:52:58.194519Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439792078984540507:2459] TxId: 281474976715686. Ctx: { TraceId: 01jd7xp55haha2hemfdhnw0j38, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZTM0OTBiYTEtNDY3NDYzY2YtZDQ4ZmFkZjktOTQ2ZDJmNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T17:52:58.194610Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=ZTM0OTBiYTEtNDY3NDYzY2YtZDQ4ZmFkZjktOTQ2ZDJmNDA=, ActorId: [16:7439792078984540504:2459], ActorState: ExecuteState, TraceId: 01jd7xp55haha2hemfdhnw0j38, Create QueryResponse for error on request, msg: 2024-11-21T17:52:58.194847Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd7xp55j3kw3rh6nabne5ve6" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T17:52:58.287563Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Write session: close. Timeout = 0 ms 2024-11-21T17:52:58.287581Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Write session will now close 2024-11-21T17:52:58.287590Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Write session: aborting 2024-11-21T17:52:58.287760Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T17:52:58.287764Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f0d57268-e79f69cb-e00d9a3b-beb21707_0] Write session: destroy 2024-11-21T17:52:58.567527Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720705. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T17:52:58.567573Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439792079575233916:2575] TxId: 281474976720705. Ctx: { TraceId: 01jd7xp5gg5wf6p4qzf52na4he, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=OTU0NmViYTMtOTdiMDA2YzQtYjUzNmI1OGMtZmQ4MGVkYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T17:52:58.567684Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=OTU0NmViYTMtOTdiMDA2YzQtYjUzNmI1OGMtZmQ4MGVkYWE=, ActorId: [15:7439792079575233903:2575], ActorState: ExecuteState, TraceId: 01jd7xp5gg5wf6p4qzf52na4he, Create QueryResponse for error on request, msg: 2024-11-21T17:52:58.567970Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd7xp5h8enx4tvf1b3rx9vk7" } } YdbStatus: UNAVAILABLE ConsumedRu: 16 } |84.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersQueueTest::AllEqual [GOOD] |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |84.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |84.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |84.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |84.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |84.6%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |84.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |84.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |84.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |84.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats >> TPersQueueTest::EventBatching [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> YdbYqlClient::TestTzTypesFullStack |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |84.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::AllEqual [GOOD] Test command err: 2024-11-21T17:51:58.413908Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:58.413929Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:51:58.652941Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:51:58.652965Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to 
SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST === Server->StartServer(false); 2024-11-21T17:51:58.923785Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791820354502274:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:58.923959Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:58.926394Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439791822029815294:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:58.926667Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002006/r3tmp/tmpVS7KXt/pdisk_1.dat 2024-11-21T17:51:58.959141Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:58.960359Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:58.986967Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1455, node 3 2024-11-21T17:51:58.999480Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/002006/r3tmp/yandex63Pwlz.tmp 2024-11-21T17:51:58.999496Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/002006/r3tmp/yandex63Pwlz.tmp 2024-11-21T17:51:58.999551Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/002006/r3tmp/yandex63Pwlz.tmp 2024-11-21T17:51:58.999597Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:59.003037Z INFO: TTestServer started on Port 7232 GrpcPort 1455 TClient is connected to server localhost:7232 PQClient connected to localhost:1455 === TenantModeEnabled() = 0 === Init PQ - start server on port 1455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:59.023916Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:59.023950Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:59.025499Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:59.030687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:59.030742Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:59.030799Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:51:59.030845Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:59.030858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:59.031622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:59.031645Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:59.031697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:59.031712Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:59.031714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T17:51:59.031717Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:51:59.032193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:59.032206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T17:51:59.032210Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:59.032246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:59.032256Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:59.032262Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:51:59.032682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:59.032691Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:59.032695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:59.032699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:51:59.033415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:59.033877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T17:51:59.033940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:51:59.034591Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211519080, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:59.034621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439791820354502802 RawX2: 12884904224 } } Step: 1732211519080 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:51:59.034627Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:59.034690Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:59.034699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:59.034727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:51:59.034766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:51:59.035167Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:59.035176Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:51:59.035219Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:59.035226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7439791820354502851:2375], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T17:51:59.035232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:59.035237Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:59.035250Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:59.035256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:51:59.035260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published ... cookie: 5 sessionId: describe result for acl check 2024-11-21T17:53:08.872934Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:53:08.872939Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:53:08.872940Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:53:08.872943Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:53:08.873282Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:53:08.874887Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) Update the table 2024-11-21T17:53:08.877007Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:53:08.877016Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser 
[19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:53:08.877018Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) Start idle 2024-11-21T17:53:08.877022Z node 19 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 5 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:53:08.877112Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037901, NodeId 19, Generation: 1 2024-11-21T17:53:08.877114Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:08.877120Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] server connected, pipe [19:7439792120360948287:2592], now have 1 active actors on pipe 2024-11-21T17:53:08.877126Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T17:53:08.877130Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T17:53:08.877145Z node 19 :PERSQUEUE INFO: new Cookie 123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0 generated for partition 0 topic 'rt3.dc1--account--topic' owner 123 2024-11-21T17:53:08.877167Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:53:08.877187Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:08.877212Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T17:53:08.877218Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T17:53:08.877229Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:08.877246Z node 19 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 0 MaxSeqNo: 2 sessionId: 123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0 2024-11-21T17:53:08.877424Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211588877 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:53:08.877447Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0" topic: "account/topic" cluster: "dc1" 2024-11-21T17:53:08.877534Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write 1 messages with Id from 1 to 1 2024-11-21T17:53:08.877555Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session: try to update token 2024-11-21T17:53:08.877560Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Send 1 message(s) (0 left), first sequence number is 3 2024-11-21T17:53:08.877616Z :INFO: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session: close. 
Timeout = 10000 ms 2024-11-21T17:53:08.877653Z node 19 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: 123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:53:08.877719Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:53:08.877747Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T17:53:08.877753Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T17:53:08.877766Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T17:53:08.877775Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:53:08.877792Z node 19 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic' requestId: 2024-11-21T17:53:08.877797Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message batch for topic 'rt3.dc1--account--topic' partition 0 2024-11-21T17:53:08.877805Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] got client message topic: rt3.dc1--account--topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 370 offset: -1 2024-11-21T17:53:08.877821Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Send write quota request. Topic: "rt3.dc1--account--topic". Partition: 0. Amount: 374. Cookie: 3 2024-11-21T17:53:09.877361Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) Update the table 2024-11-21T17:53:09.897568Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2024-11-21T17:53:09.897580Z node 19 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [19:7439792120360948261:2592] (SourceId=123, PreferedPartition=(NULL)) Start idle 2024-11-21T17:53:12.371930Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Got quota. Topic: "rt3.dc1--account--topic". Partition: 0: Cookie: 3 2024-11-21T17:53:12.372044Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2024-11-21T17:53:12.372084Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 441 count 1 nextOffset 3 batches 1 2024-11-21T17:53:12.372170Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 429 WTime 1732211592371 2024-11-21T17:53:12.372250Z node 19 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:53:12.373602Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 374 2024-11-21T17:53:12.373623Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:53:12.373634Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T17:53:12.373690Z node 19 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:53:12.373726Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:53:12.374421Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 3494 throttled_on_topic_duration_ms: 3494 } 2024-11-21T17:53:12.374440Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session: acknoledged message 1 2024-11-21T17:53:12.380138Z :INFO: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session will now close 2024-11-21T17:53:12.380155Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session: aborting 2024-11-21T17:53:12.380741Z :INFO: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:53:12.380751Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session is aborting and will not restart 2024-11-21T17:53:12.380771Z :DEBUG: [] MessageGroupId [123] SessionId [123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0] Write session: destroy 2024-11-21T17:53:12.380839Z node 19 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: 123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0 grpc read done: success: 0 data: 2024-11-21T17:53:12.380853Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0 grpc read failed 2024-11-21T17:53:12.381041Z node 19 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: 123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0 2024-11-21T17:53:12.381052Z node 19 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: 123|652622bb-6028e437-b7ef8ed5-2c5a0f09_0 is DEAD 2024-11-21T17:53:12.381357Z node 19 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037901 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:53:12.381408Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:12.381425Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901] server disconnected, pipe [19:7439792120360948287:2592] destroyed 2024-11-21T17:53:12.381443Z node 19 :PERSQUEUE DEBUG: [PQ: 72075186224037901, Partition: 0, State: StateIdle] TPartition::DropOwner. 
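
Note on the dump above: it shows one recovery pattern twice. Transient KQP failures ("Failed to resolve tablet ... after several retries", status UNAVAILABLE) are retried by the write session ("Test retry state: get retry delay", "Write session will restart in 2.000000s", "Do schedule cds request after 2000 ms"), while the earlier SCHEME_ERROR for the missing /Root/PQ/Config/V2/Cluster table is reported back immediately and not retried. A minimal sketch of that retry-on-transient-error loop follows; it is written in plain C++ with no YDB SDK calls, and every name in it (Status, IsRetryable, RunWithRetries) is invented for illustration and is not part of the code producing this log.

// Illustrative sketch only: a retry loop with a fixed delay, mirroring the behaviour
// visible in the log ("Got error. Status: UNAVAILABLE" followed by
// "Write session will restart in 2.000000s"). Not the YDB SDK implementation.
#include <chrono>
#include <functional>
#include <iostream>
#include <optional>
#include <thread>

enum class Status { Success, Unavailable, SchemeError };

// UNAVAILABLE (e.g. "Failed to resolve tablet ... after several retries") is treated
// as transient; SCHEME_ERROR (a missing table) is permanent and must not be retried.
bool IsRetryable(Status s) {
    return s == Status::Unavailable;
}

std::optional<Status> RunWithRetries(const std::function<Status()>& attempt,
                                     int maxRetries,
                                     std::chrono::milliseconds delay) {
    for (int i = 0; i <= maxRetries; ++i) {
        Status s = attempt();
        if (s == Status::Success || !IsRetryable(s)) {
            return s;                         // done, or a non-retryable error
        }
        std::cerr << "attempt " << i << " failed with UNAVAILABLE, retrying in "
                  << delay.count() << " ms\n";
        std::this_thread::sleep_for(delay);   // the log shows a fixed 2000 ms restart delay
    }
    return std::nullopt;                      // retries exhausted
}

int main() {
    int calls = 0;
    // Fake operation: fails twice with UNAVAILABLE, then succeeds.
    auto op = [&]() { return ++calls < 3 ? Status::Unavailable : Status::Success; };
    auto result = RunWithRetries(op, /*maxRetries=*/5, std::chrono::milliseconds(2000));
    std::cout << (result && *result == Status::Success ? "write succeeded\n"
                                                       : "write failed\n");
    return 0;
}

This matches the outcome recorded above: the UNAVAILABLE resolver failures are retried until the write is acknowledged (write response with sequence_numbers: 3), so TPersQueueTest::AllEqual still finishes as [GOOD].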
>> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant >> TGRpcYdbTest::CreateTableBadRequest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> YdbYqlClient::RetryOperationTemplate >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> TUserAttrsTestWithReboots::InSubdomain [GOOD] >> TPQCompatTest::CommitOffsets [GOOD] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError |84.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |84.8%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::InSubdomain [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:52:52.798634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:52.798675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:52.798697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:52.798703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:52.806302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:52.806333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:52:52.806363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:52.806470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T17:52:52.980509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:52.980538Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:52:52.993855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:52.994058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:53.000702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:53.014339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:53.014451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:53.014578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.024879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:53.044749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.086384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:53.086444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.094742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:53.094789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:53.094818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:53.094932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:52:53.096759Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:52:53.131025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:53.136897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.137034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:52:53.145943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:53.145992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.146957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.146987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:53.147048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.147060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:53.156912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:53.156966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:53.158060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.158091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:53.158097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:53.169770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.169793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.169799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.184150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.184842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:53.188154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:53.188516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:53.197785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.197867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:53.197883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.197992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:53.198013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.198055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:53.198081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:53.198946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:53.198964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:53.199015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.199036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:53.199144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.199152Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:53.199166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:53.199170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.199175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:53.199180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.199186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:53.199189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:53.199206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:53.199212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:53.199215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
eady parts: 1/1 2024-11-21T17:53:14.663709Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2024-11-21T17:53:14.663712Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:53:14.663716Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:53:14.663719Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:53:14.663747Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T17:53:14.663752Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2024-11-21T17:53:14.663755Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T17:53:14.663758Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:53:14.663856Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:53:14.663866Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:53:14.663870Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:53:14.663874Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T17:53:14.663877Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:53:14.663968Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:53:14.663976Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:53:14.663982Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:53:14.663986Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:53:14.663989Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:53:14.663997Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2024-11-21T17:53:14.664413Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:53:14.664423Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2024-11-21T17:53:14.664426Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:53:14.664523Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:53:14.664621Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:53:14.664755Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:14.664804Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:53:14.665053Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:53:14.665123Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:53:14.665151Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:53:14.665188Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2024-11-21T17:53:14.665450Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:53:14.665471Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:53:14.665595Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:53:14.665600Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:53:14.665615Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:53:14.665763Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:53:14.665784Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:53:14.665789Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:53:14.665796Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:53:14.665842Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:53:14.665846Z node 89 :FLAT_TX_SCHEMESHARD 
DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:53:14.666050Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:53:14.666054Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:53:14.666062Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:53:14.666065Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:53:14.666296Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:53:14.666308Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409547 TestWaitNotification wait txId: 1004 2024-11-21T17:53:14.666352Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:53:14.666357Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:53:14.666397Z node 89 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:53:14.666408Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:53:14.666411Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [89:536:2491] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:53:14.666452Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:53:14.666471Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 26us result status StatusPathDoesNotExist 2024-11-21T17:53:14.666491Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:53:14.666519Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:53:14.666531Z node 89 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 13us result status StatusSuccess 2024-11-21T17:53:14.666563Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> TGRpcYdbTest::RemoveNotExistedDirectory >> YdbTableBulkUpsert::Simple >> YdbYqlClient::RetryOperationTemplate [GOOD] >> YdbYqlClient::RetryOperationAsync >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism >> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD] >> TGRpcYdbTest::SdkUuid >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::Types ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2024-11-21T17:53:13.472567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792144980413153:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:13.472736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011a5/r3tmp/tmplqiKXO/pdisk_1.dat 2024-11-21T17:53:13.517952Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19901, node 1 2024-11-21T17:53:13.537839Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:13.537860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:13.537862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:13.537897Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:13.565656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:13.566593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:13.566613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:13.567263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:13.567317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:13.567319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:53:13.567655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:13.567665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:13.567949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:13.568208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:13.568735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211593616, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:13.568746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:13.568793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:13.569121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:13.569154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:13.569167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:13.569176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:13.569187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:13.569199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:13.569552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:53:13.569568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:13.569572Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:13.569583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:13.572733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:13.572752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:13.574117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:14.156019Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792147459697386:2059];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011a5/r3tmp/tmpYcJHjM/pdisk_1.dat 2024-11-21T17:53:14.159873Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:14.168894Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29643, node 4 2024-11-21T17:53:14.182444Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:14.182460Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:14.182463Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:14.182511Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:14.256224Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:14.256259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:14.257882Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:14.258573Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.258661Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:14.258669Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.259052Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:14.259126Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:14.259138Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:53:14.259530Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:14.259541Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:14.259588Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:14.259876Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.260869Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211594309, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:14.260884Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:14.260938Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:14.261413Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:14.261492Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:14.261513Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:14.261530Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:14.261546Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:14.261568Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:14.261688Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, ... 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:15.590579Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:15.590605Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:15.592085Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:15.592664Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.592763Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:15.592777Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.593219Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:15.593269Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:15.593275Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:15.593626Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:15.593632Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:15.593773Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:15.593954Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.594815Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211595639, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:15.594826Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:15.594882Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:15.595289Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:15.595314Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:15.595324Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:15.595333Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:15.595343Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:15.595350Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:15.595461Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T17:53:15.595477Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:15.595479Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:15.595488Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:53:15.651217Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:15.651258Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:15.651393Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 2024-11-21T17:53:16.195117Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439792155375822114:2057];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:16.195176Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011a5/r3tmp/tmp4GGnQY/pdisk_1.dat 2024-11-21T17:53:16.207771Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9414, node 13 2024-11-21T17:53:16.227418Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:16.227435Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:16.227437Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:16.227495Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:16.295227Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:16.295267Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:16.296961Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:16.298837Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.298945Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:16.298957Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.299344Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:16.299416Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:16.299430Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:16.299791Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:16.299801Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:16.299894Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:16.300109Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.300934Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211596346, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:16.300946Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:16.301011Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:16.301434Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:16.301503Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:16.301520Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:16.301539Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:16.301554Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:16.301587Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:16.301767Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T17:53:16.301793Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:16.301803Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:16.301821Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:53:16.327313Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:16.327380Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:16.327538Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 0 >> YdbYqlClient::TestReadWrongTable [GOOD] >> TGRpcYdbTest::SdkUuid [GOOD] >> TGRpcYdbTest::ReadTable >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2024-11-21T17:53:13.367034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792144350969733:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:13.367284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011b1/r3tmp/tmpxVMsVW/pdisk_1.dat 2024-11-21T17:53:13.425076Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11550, node 1 2024-11-21T17:53:13.443206Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:13.443217Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:13.443219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:13.443265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:13.464352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:13.465320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:13.465350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:13.466183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:13.466265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:13.466274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T17:53:13.466683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:13.466841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:13.466858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:13.467137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:13.467156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:13.467320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:13.468352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211593511, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:13.468368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:13.468429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:13.468486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:13.468884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:13.468937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:13.468953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:13.468963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:13.468977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:13.468993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:13.469468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 
2024-11-21T17:53:13.469491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:13.469496Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:13.469515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:13.615959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792144350970658:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:13.615959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792144350970646:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:13.615971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:13.616466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:13.616514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:13.616517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T17:53:13.616533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:13.616535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T17:53:13.616543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:13.616549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:53:13.616588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2024-11-21T17:53:13.616644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:13.616649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T17:53:13.617293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:53:13.617348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:13.617423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:13.617444Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2024-11-21T17:53:13.617472Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:13.617483Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:13.617490Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:13.617511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:53:13.617736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2024-11-21T17:53:13.617750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:13.617754Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:13.617795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:13.617804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:13.617805Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:53:13.617820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:13.617822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:13.617824Z node 1 :FLA ... delivered;self_id=[10:7439792157162229785:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:16.519586Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011b1/r3tmp/tmpcVKRGL/pdisk_1.dat 2024-11-21T17:53:16.532350Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20448, node 10 2024-11-21T17:53:16.546679Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:16.546690Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:16.546693Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:16.546748Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:16.620087Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:16.620113Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:16.621618Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:16.623020Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.623112Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:16.623121Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.623419Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:16.623468Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:16.623476Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:16.623818Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:16.623826Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:16.624012Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:16.624163Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.625107Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211596668, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:16.625123Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:16.625190Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:16.625630Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:16.625696Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:16.625719Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:16.625744Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:16.625764Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:16.625781Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:16.625923Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:53:16.625936Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:16.625945Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:16.625958Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:53:16.633491Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jd7xpqas01fj775w763bmy67, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:56892, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998751s 2024-11-21T17:53:16.634529Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jd7xpqat5378n6znqjak2pdr, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:56892, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T17:53:16.768081Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jd7xpqf03af0rtsq7b3tqz6t, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:56892, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T17:53:16.768341Z node 10 :TX_PROXY ERROR: [ReadTable [10:7439792157162230699:2296] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2024-11-21T17:53:16.768375Z node 10 :TX_PROXY ERROR: [ReadTable [10:7439792157162230699:2296] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2024-11-21T17:53:16.768523Z node 10 :READ_TABLE_API NOTICE: [10:7439792157162230698:2296] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2024-11-21T17:53:16.769112Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DeleteSessionRequest, traceId# 01jd7xpqf15gst0sxad71q020b, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:56892, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 2.009121s 2024-11-21T17:53:16.776266Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e1c00] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776298Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6da900] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776357Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6d0900] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776373Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6ec600] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776400Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6d9500] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776424Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6d9000] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776431Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6c5a00] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776451Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e2600] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776468Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e2100] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776475Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6c2800] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776500Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6cf000] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776514Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e1700] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776557Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6c4600] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776570Z node 10 :GRPC_SERVER DEBUG: [0x443b9c301200] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776594Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e1200] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776607Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6c5500] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776635Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e7b00] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776644Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e8000] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776671Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6c2300] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776678Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6d5900] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776709Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e5300] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 
2024-11-21T17:53:16.776716Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6e8500] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776746Z node 10 :GRPC_SERVER DEBUG: [0x443bbec86e00] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T17:53:16.776761Z node 10 :GRPC_SERVER DEBUG: [0x443bbf6d8600] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2024-11-21T17:51:57.978992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791818863561260:2231];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.979044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:58.005041Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002011/r3tmp/tmpUomGCa/pdisk_1.dat 2024-11-21T17:51:58.012896Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:58.019150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:58.036537Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5429, node 1 2024-11-21T17:51:58.054271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/002011/r3tmp/yandex3q4KQk.tmp 2024-11-21T17:51:58.054281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/002011/r3tmp/yandex3q4KQk.tmp 2024-11-21T17:51:58.054339Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/002011/r3tmp/yandex3q4KQk.tmp 2024-11-21T17:51:58.054379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:58.059192Z INFO: TTestServer started on Port 13802 GrpcPort 5429 TClient is connected to server localhost:13802 PQClient connected to localhost:5429 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:58.078587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:58.078638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:58.080292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:58.108083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:58.108109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:58.109087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.109441Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:58.109798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:51:58.123772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:58.264077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791823158529332:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.264095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.264185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791823158529359:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.264930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:51:58.267806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791823158529390:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.267952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.268257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2024-11-21T17:51:58.268612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791823158529361:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:51:58.292871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.351386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.353149Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791823158529580:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:58.353253Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQ1ZDlkODUtOGEyNmMwZWUtNzYyNWIxNS1jNjEyZmMyNg==, ActorId: [1:7439791823158529329:2300], ActorState: ExecuteState, TraceId: 01jd7xmasq0tmfq35j8hfyp4b1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:58.353760Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:58.414042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:58.443223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xmaytej8d0es7camnzebv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU4NzAxYTktYjUyZGU4MDAtNWVjN2U1OTQtY2Q4Y2MyZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791823158529880:3043] 2024-11-21T17:52:02.979300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791818863561260:2231];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:02.979330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok CreateTopicNoLegacy: /Root/PQ/rt3.dc1--topic1 2024-11-21T17:52:03.485779Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=ZGVlNGNhMDQtYzA4NzFlNzctOTRkMDZmNmItOWQ0MjVjZjM=, workerId: [1:7439791844633366736:2408], local sessions count: 0 2024-11-21T17:52:03.486563Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791823158528614:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T17:52:03.486617Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439791844633366773:3301], recipient# [1:7439791844633366772:2420], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: RootUnknown Kind: KindUnknown DomainInfo }] } Create topic: /Root/PQ/rt3.dc1--topic1 AddTopic: /Root/PQ/rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = /Root/PQ/rt3.dc1/topic1, dc = unknown 2024-11-21T17:52:03.487442Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2024-11-21T17:52:03.487627Z node 1 :KQP_PROXY DEBUG: TraceId: "01jd7xmfwzc664tc8g07y40cgf", Request has 5.009607s seconds to be completed 2024-11-21T17:52:03.487633Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439791823158528614:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/PQ TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false ... 
83647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T17:53:17.085809Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Step TInitConfigStep 2024-11-21T17:53:17.085880Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. 
Step TInitInternalFieldsStep 2024-11-21T17:53:17.085920Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 0, State: StateInit] bootstrapping 0 [25:7439792163048902474:2451] 2024-11-21T17:53:17.086302Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T17:53:17.086338Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:17.086458Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 1. 
Step TInitConfigStep 2024-11-21T17:53:17.086504Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892] Config applied version 0 actor [25:7439792137279096874:2200] txId 281474976715679 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T17:53:17.086597Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:53:17.086614Z node 25 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Completed. 2024-11-21T17:53:17.086642Z node 25 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 0 generation 1 [25:7439792163048902474:2451] 2024-11-21T17:53:17.086647Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:17.086666Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 1, State: StateInit] bootstrapping 1 [26:7439792159195492471:2379] 2024-11-21T17:53:17.086929Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:17.086988Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:53:17.087284Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 1. Completed. 
2024-11-21T17:53:17.087300Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 1 generation 1 [26:7439792159195492471:2379] 2024-11-21T17:53:17.087305Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:17.087567Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 1 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:17.087618Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 1 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715679 CreateStep: 1732211597137 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: ... (TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 >> TGRpcYdbTest::ReadTable [GOOD] 
|84.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |84.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard >> TGRpcYdbTest::ReadTablePg >> TGroupMapperTest::Mirror3dc >> TGroupMapperTest::NonUniformCluster >> TGroupMapperTest::SanitizeGroupTest3dc >> TGroupMapperTest::MakeDisksUnusable >> TGroupMapperTest::NonUniformClusterMirror3dc >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TGroupMapperTest::Block42_1disk >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TGroupMapperTest::Mirror3dc [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2024-11-21T17:53:14.523793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792147435243187:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:14.524000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011a4/r3tmp/tmpo1fAsO/pdisk_1.dat 2024-11-21T17:53:14.569463Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17909, node 1 2024-11-21T17:53:14.586265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:14.586278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:14.586280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:14.586322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:14.609983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.610972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:14.610997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.611735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:14.611799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:14.611811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T17:53:14.612176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:14.612243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:14.612253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:14.612522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.613224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211594659, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:14.613236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:14.613288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:14.613632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:14.613688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:14.613702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:14.613722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:14.613734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:14.613750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:14.614130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:53:14.614146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:14.614150Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:14.614170Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:14.624139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:14.624164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:14.625706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:15.169241Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792153914660250:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:15.169320Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011a4/r3tmp/tmp1lTsAi/pdisk_1.dat 2024-11-21T17:53:15.184934Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13709, node 4 2024-11-21T17:53:15.204092Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:15.204104Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:15.204106Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:15.204143Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:15.269714Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:15.269755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:15.271265Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:15.272958Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.273047Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:15.273058Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.273514Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:15.273568Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:15.273578Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:15.273953Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:15.273964Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:15.273995Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:15.274256Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.275030Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211595324, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:15.275040Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:15.275096Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:15.275419Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:15.275461Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:15.275473Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:15.275487Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:15.275497Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:15.275509Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:15.275679Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940 ... 
4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.899847Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.899852Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:17.899899Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.899902Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.899903Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:53:17.899916Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.899918Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.899919Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:53:17.899931Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.899932Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.899934Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:53:17.899947Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.899949Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.899950Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T17:53:17.901740Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211597949, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:17.901753Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211597949, at schemeshard: 72057594046644480 2024-11-21T17:53:17.901780Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:53:17.901798Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211597949, at schemeshard: 72057594046644480 2024-11-21T17:53:17.901805Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 
240 2024-11-21T17:53:17.901813Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211597949, at schemeshard: 72057594046644480 2024-11-21T17:53:17.901820Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T17:53:17.901829Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732211597949 2024-11-21T17:53:17.901834Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T17:53:17.902254Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:53:17.902341Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:17.902436Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:17.902448Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T17:53:17.902462Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T17:53:17.902491Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T17:53:17.902496Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T17:53:17.902505Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:53:17.902509Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T17:53:17.902520Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T17:53:17.902524Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T17:53:17.902529Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:53:17.902537Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T17:53:17.902540Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T17:53:17.902547Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T17:53:17.902552Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T17:53:17.903065Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.903071Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.903075Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:53:17.903107Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 
72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.903109Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.903110Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T17:53:17.903122Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.903124Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.903125Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:53:17.903136Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.903139Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.903140Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:53:17.903151Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:17.903153Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:17.903155Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T17:53:17.903159Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:53:17.904406Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439792160513534011:2298], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:53:17.988771Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:53:17.988813Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:53:17.992868Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:53:17.996802Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NjFiNzkwN2YtNTI5NjJmNmYtNTY0NzI5OTYtMTE1MDk5ZTQ=, ActorId: [13:7439792160513533979:2292], ActorState: ExecuteState, TraceId: 01jd7xprhm01vfjd7nq7w0p3nw, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2024-11-21T17:53:18.008842Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NjFiNzkwN2YtNTI5NjJmNmYtNTY0NzI5OTYtMTE1MDk5ZTQ=, ActorId: [13:7439792160513533979:2292], ActorState: ExecuteState, TraceId: 01jd7xprnmez6sq0egvzp6f3fp, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2024-11-21T17:53:18.016589Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NjFiNzkwN2YtNTI5NjJmNmYtNTY0NzI5OTYtMTE1MDk5ZTQ=, ActorId: [13:7439792160513533979:2292], ActorState: ExecuteState, TraceId: 01jd7xprnw4zd6enw2f9f55g70, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Timeout |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] >> TGroupMapperTest::MonteCarlo |84.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> TGroupMapperTest::Block42_2disk >> TGRpcYdbTest::ReadTablePg [GOOD] >> TGRpcYdbTest::OperationTimeout >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> 
TMultiversionObjectMap::MonteCarlo >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge >> TGroupMapperTest::NonUniformCluster2 >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> YdbYqlClient::SecurityTokenAuth >> YdbTableBulkUpsert::Timeout [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |84.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TGRpcYdbTest::OperationTimeout [GOOD] |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Timeout [GOOD] Test command err: 2024-11-21T17:53:15.668821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792152801349022:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:15.668980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001187/r3tmp/tmpbvMHzn/pdisk_1.dat 2024-11-21T17:53:15.749912Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63650, node 1 2024-11-21T17:53:15.768977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:15.769004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:15.770498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:15.771182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2024-11-21T17:53:15.771193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:15.771196Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:15.771255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:15.804370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.805223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:15.805234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.805862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:15.805914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:15.805923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:53:15.806262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:15.806268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:15.806360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:15.806623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.807399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211595856, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:15.807408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:15.807463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:15.807784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:15.807809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:15.807819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:15.807829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:15.807838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:15.807846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:15.808132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:53:15.808146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:15.808149Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:15.808170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:15.991580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.991825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:53:15.992274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:15.992281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.000558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2024-11-21T17:53:16.000638Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:16.000702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:16.000721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:53:16.001322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:16.001331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:16.001335Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:16.001386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:16.001389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:16.001390Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:53:16.002108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:53:16.013107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 
2024-11-21T17:53:16.013451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:16.013567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 2814749 ... NFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 131 } } 2024-11-21T17:53:19.881025Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881067Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881078Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881089Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881098Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881107Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881118Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881126Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881135Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881146Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881155Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply 
TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881164Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881173Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881182Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881192Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881201Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881210Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881220Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881230Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881239Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.881250Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.882859Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037919 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037919 CpuTimeUsec: 143 } } 2024-11-21T17:53:19.882927Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 121 } } 2024-11-21T17:53:19.882943Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037907 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037907 CpuTimeUsec: 86 } } 2024-11-21T17:53:19.882959Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037903 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037903 CpuTimeUsec: 108 } } 2024-11-21T17:53:19.882976Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 
72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 103 } } 2024-11-21T17:53:19.882990Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037895 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037895 CpuTimeUsec: 152 } } 2024-11-21T17:53:19.883006Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 161 } } 2024-11-21T17:53:19.883021Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037916 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037916 CpuTimeUsec: 136 } } 2024-11-21T17:53:19.883039Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037897 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037897 CpuTimeUsec: 84 } } 2024-11-21T17:53:19.883056Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037917 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037917 CpuTimeUsec: 105 } } 2024-11-21T17:53:19.883073Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037915 Status: COMPLETE TxId: 281474976715658 Step: 1732211599916 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037915 CpuTimeUsec: 102 } } 2024-11-21T17:53:19.883251Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883316Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883368Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883378Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883387Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 
72057594046644480 2024-11-21T17:53:19.883396Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883406Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883651Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883670Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883682Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883693Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:53:19.883696Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:19.883706Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:53:19.885205Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:53:19.885226Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:53:19.885241Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 1 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 2 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 4 usec
: Error: Deadline exceeded 8 usec
: Error: Deadline exceeded 16 usec
: Error: Deadline exceeded 32 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 64 usec
: Error: Deadline exceeded 128 usec
: Error: Deadline exceeded 256 usec
: Error: Deadline exceeded 512 usec
: Error: Deadline exceeded 1024 usec
: Error: Deadline exceeded 2048 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 4096 usec |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::OperationTimeout [GOOD] Test command err: 2024-11-21T17:53:15.664308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792150940978598:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:15.664484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00118f/r3tmp/tmpe5WS55/pdisk_1.dat 2024-11-21T17:53:15.715561Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30114, node 1 2024-11-21T17:53:15.731679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:15.731696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:15.731698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:15.731748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:15.764494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:15.764534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:15.765828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:15.793452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.794375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:15.794389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.795024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:15.795106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:15.795118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:15.795489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:15.795499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:15.795679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:15.795891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.796791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211595842, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:15.796804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:15.796878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:15.797343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:15.797390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:15.797402Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:15.797426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:15.797437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:15.797447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:15.797866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:53:15.797884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:15.797891Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:15.797902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:53:16.315549Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792156972958310:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:16.315804Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00118f/r3tmp/tmpD2Hj1y/pdisk_1.dat 2024-11-21T17:53:16.333377Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6408, node 4 2024-11-21T17:53:16.352774Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:16.352790Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:16.352792Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:16.352841Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:16.416293Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:16.416334Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:16.418148Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:16.420544Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.420687Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:16.420698Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.421270Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:16.421337Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:16.421347Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:16.421899Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:16.422017Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:16.422027Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:16.422434Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:16.423637Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211596472, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:16.423656Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:16.423760Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:16.424327Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:16.424388Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:16.424405Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:16.424425Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:16.424441Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:16.424459Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:16.424668Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046 ... 
815480102:2314] Adding quota request to queue ShardId: 0, TxId: 281474976715664 2024-11-21T17:53:19.294308Z node 10 :READ_TABLE_API DEBUG: [10:7439792168815480102:2314] Assign stream quota to Shard 0, Quota 5, TxId 281474976715664 Reserved: 5 of 25, Queued: 0 2024-11-21T17:53:19.294519Z node 10 :READ_TABLE_API DEBUG: [10:7439792168815480102:2314] got stream part, size: 84, RU required: 128 rate limiter absent 2024-11-21T17:53:19.294726Z node 10 :READ_TABLE_API DEBUG: [10:7439792168815480102:2314] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T17:53:19.295286Z node 10 :READ_TABLE_API NOTICE: [10:7439792168815480102:2314] Finish grpc stream, status: 400000 2024-11-21T17:53:19.296104Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jd7xpsy0djxfc74nryxh9d9q, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49384, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T17:53:19.303460Z node 10 :READ_TABLE_API DEBUG: [10:7439792168815480124:2316] Adding quota request to queue ShardId: 0, TxId: 281474976715666 2024-11-21T17:53:19.303477Z node 10 :READ_TABLE_API DEBUG: [10:7439792168815480124:2316] Assign stream quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2024-11-21T17:53:19.303715Z node 10 :READ_TABLE_API DEBUG: [10:7439792168815480124:2316] got stream part, size: 210, RU required: 128 rate limiter absent 2024-11-21T17:53:19.303937Z node 10 :READ_TABLE_API DEBUG: [10:7439792168815480124:2316] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T17:53:19.304069Z node 10 :READ_TABLE_API NOTICE: [10:7439792168815480124:2316] Finish grpc stream, status: 400000 2024-11-21T17:53:19.305384Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6df900] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305468Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6ead00] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305502Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6dc200] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305532Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6d4000] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305560Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6de000] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305596Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6d9000] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305623Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6c3700] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305646Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6dcc00] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305669Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6e1700] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305693Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6d3b00] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305719Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6d0400] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305743Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6e3f00] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 
2024-11-21T17:53:19.305767Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6e3500] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305791Z node 10 :GRPC_SERVER DEBUG: [0x6b0ab2c1800] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305817Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6dae00] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305842Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6c5000] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305868Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6d6300] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305892Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6dd600] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305916Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6e3000] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305941Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6c5500] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305966Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6e1c00] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.305990Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6c5a00] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.306012Z node 10 :GRPC_SERVER DEBUG: [0x6b0bec85d80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.306038Z node 10 :GRPC_SERVER DEBUG: [0x6b0bf6e6200] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2024-11-21T17:53:19.966048Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439792167720323405:2098];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:19.966106Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00118f/r3tmp/tmpoZ6ivE/pdisk_1.dat 2024-11-21T17:53:19.997992Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28835, node 13 2024-11-21T17:53:20.012494Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:20.012508Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:20.012509Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:20.012544Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5807 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:20.065709Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:20.065741Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:20.067272Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:20.070294Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.070390Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:20.070400Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.070860Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:20.070907Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:20.070915Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:53:20.071250Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:20.071261Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:20.071311Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:20.071548Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.072219Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211600119, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:20.072245Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:20.072303Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:20.072689Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:20.072731Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:20.072744Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:20.072758Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:20.072769Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:20.072781Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:20.072905Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:53:20.072919Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:20.072922Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:20.072940Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1
: Error: Operation timeout. >> TOlapReboots::CreateStandaloneTable >> TOlapReboots::DropMultipleStandaloneTables >> YdbTableBulkUpsertOlap::UpsertArrowBatch >> YdbYqlClient::SecurityTokenAuth [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK >> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD] >> TPQCompatTest::ReadWriteSessions >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TGroupMapperTest::MapperSequentialCalls >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD] >> TPersQueueTest::PartitionsMapping |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TPartitionTests::CorrectRange_Commit >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TPQTabletTests::Multiple_PQTablets >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> TPQTabletTests::Multiple_PQTablets [GOOD] |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> TPartitionTests::CorrectRange_Multiple_Consumers |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TMultiversionObjectMap::MonteCarlo [GOOD] >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPartitionTests::ConflictingTxIsAborted >> TPQTabletTests::One_Tablet_For_All_Partitions >> TPartitionTests::ConflictingTxIsAborted [GOOD] |84.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] |84.9%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPartitionTests::ConflictingTxProceedAfterRollback >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TPQTest::TestWaitInOwners >> TPartitionTests::OldPlanStep >> TPQTest::TestPQPartialRead >> TPQTest::TestWriteSplit >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies >> TPartitionTests::OldPlanStep [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPQTabletTests::Huge_ProposeTransacton |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> TFetchRequestTests::HappyWay >> TPQTabletTests::Partition_Send_Predicate_With_False >> TPartitionTests::IncorrectRange >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> TPartitionTests::IncorrectRange [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations >> TPartitionTests::ReserveSubDomainOutOfSpace |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |85.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TPartitionTests::CommitOffsetRanges >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TMeteringSink::UsedStorageV1 [GOOD] >> TPartitionTests::ChangeConfig >> TPartitionTests::NonConflictingCommitsBatch >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPartitionTests::ChangeConfig [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |85.0%| [LD] {RESULT} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut >> TPQTabletTests::ProposeTx_Command_After_Propose >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::HeartbeatEmitter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] Test command err: 2024-11-21T17:53:24.686741Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:53:24.687577Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:53:24.687650Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T17:53:24.687665Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:53:24.687669Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T17:53:24.687674Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:53:24.687682Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.687689Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:24.687693Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:24.692309Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:24.692331Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2210], now have 1 active actors on pipe 2024-11-21T17:53:24.692344Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:24.693813Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions 
{ PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.694685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:24.694710Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.694976Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.695020Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T17:53:24.695087Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:24.695172Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:213:2217] 2024-11-21T17:53:24.695337Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 
2024-11-21T17:53:24.695345Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:213:2217] 2024-11-21T17:53:24.695352Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:24.695513Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T17:53:24.695520Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:53:24.695552Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.695588Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:24.695653Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.696614Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:24.696681Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:24.696686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:220:2222], now have 1 active actors on pipe 2024-11-21T17:53:24.696922Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:24.696928Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:225:2226], now have 1 active actors on pipe 2024-11-21T17:53:24.697086Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2024-11-21T17:53:24.697093Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T17:53:24.697106Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T17:53:24.697110Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:24.697115Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T17:53:24.697137Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:24.697149Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.697852Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse 
(WRITE_TX_COOKIE) 2024-11-21T17:53:24.697865Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T17:53:24.697869Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T17:53:24.698496Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T17:53:24.698509Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T17:53:24.698513Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2024-11-21T17:53:24.698518Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2024-11-21T17:53:24.698547Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:24.698560Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.699292Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:24.699305Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2024-11-21T17:53:24.699310Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2024-11-21T17:53:24.699313Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T17:53:24.699319Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T17:53:24.699327Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T17:53:24.699342Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2024-11-21T17:53:24.699357Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'user' Bad request (behind the last offset) EndOffset 0 End 2 2024-11-21T17:53:24.699390Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 0 2024-11-21T17:53:24.699395Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2024-11-21T17:53:24.699400Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T17:53:24.699403Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T17:53:24.699407Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:53:24.699411Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T17:53:24.699440Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 2 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 175 RawX2 ... 
56 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } 2024-11-21T17:53:26.005747Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:26.005942Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [5:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:26.005984Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T17:53:26.006092Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:26.006161Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:183:2196] 2024-11-21T17:53:26.006389Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T17:53:26.006398Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:183:2196] 2024-11-21T17:53:26.006406Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:26.006504Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 5 2024-11-21T17:53:26.006514Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 5 done 2024-11-21T17:53:26.006550Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:26.006588Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:26.006694Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:26.012309Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:26.012450Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:26.012461Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:190:2201], now have 1 active actors on pipe 2024-11-21T17:53:26.012958Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:26.012972Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:195:2205], now have 1 active actors on pipe 2024-11-21T17:53:26.012988Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:53:26.012996Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T17:53:26.013093Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] partition {0, {0, 3}, 100000} for WriteId {0, 3} 2024-11-21T17:53:26.013128Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvSubscribeLock for WriteId {0, 3} 2024-11-21T17:53:26.013150Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:26.021420Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:26.021538Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitConfigStep 2024-11-21T17:53:26.021612Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitInternalFieldsStep 2024-11-21T17:53:26.021664Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] bootstrapping {0, {0, 3}, 100000} [5:202:2211] 2024-11-21T17:53:26.021843Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:26.022051Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitMetaStep 2024-11-21T17:53:26.022085Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitInfoRangeStep 2024-11-21T17:53:26.022150Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitDataRangeStep 2024-11-21T17:53:26.022190Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Step TInitDataStep 2024-11-21T17:53:26.022195Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition {0, {0, 3}, 100000}. Completed. 
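The supportive partition {0, {0, 3}, 100000} above is initialized by running a fixed sequence of steps (TInitConfigStep, TInitInternalFieldsStep, TInitDiskStatusStep, TInitMetaStep, TInitInfoRangeStep, TInitDataRangeStep, TInitDataStep) before reporting Completed. Below is a minimal sketch of such a sequential step pipeline, assuming each step finishes synchronously; the step names are taken from the log, the driver code is not YDB's real initializer.

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Illustrative pipeline only: step names come from the log above; the
// sequencing is an assumption (a real tablet waits for KV responses between steps).
struct TInitStep {
    std::string Name;
    std::function<bool()> Run; // returns true when the step finished
};

int main() {
    const std::string partition = "{0, {0, 3}, 100000}";
    std::vector<TInitStep> steps = {
        {"TInitConfigStep",         [] { return true; }},
        {"TInitInternalFieldsStep", [] { return true; }},
        {"TInitDiskStatusStep",     [] { return true; }},
        {"TInitMetaStep",           [] { return true; }},
        {"TInitInfoRangeStep",      [] { return true; }},
        {"TInitDataRangeStep",      [] { return true; }},
        {"TInitDataStep",           [] { return true; }},
    };
    for (const auto& step : steps) {
        std::cout << "Initializing topic 'topic' partition " << partition
                  << ". Step " << step.Name << "\n";
        if (!step.Run()) return 1; // abort initialization if a step fails
    }
    std::cout << "Initializing topic 'topic' partition " << partition << ". Completed.\n";
}
```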
2024-11-21T17:53:26.022203Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {0, 3}, 100000} generation 2 [5:202:2211] 2024-11-21T17:53:26.022212Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] SYNC INIT topic topic partitition {0, {0, 3}, 100000} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:26.022324Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user reinit request with generation 5 2024-11-21T17:53:26.022331Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user reinit with generation 5 done 2024-11-21T17:53:26.022359Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Topic 'topic' partition {0, {0, 3}, 100000} user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:26.022389Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:26.022458Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId {0, {0, 3}, 100000} 2024-11-21T17:53:26.022497Z node 5 :PERSQUEUE INFO: new Cookie -=[ 0wn3r ]=-|799908e0-e4b6cb0e-6f6f3f99-2830f367_0 generated for partition {0, {0, 3}, 100000} topic 'topic' owner -=[ 0wn3r ]=- Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:26.028644Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:26.028701Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {0, {0, 3}, 100000} 2024-11-21T17:53:26.028729Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2024-11-21T17:53:26.028816Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:26.028823Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server disconnected, pipe [5:195:2205] destroyed 2024-11-21T17:53:26.028832Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:53:26.028856Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:26.028861Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:218:2221], now have 1 active actors on pipe 2024-11-21T17:53:26.028918Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 21474838668 } TxId: 2 Data { Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Immediate: false WriteId { NodeId: 0 KeyId: 3 } } 2024-11-21T17:53:26.028924Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PartitionId {0, {0, 3}, 100000} for WriteId {0, 3} 2024-11-21T17:53:26.028930Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 has WriteId {0, 3} 2024-11-21T17:53:26.028935Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T17:53:26.028948Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State UNKNOWN 2024-11-21T17:53:26.028952Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:26.028957Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARING 2024-11-21T17:53:26.028983Z node 5 :PERSQUEUE DEBUG: [TxId: 2] save tx TxId: 2 State: PREPARED MinStep: 233 MaxStep: 30233 Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } WriteId { NodeId: 0 KeyId: 3 } Partitions { } 2024-11-21T17:53:26.029000Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:26.029840Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:26.029852Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State PREPARING 2024-11-21T17:53:26.029856Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARED Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:26.030632Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:26.030644Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:233:2235], now have 1 active actors on pipe 2024-11-21T17:53:26.030662Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:53:26.030668Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T17:53:26.030675Z node 5 :PERSQUEUE WARN: tablet 72057594037927937 topic 'topic error: it is forbidden to write after a commit 2024-11-21T17:53:26.030687Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error it is forbidden to write after a commit 2024-11-21T17:53:26.030692Z node 5 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: it is forbidden to write after a commit >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] >> TUserAttrsTestWithReboots::AllowedSymbolsReboots [GOOD] |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> 
YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::RetryOperationSync ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2024-11-21T17:53:21.966001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792177006084946:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:21.966038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00117a/r3tmp/tmpNv4JTa/pdisk_1.dat 2024-11-21T17:53:22.139517Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22336, node 1 2024-11-21T17:53:22.184094Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:22.184115Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:22.184117Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:22.184167Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:22.273020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:22.274032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:22.274043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:22.279656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:22.279745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:22.279751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:53:22.280706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:22.280715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:22.281102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:22.282031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211602331, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:22.282042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:22.282106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:22.282531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:22.282586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:22.282596Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:22.282607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:22.282636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:22.282648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:53:22.283274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:53:22.283288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:22.283292Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:22.283305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 
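In the schemeshard trace above the AlterSubDomain operation advances through numeric phase codes ("Change state for txid 281474976715657:0 2 -> 3", then "3 -> 128", then "128 -> 240") while the adjacent messages name the TCreateParts, TConfigureParts, TPropose and TDone progress states. The mapping of codes to phase names below is inferred from that adjacency only and is shown purely for illustration; the authoritative definitions live in the YDB sources.

```cpp
#include <iostream>
#include <map>

// Inferred mapping, for illustration only: each numeric code is printed next to
// the named progress state in the log above, so the association is an assumption.
int main() {
    const std::map<int, const char*> phase = {
        {2,   "TCreateParts"},
        {3,   "TConfigureParts"},
        {128, "TPropose"},
        {240, "TDone"},
    };
    const int transitions[][2] = {{2, 3}, {3, 128}, {128, 240}};
    for (const auto& t : transitions) {
        std::cout << "Change state for txid 281474976715657:0 " << t[0] << " -> " << t[1]
                  << "  (" << phase.at(t[0]) << " -> " << phase.at(t[1]) << ")\n";
    }
}
```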
2024-11-21T17:53:22.288726Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:22.305888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:22.305917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:22.309081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8156 2024-11-21T17:53:22.325359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:22.325997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:53:22.326061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:22.326067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:22.332646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2024-11-21T17:53:22.332734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:22.332820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:22.332841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 2024-11-21T17:53:22.333498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:22.333511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:22.333516Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:22.333567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:22.333569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:22.333570Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 waiting... 
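The Hive warnings above show node 1 walking through the VolatileState transitions Unknown -> Disconnected -> Connecting -> Connected as it registers with HIVE#72057594037968897. The tiny sketch below just reproduces that sequence; the state names come from the log, and the strictly linear walk is an assumption for illustration, not Hive's real connection handling.

```cpp
#include <iostream>
#include <vector>

// Illustrative only: state names come from the HIVE warnings in the log above.
int main() {
    const std::vector<const char*> states = {"Unknown", "Disconnected", "Connecting", "Connected"};
    for (size_t i = 0; i + 1 < states.size(); ++i) {
        std::cout << "HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: "
                  << states[i] << " -> " << states[i + 1] << "\n";
    }
}
```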
2024-11-21T17:53:22.349032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:22.349161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:22.349177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:22.349193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:22.349199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:53:22.354765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:53:22.361104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:53:22.371818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:53:22.375118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:53:22.375185Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2024-11-21T17:53:22.375918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:53:22.375957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:53:22.376017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:53:22.376035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:53:22.376051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:53:22.376077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439792181301053081:2285];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11 ... 
0 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T17:53:26.751951Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:53:26.751963Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T17:53:26.751966Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T17:53:26.751968Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T17:53:26.751972Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T17:53:26.752127Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:26.752138Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:26.752141Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:53:26.752168Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:26.752177Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:26.752178Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:53:26.752191Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:26.752199Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:26.752201Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:53:26.752213Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:26.752221Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:26.752223Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:53:26.752247Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:26.752256Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 
281474976715659 2024-11-21T17:53:26.752257Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T17:53:26.752261Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T17:53:26.752335Z node 10 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T17:53:26.753254Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439792200300494086:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:53:26.808699Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:53:26.808747Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:53:26.816759Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:53:26.837840Z node 10 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T17:53:26.837902Z node 10 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715661 at tablet 72075186224037888 2024-11-21T17:53:26.839814Z node 10 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T17:53:26.841338Z node 10 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715661 at step 1732211606888 at tablet 72075186224037888 { Transactions { TxId: 281474976715661 AckTo { RawX1: 7439792200300493559 RawX2: 42949675321 } } Step: 1732211606888 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T17:53:26.841352Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:53:26.841380Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:53:26.841386Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:53:26.841393Z node 10 :TX_DATASHARD DEBUG: Found ready operation [1732211606888:281474976715661] in PlanQueue unit at 72075186224037888 2024-11-21T17:53:26.841436Z node 10 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1732211606888:281474976715661 keys extracted: 0 2024-11-21T17:53:26.841521Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:53:26.842640Z node 10 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1732211606888 txid# 281474976715661} 2024-11-21T17:53:26.842650Z node 10 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1732211606888} 2024-11-21T17:53:26.842660Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:53:26.842676Z node 10 :TX_DATASHARD DEBUG: Complete [1732211606888 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [10:7439792200300494185:2781], exec latency: 0 ms, propose latency: 
1 ms 2024-11-21T17:53:26.842683Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T17:53:26.843573Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xq16t8yvm2fm46m0198h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yjk3Yzc3OGUtNDI1ODA1ODctZTliZWZiZmUtOGNjNGIwNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:53:26.848371Z node 10 :TX_DATASHARD INFO: Start scan, at: [10:7439792200300494214:2074], tablet: [10:7439792200300493961:2298], scanId: 3, table: /Root/LogsX, gen: 1, deadline: 2024-11-21T18:03:26.844578Z 2024-11-21T17:53:26.848542Z node 10 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [10:7439792200300494214:2074], scanId: 3, table: /Root/LogsX, gen: 1, tablet: [10:7439792200300493961:2298], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2024-11-21T17:53:26.848548Z node 10 :TX_DATASHARD DEBUG: Wakeup driver at: [10:7439792200300494214:2074] 2024-11-21T17:53:26.848688Z node 10 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2024-11-21T17:53:26.848691Z node 10 :TX_DATASHARD DEBUG: TableRanges is over, at: [10:7439792200300494214:2074], scanId: 3, table: /Root/LogsX 2024-11-21T17:53:26.848697Z node 10 :TX_DATASHARD DEBUG: Finish scan, at: [10:7439792200300494214:2074], scanId: 3, table: /Root/LogsX, reason: 0, abortEvent: 2024-11-21T17:53:26.848703Z node 10 :TX_DATASHARD DEBUG: Send ScanData, from: [10:7439792200300494214:2074], to: [10:7439792200300494208:2317], scanId: 3, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2024-11-21T17:53:26.848896Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T17:53:26.848930Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T17:53:26.848937Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:53:26.848948Z node 10 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T17:53:26.848963Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T17:53:26.850967Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211606888, txId: 281474976715661] shutting down 2024-11-21T17:53:26.897303Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xq1a73zc18kgej3c8kcxd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWIwYWE5YTgtZDVkN2JhODktYjNiNmQxYzktN2ZhZWViZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8016;columns=9; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8016;columns=9; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=8984;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=8984;columns=10; 2024-11-21T17:53:26.907762Z node 10 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:139;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2024-11-21T17:53:26.908912Z node 10 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:139;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> TPQTest::TestSeveralOwners ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::AllowedSymbolsReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:52:52.800354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:52:52.800399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:52.800405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:52:52.800411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:52:52.808286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:52:52.808327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2024-11-21T17:52:52.808360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:52:52.808486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:52.988398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:52.988431Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:52:52.996758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:52.996910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:52:53.000947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:52:53.014340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:52:53.014458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:52:53.014578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.022121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:53.034081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.086399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:53.086445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.094738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:52:53.094789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:53.094818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:52:53.094931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:52:53.098372Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:52:53.134369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:52:53.137217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.137283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T17:52:53.145943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:52:53.146001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.146959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.146988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:52:53.147057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.147067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:52:53.158109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:52:53.158145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:52:53.159044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.159072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:52:53.159079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:52:53.170845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.170870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.170876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.184155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.184843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:52:53.185867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:52:53.188525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:52:53.197761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:52:53.197817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:52:53.197829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.197904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:52:53.197914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:52:53.197945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:52:53.197963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:52:53.198717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:52:53.198733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:52:53.198786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:52:53.198791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:52:53.198892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:52:53.198901Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:52:53.198913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:52:53.198917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.198922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:52:53.198927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:52:53.198932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:52:53.198936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:52:53.198952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:52:53.198959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:52:53.198963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2024-11-21T17:53:27.526232Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:53:27.526236Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2024-11-21T17:53:27.526239Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2024-11-21T17:53:27.526241Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2024-11-21T17:53:27.526244Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2024-11-21T17:53:27.526251Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T17:53:27.526255Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2024-11-21T17:53:27.526260Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2024-11-21T17:53:27.526262Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2024-11-21T17:53:27.526330Z node 126 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:53:27.526335Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:53:27.526338Z node 126 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:53:27.526341Z node 126 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T17:53:27.526343Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:53:27.526411Z node 126 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:53:27.526416Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1006 2024-11-21T17:53:27.526418Z node 126 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:53:27.526420Z node 126 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:53:27.526422Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T17:53:27.526426Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T17:53:27.527142Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1006 2024-11-21T17:53:27.527159Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 1007 2024-11-21T17:53:27.527738Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/Dir@" OperationType: ESchemeOpMkDir MkDir { Name: "Dir@" } } TxId: 1007 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:53:27.528051Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1007, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/Dir@/Dir@\', error: symbol \'@\' is not allowed in the path part \'Dir@\', source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:935" TxId: 1007 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:27.528086Z node 126 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1007, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/Dir@/Dir@', error: symbol '@' is not allowed in the path part 'Dir@', source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:935, operation: CREATE DIRECTORY, path: /MyRoot/Dir@/Dir@ TestModificationResult got TxId: 1007, wait until txId: 1007 TestWaitNotification wait txId: 1006 2024-11-21T17:53:27.528128Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:53:27.528134Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:53:27.528177Z node 126 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:53:27.528188Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:53:27.528191Z node 126 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [126:549:2508] TestWaitNotification: OK eventTxId 1006 2024-11-21T17:53:27.528254Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:53:27.528276Z node 126 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 27us result status StatusSuccess 2024-11-21T17:53:27.528337Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dir!" 
PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Dir0:" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Dir@" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1005 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:27.528372Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir@" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:53:27.528378Z node 126 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir@" took 8us result status StatusSuccess 2024-11-21T17:53:27.528398Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir@" PathDescription { Self { Name: "Dir@" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1005 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "Dir!" 
PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1006 CreateStep: 5000006 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:27.528427Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir!" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:53:27.528432Z node 126 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir!" took 7us result status StatusSuccess 2024-11-21T17:53:27.528447Z node 126 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir!" PathDescription { Self { Name: "Dir!" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionTests::CorrectRange_Multiple_Transactions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] Test command err: 2024-11-21T17:53:24.423583Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.423607Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.427346Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.427523Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 
2024-11-21T17:53:24.688015Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.688038Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.695199Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.695384Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:24.922571Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.922593Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.925558Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.925880Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:177:2192] 2024-11-21T17:53:24.925947Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2024-11-21T17:53:24.925964Z node 3 :PERSQUEUE INFO: new Cookie owner1|e6024f3a-847649ee-ef150223-ac1c7fa6_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:25.212982Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2024-11-21T17:53:25.374605Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:25.374649Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:25.381742Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:25.382222Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DEFRAG_SCHEDULER Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 6 Wait batch completion Wait kv request Wait tx committed for tx 3 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPartitionTests::CorrectRange_Rollback >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPartitionTests::DataTxCalcPredicateOk >> TPartitionTests::ConflictingCommitFails >> TPQCompatTest::ReadWriteSessions [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] |85.0%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> TPQTabletTests::UpdateConfig_2 >> TOlapReboots::CreateDropStandaloneTable >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPQTest::DirectReadBadSessionOrPipe >> TGroupMapperTest::NonUniformCluster [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] Test command err: 2024-11-21T17:53:23.100815Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:23.100842Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:23.132955Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:23.133342Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:23.483270Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:23.483295Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:23.486941Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:23.487186Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE 
} Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2024-11-21T17:53:23.727631Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:23.727661Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:23.974938Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:23.974967Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:23.978935Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:23.979262Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Wait batch completion Got batch complete: 2 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait batch completion Wait kv request Wait tx committed for tx 2 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait immediate tx complete 4 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 2024-11-21T17:53:25.634407Z node 5 :PERSQUEUE NOTICE: 
[PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:25.634450Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:25.638511Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:25.639073Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Wait batch completion Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait kv request Got batch complete: 1 Wait batch completion Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Got batch complete: 1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait batch completion Wait batch completion Got batch complete: 1 Wait kv request Wait tx committed for tx 3 Wait tx committed for tx 4 Create distr tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait batch completion Wait kv request Wait immediate tx complete 8 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 8 Wait immediate tx complete 9 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 9 >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster |85.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2024-11-21T17:51:57.984288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791818976205474:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.984302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.986344Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791817985169795:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.986378Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:58.013840Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:58.017953Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00200e/r3tmp/tmpsDRvFN/pdisk_1.dat 2024-11-21T17:51:58.051875Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25112, node 1 2024-11-21T17:51:58.061768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00200e/r3tmp/yandexpF76xN.tmp 2024-11-21T17:51:58.061790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00200e/r3tmp/yandexpF76xN.tmp 2024-11-21T17:51:58.061868Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00200e/r3tmp/yandexpF76xN.tmp 2024-11-21T17:51:58.061909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:58.065687Z INFO: TTestServer started on Port 12221 GrpcPort 25112 TClient is connected to server localhost:12221 PQClient connected to localhost:25112 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:58.084799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:58.084829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:58.086328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:58.123021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:58.123049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:58.124335Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:58.124527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.124673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:51:58.139626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:58.274751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791822280137443:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.274771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791822280137452:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.274775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:58.275693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:51:58.279361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791822280137458:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:51:58.308953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.309221Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791823271173821:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:58.309351Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2JkYzBlZmYtOTY0OTUxOTctYjg2YWUwODgtNmQ5NDVkN2E=, ActorId: [1:7439791823271173780:2300], ActorState: ExecuteState, TraceId: 01jd7xmataee7p2k0sjv4gjxv5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:58.309895Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:58.367050Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791822280137499:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:58.367148Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWM5YTQxYjItM2RkZDdlZGEtMTU0MTJhY2QtOGVhZDIzMDc=, ActorId: [2:7439791822280137427:2280], ActorState: ExecuteState, TraceId: 01jd7xmat2dga1wn9e8z7ggxr3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:58.367324Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:58.367411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.430806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:58.460009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jd7xmaza9ev2c06ed5t0hzcb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U2OWU4N2YtZTNiOWExZDQtNTQ2YmI1ZGMtYmNjNGJjMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791823271174241:3028] 2024-11-21T17:52:02.984506Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791818976205474:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:02.984571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:52:02.986541Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791817985169795:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:02.986586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok CreateTopicNoLegacy: /Root/PQ/rt3.dc1--demo Create topic: /Root/PQ/rt3.dc1--demo AddTopic: /Root/PQ/rt3.dc1--demo ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = /Root/PQ/rt3.dc1/demo, dc = unknown 2024-11-21T17:52:04.472573Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2024-11-21T17:52:04.478071Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439791849040978716:3430] connected; active server actors: 1 2024-11-21T17:52:04.478214Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--demo] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:52:04.478360Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--demo] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:52:04.479034Z node 1 :PERSQUEUE_R ... 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_549066650044749532_v1 grpc read failed 2024-11-21T17:53:28.886256Z node 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_549066650044749532_v1 grpc closed 2024-11-21T17:53:28.886267Z node 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_549066650044749532_v1 is DEAD 2024-11-21T17:53:28.886318Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:28.886331Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_27_5_549066650044749532_v1 2024-11-21T17:53:28.886335Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [27:7439792210094228160:2576] destroyed 2024-11-21T17:53:28.886343Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_5_549066650044749532_v1 2024-11-21T17:53:28.886370Z node 27 :PQ_READ_PROXY DEBUG: new grpc connection 2024-11-21T17:53:28.886377Z node 27 :PQ_READ_PROXY DEBUG: new session created cookie 6 2024-11-21T17:53:28.886445Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2024-11-21T17:53:28.886479Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 read init: from# ipv6:[::1]:42878, request# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2024-11-21T17:53:28.886500Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [27:7439792210094228158:2573] disconnected; active server actors: 1 2024-11-21T17:53:28.886523Z node 28 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [27:7439792210094228158:2573] client user disconnected session shared/user_27_5_549066650044749532_v1 2024-11-21T17:53:28.886538Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 auth for : user 2024-11-21T17:53:28.886549Z node 27 :PQ_METACACHE DEBUG: Handle describe topics 2024-11-21T17:53:28.886550Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2024-11-21T17:53:28.886557Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 
requests infly 2024-11-21T17:53:28.886659Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T17:53:28.886678Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2024-11-21T17:53:28.886690Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 Handle describe topics response 2024-11-21T17:53:28.886710Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 auth is DEAD 2024-11-21T17:53:28.886715Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 auth ok: topics# 1, initDone# 0 2024-11-21T17:53:28.886912Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 register session: topic# rt3.dc2--account--topic2 ===Got response: status: SUCCESS init_response { session_id: "shared/user_27_6_4559074410670160848_v1" } 2024-11-21T17:53:28.887007Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7439792210094228169:2577] connected; active server actors: 1 2024-11-21T17:53:28.887092Z node 28 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7439792210094228169:2577] session shared/user_27_6_4559074410670160848_v1 2024-11-21T17:53:28.887100Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 2024-11-21T17:53:28.887106Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2024-11-21T17:53:28.887112Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_4559074410670160848_v1" (Sender=[27:7439792210094228166:2577], Pipe=[27:7439792210094228169:2577], Partitions=[], ActiveFamilyCount=0) 2024-11-21T17:53:28.887120Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 2024-11-21T17:53:28.887127Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. 
Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T17:53:28.887137Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_4559074410670160848_v1" (Sender=[27:7439792210094228166:2577], Pipe=[27:7439792210094228169:2577], Partitions=[], ActiveFamilyCount=0) 2024-11-21T17:53:28.887148Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_4559074410670160848_v1" sender [27:7439792210094228166:2577] lock partition 0 for ReadingSession "shared/user_27_6_4559074410670160848_v1" (Sender=[27:7439792210094228166:2577], Pipe=[27:7439792210094228169:2577], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2024-11-21T17:53:28.887163Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T17:53:28.887170Z node 28 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000036s 2024-11-21T17:53:28.887341Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_4559074410670160848_v1" ClientId: "user" PipeClient { RawX1: 7439792210094228169 RawX2: 4503715591490065 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2024-11-21T17:53:28.887356Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2024-11-21T17:53:28.887412Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1 2024-11-21T17:53:28.887421Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:28.887424Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7439792210094228171:2580], now have 1 active actors on pipe 2024-11-21T17:53:28.887429Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2024-11-21T17:53:28.887432Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2024-11-21T17:53:28.887436Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Created session shared/user_27_6_4559074410670160848_v1 on pipe: [27:7439792210094228171:2580] 2024-11-21T17:53:28.887441Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_27_6_4559074410670160848_v1:1 with generation 1 2024-11-21T17:53:28.887457Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_4559074410670160848_v1 2024-11-21T17:53:28.887484Z node 27 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T17:53:28.888034Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2024-11-21T17:53:28.888059Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:28.888064Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1732211608878 CreateTimestampMS: 1732211608878 SizeLag: 0 WriteTimestampEstimateMS: 0 } Cookie: 18446744073709551615 } 2024-11-21T17:53:28.888068Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2024-11-21T17:53:28.888077Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 sending to client partition status ===Got response: status: SUCCESS start_partition_session_request { partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2024-11-21T17:53:28.888421Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 grpc read done: success# 0, data# { } 2024-11-21T17:53:28.888428Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 grpc read failed 2024-11-21T17:53:28.888431Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 grpc closed 2024-11-21T17:53:28.888441Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_4559074410670160848_v1 is DEAD 2024-11-21T17:53:28.888487Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:28.888497Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_4559074410670160848_v1 2024-11-21T17:53:28.888501Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7439792210094228171:2580] destroyed 2024-11-21T17:53:28.888504Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_4559074410670160848_v1 2024-11-21T17:53:28.888582Z node 28 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7439792210094228169:2577] disconnected; active server actors: 1 2024-11-21T17:53:28.888591Z node 28 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7439792210094228169:2577] client user disconnected session shared/user_27_6_4559074410670160848_v1 >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD] >> TPersQueueTest::ReadWithoutConsumerFederation >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata >> TPQRBDescribes::PartitionLocations [GOOD] >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TPQTest::DirectReadOldPipe [GOOD] >> 
TPQTest::TestAccountReadQuota >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> TPQTestInternal::TestPartitionedBigTest |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |85.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2024-11-21T17:53:24.088500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792191747155235:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:24.098773Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792189789528025:2254];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000dbd/r3tmp/tmpkQkPf5/pdisk_1.dat 2024-11-21T17:53:24.117674Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:24.118786Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:24.123135Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:24.124286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:24.149074Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63736, node 1 2024-11-21T17:53:24.172857Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000dbd/r3tmp/yandexK1211K.tmp 2024-11-21T17:53:24.172879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000dbd/r3tmp/yandexK1211K.tmp 2024-11-21T17:53:24.172944Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000dbd/r3tmp/yandexK1211K.tmp 2024-11-21T17:53:24.172999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:53:24.176901Z INFO: TTestServer started on Port 63721 GrpcPort 63736 TClient is connected to server localhost:63721 PQClient connected to localhost:63736 WaitRootIsUp 'Root'... 
TClient::Ls request: 2024-11-21T17:53:24.187097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:24.187124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Root TClient::Ls response: 2024-11-21T17:53:24.194979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:24.224161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:53:24.228808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:24.228838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:24.234293Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 waiting... 2024-11-21T17:53:24.235644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:24.240051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:53:24.408656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792191747156084:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:24.408675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792191747156088:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:24.408679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:24.409337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:53:24.412792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792191747156098:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:53:24.431966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:53:24.441839Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439792189789528201:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:24.441976Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjQ5NzdhNi00ZDdkMjgxNi1iNmY1YjVjYS00NWRhYTFhNg==, ActorId: [2:7439792189789528162:2280], ActorState: ExecuteState, TraceId: 01jd7xpyynaq5x8jyfn9bdjz3c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:24.442511Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:24.491348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:53:24.509733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:53:24.513342Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792191747156475:2334], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:24.513733Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWIzNDIwYjYtMmI1M2JkOTktOTcyZDI1NTktODE3MGYyMzU=, ActorId: [1:7439792191747156081:2300], ActorState: ExecuteState, TraceId: 01jd7xpyxpccawd47j4y4szj1a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:24.513866Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:53:24.536907Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xpz1914tbnzksz13xx89j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNjNjMzMWQtYzFkZTY2MGUtOTljODcwN2UtMzRhYWY1ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439792191747156625:3063] 2024-11-21T17:53:29.087970Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439792191747155235:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:29.088015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:53:29.095345Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439792189789528025:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:29.095390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic with 5 partitions CallPersQueueGRPC request to localhost:63736 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2024-11-21T17:53:29.710287Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:63736 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } ... 
count--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:31.265895Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-3 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:31.265929Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:31.265988Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:31.266563Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:31.266754Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:31.266760Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [3:338:2315], now have 1 active actors on pipe 2024-11-21T17:53:31.266787Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2024-11-21T17:53:31.266792Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2024-11-21T17:53:31.266796Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2024-11-21T17:53:31.266801Z node 3 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:31.399095Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 100, TxId 67890 2024-11-21T17:53:31.399166Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvProposePartitionConfigResult Step 100, TxId 67890, Partition 1 2024-11-21T17:53:31.399175Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvProposePartitionConfigResult 2024-11-21T17:53:31.399182Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 2/2 2024-11-21T17:53:31.399188Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T17:53:31.399192Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2024-11-21T17:53:31.399198Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T17:53:31.399321Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 135 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } 
AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 12884904078 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2024-11-21T17:53:31.399357Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:31.400682Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:31.400699Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2024-11-21T17:53:31.400703Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T17:53:31.400718Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2024-11-21T17:53:31.400722Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T17:53:31.400741Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T17:53:31.400745Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 2 2024-11-21T17:53:31.400768Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T17:53:31.400798Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2024-11-21T17:53:31.400802Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2024-11-21T17:53:31.400806Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2024-11-21T17:53:31.400843Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T17:53:31.400892Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:53:31.400966Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:31.401982Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:31.402027Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T17:53:31.402034Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T17:53:31.402037Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2024-11-21T17:53:31.402432Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:31.402457Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2024-11-21T17:53:31.402462Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T17:53:31.402465Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2024-11-21T17:53:31.402473Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T17:53:31.402541Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2024-11-21T17:53:31.402550Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:31.402558Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T17:53:31.402562Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T17:53:31.402624Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 135 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } 
BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 12884904078 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2024-11-21T17:53:31.402655Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:31.403453Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:31.403466Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2024-11-21T17:53:31.403473Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T17:53:31.403480Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2024-11-21T17:53:31.403485Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T17:53:31.403489Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T17:53:31.403493Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T17:53:31.403496Z node 3 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T17:53:31.403499Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2024-11-21T17:53:31.403504Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2024-11-21T17:53:31.403508Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2024-11-21T17:53:31.403516Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:31.405020Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:31.405032Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State DELETING 2024-11-21T17:53:31.405036Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:31.405039Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete TxId 67890 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQCompact >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogCountByResource |85.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> TPartitionTests::DataTxCalcPredicateError >> YdbS3Internal::TestS3Listing >> TPQTest::TestPartitionTotalQuota >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TGroupMapperTest::Block42_1disk [GOOD] >> 
TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionTests::Batching |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] >> TPartitionTests::Batching [GOOD] |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |85.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> TPartitionTests::AfterRestart_1 >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::BadTopicName >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck >> TPartitionTests::AfterRestart_1 [GOOD] >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration >> TPartitionTests::AfterRestart_2 [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::AfterRestart_2 [GOOD] Test command err: 2024-11-21T17:53:26.108376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792198680666324:2197];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:26.117483Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792198465841582:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:26.161288Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d61/r3tmp/tmpG9Zrnz/pdisk_1.dat 2024-11-21T17:53:26.162116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:26.160648Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:26.161940Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:26.306872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:26.306901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:26.307198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:26.307211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:26.308159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:26.308991Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:53:26.312209Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27558, node 1 2024-11-21T17:53:26.402922Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:53:26.436213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000d61/r3tmp/yandexxWR2KU.tmp 2024-11-21T17:53:26.436242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000d61/r3tmp/yandexxWR2KU.tmp 2024-11-21T17:53:26.436309Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000d61/r3tmp/yandexxWR2KU.tmp 2024-11-21T17:53:26.436375Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:53:26.443057Z INFO: TTestServer started on Port 1306 GrpcPort 27558 TClient is connected to server localhost:1306 PQClient connected to localhost:27558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:26.495214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:53:26.546785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:53:26.549670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:53:26.632556Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:53:26.785564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792198680667165:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:26.785617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:26.787403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792198680667200:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:26.788360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:53:26.798894Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T17:53:26.799732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792198680667202:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:53:26.844495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:53:26.869285Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792198680667388:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:26.869704Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQzZDk5ODktNDdmZTU3NjctNDA3MjgzMGItZGU2NGZmY2M=, ActorId: [1:7439792198680667161:2299], ActorState: ExecuteState, TraceId: 01jd7xq17y9ajje5ze0n7ksc5j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:26.870246Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:26.934988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:53:26.943811Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439792198465841811:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:26.944332Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTllYTdhYWItNjc0MzdmMDctNjIxNzM3OTctZjM5YWYzZDg=, ActorId: [2:7439792198465841760:2279], ActorState: ExecuteState, TraceId: 01jd7xq1chav3eq1zysh366bkt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:26.944497Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:27.032783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:53:27.141052Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd7xq1jfa8sy82en1f8gjxbc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY0ZTI4NzItOWE2MWRjYmItOTkyNjgyMzAtNTA2NWYyMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439792202975635010:3025] 2024-11-21T17:53:31.092790Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439792198680666324:2197];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:31.092833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:53:31.117977Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439792198465841582:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:31.118020Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:53:33.086040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:1, at schemeshard: 72057594046644480 2024-11-21T17:53:33.157160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T17:53:33.216937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTab ... 
tax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:53:33.471089Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2024-11-21T17:53:33.471123Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [1:7439792228745440089:3789], Recipient [1:7439792228745439404:3364]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:53:33.473595Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [1:7439792228745440088:3789], Recipient [1:7439792228745439404:3364]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2024-11-21T17:53:33.483167Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [1:7439792228745439404:3364], Recipient [1:7439792228745440088:3789]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T17:53:33.483193Z node 1 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2024-11-21T17:53:33.483249Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [1:7439792228745440088:3789], Recipient [1:7439792228745439404:3364]: NActors::TEvents::TEvPoison 2024-11-21T17:53:33.483280Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [1:7439792198680666168:2049], Recipient [1:7439792228745440088:3789]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T17:53:33.483294Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:53:33.483836Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [1:7439792198680666248:2126], Recipient [1:7439792228745440088:3789]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=1&id=YmViYzBlOWMtMTVhMTUwNTEtNTFkOTY5ZDEtMzNiN2IzZmM=" NodeId: 1 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T17:53:33.483854Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:53:33.499975Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [1:7439792198680666248:2126], Recipient [1:7439792228745440088:3789]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=1&id=YmViYzBlOWMtMTVhMTUwNTEtNTFkOTY5ZDEtMzNiN2IzZmM=" PreparedQuery: "c2414c3e-179cd3c4-c14811c1-58e6168f" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd7xq7ssccmn1evfvy5zd9ke" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { 
name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1732211613456 } items { uint64_value: 1732211613456 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 9 2024-11-21T17:53:33.500031Z node 1 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2024-11-21T17:53:33.500040Z node 1 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2024-11-21T17:53:33.500076Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [1:7439792228745440113:3789], Recipient [1:7439792228745439404:3364]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:53:33.500091Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [1:7439792228745440088:3789], Recipient [1:7439792228745439404:3364]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2024-11-21T17:53:33.500110Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [1:7439792228745439404:3364], Recipient [1:7439792228745440088:3789]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T17:53:33.500119Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2024-11-21T17:53:33.500157Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [1:7439792228745440088:3789], Recipient [1:7439792228745439404:3364]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 2024-11-21T17:53:33.512368Z node 1 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [1:7439792198680666248:2126], Recipient [1:7439792228745440088:3789]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=1&id=YmViYzBlOWMtMTVhMTUwNTEtNTFkOTY5ZDEtMzNiN2IzZmM=" PreparedQuery: "be89d9f0-7ebb8061-168ed8d3-8fa5ca1f" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 6 2024-11-21T17:53:33.512382Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:53:33.512391Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2024-11-21T17:53:33.512398Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7439792228745440088:3789] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = 
"00415F536F757263655F35" 2024-11-21T17:53:33.527237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710705. Ctx: { TraceId: 01jd7xq7tacfnp22hbpqsgfgrg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWEyZDQzNDQtYTY0MmM0ODUtNGJkYWQzYmUtNjhlZWU1NGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:53:33.965223Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:33.965243Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:33.967774Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:177:2192] 2024-11-21T17:53:33.967896Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:34.200377Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:34.200397Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:34.203190Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [4:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:34.203417Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [4:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd 
write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2024-11-21T17:53:34.449079Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:34.449101Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:34.451791Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:34.452065Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> TPartitionTests::ConflictingCommitFails [GOOD] >> YdbS3Internal::TestAccessCheck [GOOD] >> YdbS3Internal::BadRequests >> TPartitionTests::ConflictingCommitProccesAfterRollback >> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse >> YdbOlapStore::LogCountByResource [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> TTableProfileTests::UseDefaultProfile >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2024-11-21T17:53:25.132332Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:25.132372Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:25.137370Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:25.137579Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd 
write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:25.440452Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:25.440490Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:25.514453Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send change config 2024-11-21T17:53:25.515039Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:25.869262Z node 3 
:PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:25.869291Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:25.873101Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:25.873383Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:176:2191] 2024-11-21T17:53:25.873436Z node 3 :PERSQUEUE INFO: new Cookie src1|95edb797-182c988a-775cab14-89fab14c_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T17:53:25.873506Z node 3 :PERSQUEUE INFO: new Cookie src4|dd9f113d-a9e8ab00-3367321c-dba210f8_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to 
EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 11 and act no: 12 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NOD ... 
NC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Wait batch completion Got batch complete: 1 Wait kv request Wait tx committed for tx 2 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Wait for no tx committed Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with 
id = 4 and act no: 5 Created Tx with id 7 as act# 7 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait batch completion Wait kv request Got batch complete: 1 Wait batch completion Wait kv request Create distr tx with id = 8 and act no: 9 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to 
BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait kv request Wait immediate tx complete 10 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 11 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2024-11-21T17:53:35.539151Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:35.539172Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:35.542004Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:35.542289Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured 
TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Wait batch completion Wait kv request Wait tx committed for tx 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait for no tx committed Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> YdbS3Internal::BadRequests [GOOD] >> YdbScripting::BasicV0 >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSourceIdDropByUserWrites >> TGRpcClientLowTest::SimpleRequest >> TGroupMapperTest::Block42_2disk [GOOD] >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::UseTableProfilePreset >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] Test command err: 2024-11-21T17:53:28.658097Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:28.658119Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:28.661731Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:28.662002Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: 
"I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:28.903899Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:28.903919Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:28.906398Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:28.906601Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:29.195178Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:29.195205Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:29.199696Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:29.200002Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait first predicate result Got batch complete: 1 Create distr tx with id = 2 and act no: 3 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait second predicate result Got batch complete: 1 Send disk status response with cookie: 0 2024-11-21T17:53:31.696514Z node 3 :PERSQUEUE INFO: new Cookie owner1|ab56f5de-f8b0b6c4-8b8df8cb-bffe2a40_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got batch complete: 1 Got batch complete: 1 Send disk status response with cookie: 0 Wait third predicate result Create distr tx with id = 4 and act no: 5 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSyste ... 
594037927937] doesn't have tx writes info 2024-11-21T17:53:33.067121Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:33.067388Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:33.067470Z node 4 :PERSQUEUE INFO: new Cookie SourceId|e3c1fcc0-a91ef92c-6121d2db-27c6e2ec_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId Got batch complete: 1 Wait write response Wait kv request Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait second predicate result Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2024-11-21T17:53:34.511473Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:34.511498Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:34.514201Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:34.514483Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:174:2189] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Create distr tx with id = 2 and act no: 3 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Send disk status response with cookie: 0 Wait tx committed for tx 0 Wait tx committed for tx 2 >> test.py::test[table_range-range_tables_with_view--Plan] |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2024-11-21T17:52:44.656526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792021264946746:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:44.656540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011c6/r3tmp/tmpqQXMpP/pdisk_1.dat 2024-11-21T17:52:44.737226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2143, node 1 2024-11-21T17:52:44.752625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:44.752639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:44.752641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:44.752676Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:44.756869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:44.756891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:44.758724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TClient is connected to server localhost:6200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:44.816176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.816906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:44.816922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.817664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:44.817715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:52:44.817724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:52:44.819952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:52:44.820294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:52:44.820304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:52:44.820728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.821892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211564867, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:52:44.821906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:52:44.821982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:52:44.822416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:52:44.822472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:52:44.822487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:52:44.822502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:52:44.822514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:52:44.822532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:52:44.822942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:52:44.822960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:52:44.822970Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:52:44.822991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-1" reason: "YELLOW-9a33-e9e2-2" reason: "YELLOW-9a33-e9e2-3" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-3" 
status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 1 host: "::1" port: 12001 } 2024-11-21T17:52:45.530013Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792021700280065:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:45.530216Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:45.531985Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792022120648460:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:45.532296Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:45.532743Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439792021751987587:2222];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0011c6/r3tmp/tmpO1FMB5/pdisk_1.dat 2024-11-21T17:52:45.537041Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:45.541891Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62511, node 4 2024-11-21T17:52:45.560317Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:45.560329Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:45.560332Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:45.560360Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:52:45.630407Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:45.630447Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:45.632132Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:45.632602Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976725657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:45.632716Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:52:45.632730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:45.633271Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:52:45.633323Z node ... -11-21T17:53:35.214471Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038372:3552], TxId: 281474976715773, task: 9. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [47:7439792239078038356:3538] TaskId: 9 Stats: CpuTimeUs: 50 Tasks { TaskId: 9 CpuTimeUs: 34 ComputeCpuTimeUs: 1 BuildCpuTimeUs: 33 HostName: "ghrun-2rqap2spfm" NodeId: 47 StartTimeMs: 1732211615210 } MaxMemoryUsage: 1048576 2024-11-21T17:53:35.214480Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038372:3552], TxId: 281474976715773, task: 9. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Received channels info: Update { Id: 9 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 9 DstTaskId: 65 SrcEndpoint { ActorId { RawX1: 7439792239078038372 RawX2: 4503801490836960 } } DstEndpoint { ActorId { RawX1: 7439792239078038445 RawX2: 4503801490837016 } } InMemory: true DstStageId: 1 } 2024-11-21T17:53:35.214482Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038372:3552], TxId: 281474976715773, task: 9. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Update output channelId: 9, peer: [47:7439792239078038445:3608] 2024-11-21T17:53:35.214485Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038372:3552], TxId: 281474976715773, task: 9. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. POLL_SOURCES:START:0;fs=8388608 2024-11-21T17:53:35.214486Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038372:3552], TxId: 281474976715773, task: 9. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CustomerSuppliedId : . 
TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. POLL_SOURCES:FINISH 2024-11-21T17:53:35.214493Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [47:7439792239078038441:3604], task: 61 2024-11-21T17:53:35.214496Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2024-11-21T17:53:35.214498Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. EVLOGKQP START 2024-11-21T17:53:35.214565Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Received channels info: Update { Id: 61 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 61 DstTaskId: 65 SrcEndpoint { ActorId { RawX1: 7439792239078038441 RawX2: 4503801490837012 } } DstEndpoint { ActorId { RawX1: 7439792239078038445 RawX2: 4503801490837016 } } InMemory: true DstStageId: 1 } 2024-11-21T17:53:35.214567Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Update output channelId: 61, peer: [47:7439792239078038445:3608] 2024-11-21T17:53:35.214569Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. POLL_SOURCES:START:0;fs=8388608 2024-11-21T17:53:35.214571Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. POLL_SOURCES:FINISH 2024-11-21T17:53:35.214582Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038441:3604], TxId: 281474976715773, task: 61. Ctx: { SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CustomerSuppliedId : . 
CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [47:7439792239078038356:3538] TaskId: 61 Stats: CpuTimeUs: 66 Tasks { TaskId: 61 CpuTimeUs: 47 BuildCpuTimeUs: 47 HostName: "ghrun-2rqap2spfm" NodeId: 47 StartTimeMs: 1732211615214 } MaxMemoryUsage: 1048576 2024-11-21T17:53:35.214589Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [47:7439792239078038442:3605], task: 62 2024-11-21T17:53:35.214592Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Set periodic stats 0.100000s 2024-11-21T17:53:35.214594Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. EVLOGKQP START 2024-11-21T17:53:35.214659Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: Update { Id: 62 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 62 DstTaskId: 65 SrcEndpoint { ActorId { RawX1: 7439792239078038442 RawX2: 4503801490837013 } } DstEndpoint { ActorId { RawX1: 7439792239078038445 RawX2: 4503801490837016 } } InMemory: true DstStageId: 1 } 2024-11-21T17:53:35.214661Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Update output channelId: 62, peer: [47:7439792239078038445:3608] 2024-11-21T17:53:35.214663Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. POLL_SOURCES:START:0;fs=8388608 2024-11-21T17:53:35.214664Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
POLL_SOURCES:FINISH 2024-11-21T17:53:35.214675Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038442:3605], TxId: 281474976715773, task: 62. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xq9e650vjbt81xhgh8qaw. SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [47:7439792239078038356:3538] TaskId: 62 Stats: CpuTimeUs: 63 Tasks { TaskId: 62 CpuTimeUs: 35 BuildCpuTimeUs: 35 HostName: "ghrun-2rqap2spfm" NodeId: 47 StartTimeMs: 1732211615214 } MaxMemoryUsage: 1048576 2024-11-21T17:53:35.214683Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038443:3606], TxId: 281474976715773, task: 63. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [47:7439792239078038443:3606], task: 63 2024-11-21T17:53:35.214686Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038443:3606], TxId: 281474976715773, task: 63. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Set periodic stats 0.100000s 2024-11-21T17:53:35.214687Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038443:3606], TxId: 281474976715773, task: 63. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. EVLOGKQP START 2024-11-21T17:53:35.214736Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439792239078038443:3606], TxId: 281474976715773, task: 63. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=47&id=Mzg1ZTk2OTUtZWFkNDRmZjAtZTY4ODZjOS0xZjlhYWVhZA==. TraceId : 01jd7xq9e650vjbt81xhgh8qaw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
Received channels info: Update { Id: 63 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 63 DstTaskId: 65 SrcEndpoint { ActorId { RawX1: 7439792239078038443 RawX2: 4503801490837014 } } DstEndpoint { ActorId { RawX1: 7439792239078038445 RawX2: 4503801490837016 } } InMemory: true DstStageId: 1 } |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_2disk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2024-11-21T17:53:31.907615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792221654861572:2059];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:31.907730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00116f/r3tmp/tmpXF4CDy/pdisk_1.dat 2024-11-21T17:53:31.969642Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19239, node 1 2024-11-21T17:53:31.988186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:31.988198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:31.988200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:31.988255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5525 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:53:32.007493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:32.007524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:53:32.010460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:32.039169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:32.040214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:32.040255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:32.042469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:32.042534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:32.042542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:32.042955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:32.042968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:32.043066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:32.043408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:32.044449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211612089, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:32.044462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:32.044525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:32.044864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:32.044920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:32.044934Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:32.044945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:32.044961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:32.044973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:32.045506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:53:32.045521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:32.045526Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:32.045583Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:53:32.075894Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:34414) has now valid token of root@builtin 2024-11-21T17:53:32.097542Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T17:53:32.727396Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792226339452835:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:32.727410Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00116f/r3tmp/tmp48CjYT/pdisk_1.dat 2024-11-21T17:53:32.743546Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18309, node 4 2024-11-21T17:53:32.761619Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:32.761632Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:32.761633Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:32.761669Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:32.826129Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:32.826157Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:32.827718Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:32.831898Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:32.832004Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:32.832015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:32.832518Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:32.832575Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:32.832602Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:32.832976Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:32.832987Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:32.833081Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:32.833344Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:32.834109Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211612880, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:32.834122Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:32.834184Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:32.834571Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:32.834642Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:32.834660Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:32.834677Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:32.834692Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 28147497 ... eTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:36.109082Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:36.109121Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:36.110628Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:36.113330Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.113463Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:36.113475Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.113954Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:36.114049Z node 19 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:36.114068Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:36.114489Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:36.114503Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:36.114552Z node 19 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:36.114898Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.115827Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211616163, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:36.115843Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:36.115928Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:36.116306Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:36.116349Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:36.116363Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:36.116375Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:36.116381Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:36.116399Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:36.116589Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T17:53:36.116606Z node 19 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:36.116610Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:36.116624Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1732218816020840 Nodes { NodeId: 1024 Host: "localhost" Port: 8372 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1732218816020840 } Nodes { NodeId: 19 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 20 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 21 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2024-11-21T17:53:36.665419Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7439792243219876718:2122];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:36.665488Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00116f/r3tmp/tmpqY4zMR/pdisk_1.dat 2024-11-21T17:53:36.683600Z node 22 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29286, node 22 2024-11-21T17:53:36.706546Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:36.706568Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:36.706570Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:36.706619Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:36.765488Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:36.765525Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:36.768825Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:36.777901Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.778046Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:36.778053Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:53:36.784468Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:36.784580Z node 22 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:36.784586Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:53:36.788134Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:36.788150Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:36.790318Z node 22 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:36.794246Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.796428Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211616842, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:36.796455Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:36.796530Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:36.799303Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:36.799393Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:36.799410Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:36.799432Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:36.799444Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:36.799469Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:36.799777Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046644480, cookie: 281474976715657 2024-11-21T17:53:36.799785Z node 22 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:36.799788Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:36.799805Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Trying to register node 2024-11-21T17:53:36.873315Z node 22 :TICKET_PARSER ERROR: Ticket 32703F3CD8BD685EC3BDD5A710708FB8C22F8844: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. Client certificate failed verification" } >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 >> TPersQueueTest::ReadWithoutConsumerFederation [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen >> TGRpcClientLowTest::SimpleRequest [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService |85.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |85.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TPQTabletTests::UpdateConfig_1 >> YdbScripting::BasicV1 [GOOD] >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPersQueueTest::DefaultMeteringMode [GOOD] >> TPersQueueTest::DisableWrongSettings >> TPQTest::TestDirectReadHappyWay >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> TPQTest::TestMessageNo ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbScripting::BasicV1 [GOOD] Test command err: 2024-11-21T17:53:33.170514Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792229307723406:2250];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:33.170533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00115e/r3tmp/tmpSMkAZG/pdisk_1.dat 2024-11-21T17:53:33.223363Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16686, node 1 2024-11-21T17:53:33.237723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:33.237733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:33.237735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:33.237763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:33.260925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:33.261658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:33.261670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:33.262238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:33.262307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:33.262317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:53:33.262762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:33.262771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:33.262806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:33.263117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:33.264136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211613307, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:33.264154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:33.264251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:33.264750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:33.264807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:33.264823Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:33.264834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:33.264845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:33.264863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:33.265263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:53:33.265278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:33.265282Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:33.265313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:33.270708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:33.270739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:33.272159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:33.434920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ListingObjects, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:33.435143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:53:33.435469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:33.435478Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:33.436095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ListingObjects 2024-11-21T17:53:33.436157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:33.436199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:33.436218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:53:33.436373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:53:33.436478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:33.436501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:33.436510Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:33.436547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:33.436556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:33.436557Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:53:33.438883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.438973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.438993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 
281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:33.439291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreate ... registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:53:37.875941Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.875948Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.875950Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:53:37.875971Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.875976Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.875977Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:53:37.875987Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.875988Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.875989Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:53:37.876000Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.876006Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.876007Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T17:53:37.876015Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.876021Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.876021Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T17:53:37.877826Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211617927, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:37.877836Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211617927, at schemeshard: 72057594046644480 2024-11-21T17:53:37.877855Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T17:53:37.877869Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211617927, at schemeshard: 72057594046644480 2024-11-21T17:53:37.877874Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T17:53:37.877881Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211617927, at schemeshard: 72057594046644480 2024-11-21T17:53:37.877886Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T17:53:37.877897Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732211617927 2024-11-21T17:53:37.877902Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T17:53:37.878232Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:37.878310Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:37.878329Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T17:53:37.878345Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T17:53:37.878371Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T17:53:37.878381Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T17:53:37.878390Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:53:37.878400Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 
2024-11-21T17:53:37.878414Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T17:53:37.878422Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T17:53:37.878427Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:53:37.878439Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T17:53:37.878442Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T17:53:37.878444Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T17:53:37.878448Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T17:53:37.878549Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.878559Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.878561Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:53:37.878581Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.878587Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.878588Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:53:37.878608Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.878615Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.878616Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:53:37.878626Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:53:37.878630Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.878631Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:53:37.878639Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 
281474976715659 2024-11-21T17:53:37.878646Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:53:37.878647Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T17:53:37.878651Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T17:53:37.879129Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439792248036834269:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:53:37.941437Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:53:37.941480Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:53:37.942168Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:53:37.949530Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xqbx70f0f1td1rjdgkzv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjA3MTg3ZGQtYTdjOGFjZmUtNTY5ZTVkN2EtY2E4NmZjMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:53:37.964877Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xqc5039t1mnaxs2ncsash, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWI2NjBhMjctZjVlMGM0ZjQtNDUwM2I1MWUtNmFhMjljODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:53:37.966421Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211618011, txId: 281474976715662] shutting down >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients |85.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPQTabletTests::DropTablet_And_Tx >> TTableProfileTests::UseTableProfilePreset [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestDescribeBalancer >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPQTabletTests::DropTablet [GOOD] >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestLowWatermark >> TPQTabletTests::DropTablet_Before_Write >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTest::TestUserInfoCompatibility >> TPQTabletTests::Cancel_Tx >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] Test command err: 2024-11-21T17:53:38.676774Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:53:38.678049Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:53:38.678133Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T17:53:38.678147Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:53:38.678151Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T17:53:38.678156Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:53:38.678162Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:38.678169Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:38.678173Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:38.681617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.681642Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T17:53:38.681658Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:38.683688Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:38.684822Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:38.684846Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:38.685260Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:38.685298Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:38.685319Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T17:53:38.685402Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:38.685476Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T17:53:38.685647Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T17:53:38.685653Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T17:53:38.685660Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:38.685799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T17:53:38.685805Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:53:38.685836Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:38.685886Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:38.685956Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:53:38.685982Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T17:53:38.686096Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 
2024-11-21T17:53:38.686100Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T17:53:38.686105Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:38.686181Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T17:53:38.686186Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T17:53:38.686198Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:38.686212Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:38.686265Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:53:38.686297Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:38.686904Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:38.686928Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:38.686983Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.686989Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T17:53:38.687281Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.687290Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T17:53:38.687458Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2024-11-21T17:53:38.687479Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T17:53:38.687484Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:38.687489Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T17:53:38.687524Z node 1 :PERSQUEUE DEBUG: 
[TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:38.687543Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:38.688242Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:38.688253Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T17:53:38.688256Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T17:53:38.688759Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T17:53:38.688767Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 678 ... : 0 Generation: 5 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:39.630762Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T17:53:39.630804Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:39.630842Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:239:2234] 2024-11-21T17:53:39.630954Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T17:53:39.630958Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:239:2234] 2024-11-21T17:53:39.630963Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:39.631017Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 5 2024-11-21T17:53:39.631021Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 5 done 2024-11-21T17:53:39.631040Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:39.631064Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:39.631099Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.631470Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:39.631534Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:39.631538Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:246:2239], now have 1 active actors on pipe 2024-11-21T17:53:39.631762Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:39.631768Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:251:2243], now have 1 active actors on pipe 2024-11-21T17:53:39.631796Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 173 RawX2: 21474838668 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } ReceivingShards: 33333 ReceivingShards: 33334 Immediate: false } 2024-11-21T17:53:39.631801Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T17:53:39.631813Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T17:53:39.631819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:39.631822Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T17:53:39.631842Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:53:39.631855Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.632354Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:39.632363Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T17:53:39.632366Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T17:53:39.632397Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 173 RawX2: 21474838668 } } Step: 100 2024-11-21T17:53:39.632402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T17:53:39.632404Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2024-11-21T17:53:39.632407Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2024-11-21T17:53:39.632423Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:53:39.632430Z node 5 :PERSQUEUE DEBUG: 
[PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.632946Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:39.632956Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2024-11-21T17:53:39.632958Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2024-11-21T17:53:39.632960Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T17:53:39.632963Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T17:53:39.632969Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T17:53:39.632977Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2024-11-21T17:53:39.633012Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2024-11-21T17:53:39.633015Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2024-11-21T17:53:39.633017Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T17:53:39.633019Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T17:53:39.633021Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:53:39.633023Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T17:53:39.633043Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:53:39.633053Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.634002Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:39.634007Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2024-11-21T17:53:39.634010Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T17:53:39.634015Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 2 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
2024-11-21T17:53:39.634017Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33334 2024-11-21T17:53:39.634032Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33333 2024-11-21T17:53:39.634038Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T17:53:39.634042Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T17:53:39.634044Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2024-11-21T17:53:39.634050Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T17:53:39.634055Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2024-11-21T17:53:39.634071Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.634402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2024-11-21T17:53:39.634407Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33334 2024-11-21T17:53:39.634503Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2024-11-21T17:53:39.634505Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33333 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.634703Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:39.634719Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T17:53:39.634722Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T17:53:39.634724Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:53:39.634728Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T17:53:39.634732Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T17:53:39.634734Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T17:53:39.634752Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { } 2024-11-21T17:53:39.634760Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.635146Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:39.635151Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2024-11-21T17:53:39.635154Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T17:53:39.635156Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T17:53:39.635158Z node 5 
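Note: the TxId 67890 trace above walks the PQ tablet transaction through one logged state after another. As a reading aid only, the sequence of state names as they appear in this log (an illustrative sketch inferred from the log text, not the actual NKikimr::NPQ implementation; ETxState and its enumerator names are made up here):

    // State names in the order they are logged for TxId 67890 above
    // (DELETING appears a little further down, once the tx key is removed):
    // UNKNOWN -> PREPARING -> PREPARED -> PLANNING -> PLANNED -> CALCULATING
    //   -> CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING
    enum class ETxState {
        Unknown, Preparing, Prepared, Planning, Planned, Calculating,
        Calculated, WaitRs, Executing, Executed, WaitRsAcks, Deleting
    };
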
:PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 2024-11-21T17:53:39.635160Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T17:53:39.635162Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTest::TestDescribeBalancer [GOOD] >> TPQTest::TestCheckACL >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestTimeRetention ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] Test command err: 2024-11-21T17:53:39.422971Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:53:39.423763Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:53:39.423822Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T17:53:39.423847Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:53:39.423850Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T17:53:39.423854Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:53:39.423858Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.423863Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:39.423867Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:39.426296Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:39.426313Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T17:53:39.426329Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:39.427811Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.428477Z node 1 :PERSQUEUE DEBUG: 
[PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:39.428498Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.428785Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:39.428815Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:39.428822Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T17:53:39.428872Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:39.428933Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T17:53:39.429051Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 
2024-11-21T17:53:39.429056Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T17:53:39.429062Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:39.429141Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T17:53:39.429145Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:53:39.429172Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:39.429199Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:39.429248Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:53:39.429263Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T17:53:39.429342Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 2024-11-21T17:53:39.429345Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T17:53:39.429349Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:39.429425Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T17:53:39.429429Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T17:53:39.429438Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:39.429450Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:39.429489Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:53:39.429515Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.430069Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:39.430094Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:39.430136Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:39.430140Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T17:53:39.430372Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:39.430378Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T17:53:39.430513Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2024-11-21T17:53:39.430521Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T17:53:39.430533Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T17:53:39.430537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:39.430541Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T17:53:39.430560Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:39.430573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T17:53:39.430611Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.431488Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:39.431498Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T17:53:39.431501Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T17:53:39.431564Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Data { Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: true } 2024-11-21T17:53:39.431569Z node 1 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] TxId 67891 invalid PQ tablet state (EDropped) 2024-11-21T17:53:39.431573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2024-11-21T17:53:39.431982Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T17:53:39.431992Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2024-11-21T17:53:39.431997Z node 1 :PERSQUEUE DEBUG: ... tate: StateInit] bootstrapping 0 [5:308:2294] 2024-11-21T17:53:40.378265Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:40.378448Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitMetaStep 2024-11-21T17:53:40.378476Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInfoRangeStep 2024-11-21T17:53:40.378548Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataRangeStep 2024-11-21T17:53:40.378567Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitDataStep 2024-11-21T17:53:40.378569Z node 5 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T17:53:40.378573Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [5:308:2294] 2024-11-21T17:53:40.378578Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:40.378603Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 5 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:40.378642Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2024-11-21T17:53:40.378645Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2024-11-21T17:53:40.378647Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2024-11-21T17:53:40.378661Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2024-11-21T17:53:40.378669Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 100, TxId 67890 2024-11-21T17:53:40.378704Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:53:40.378720Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvProposePartitionConfigResult Step 100, TxId 67890, Partition 0 2024-11-21T17:53:40.378724Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvProposePartitionConfigResult 2024-11-21T17:53:40.378727Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2024-11-21T17:53:40.378729Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2024-11-21T17:53:40.378732Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:53:40.378734Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2024-11-21T17:53:40.378772Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 
MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { Partition { PartitionId: 0 } } 2024-11-21T17:53:40.378787Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:40.379353Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:40.379362Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2024-11-21T17:53:40.379364Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2024-11-21T17:53:40.379375Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2024-11-21T17:53:40.379378Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:40.379949Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:40.379959Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:332:2311], now have 1 active actors on pipe 2024-11-21T17:53:40.379996Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2024-11-21T17:53:40.380003Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2024-11-21T17:53:40.380007Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2024-11-21T17:53:40.380011Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2024-11-21T17:53:40.380013Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2024-11-21T17:53:40.380022Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2024-11-21T17:53:40.380025Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2024-11-21T17:53:40.380031Z node 5 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2024-11-21T17:53:40.380038Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2024-11-21T17:53:40.380057Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2024-11-21T17:53:40.380060Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 
user client-3 reinit with generation 2 done 2024-11-21T17:53:40.380062Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2024-11-21T17:53:40.380101Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:40.380642Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:40.380668Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2024-11-21T17:53:40.380673Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2024-11-21T17:53:40.380675Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2024-11-21T17:53:40.380680Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2024-11-21T17:53:40.380723Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2024-11-21T17:53:40.380729Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:40.380735Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2024-11-21T17:53:40.380737Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2024-11-21T17:53:40.380765Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 18446744073709551615 PredicatesReceived { TabletId: 22222 Predicate: true } Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 1 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ChildPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 173 RawX2: 21474838668 } Partitions { Partition { PartitionId: 0 } } 2024-11-21T17:53:40.380787Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:40.381394Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:40.381402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 
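Note: the two traces differ in readset direction. The trace that lists ReceivingShards 33333 and 33334 waits for zero senders, so HaveParticipantsDecision flips to 1 as soon as the local predicate is calculated and the tx moves from WAIT_RS to EXECUTING immediately; the trace that lists PredicatesReceived { TabletId: 22222 } has no recipients and stays in WAIT_RS until the TEvReadSet from tablet 22222 arrives ("Predicates 1/1"). A minimal sketch of the bookkeeping these counters suggest, inferred from the log only (TxParticipants and all member names are illustrative, not actual YDB types):

    // Illustrative only: mirrors the counters printed in the log
    // ("Predicates 1/1", "PredicateAcks: 0/2", "HaveParticipantsDecision 1").
    #include <cstddef>

    struct TxParticipants {
        std::size_t PredicateRecipients = 0; // tablets we send TEvReadSet to
        std::size_t ExpectedSenders     = 0; // tablets we wait for TEvReadSet from
        std::size_t ReceivedPredicates  = 0; // readsets received so far
        std::size_t ReceivedAcks        = 0; // TEvReadSetAck received from recipients
        bool        LocalPredicate      = false;

        // True once the local predicate is known and every expected sender has
        // delivered its readset; in the log this precedes WAIT_RS -> EXECUTING.
        bool HaveParticipantsDecision() const {
            return LocalPredicate && ReceivedPredicates == ExpectedSenders;
        }

        // True once every recipient has acknowledged our readset; in the log this,
        // together with supportive-partition deletion, precedes leaving WAIT_RS_ACKS.
        bool HaveAllRecipientsReceived() const {
            return ReceivedAcks == PredicateRecipients;
        }
    };
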
2024-11-21T17:53:40.381405Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2024-11-21T17:53:40.381411Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2024-11-21T17:53:40.381416Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2024-11-21T17:53:40.381420Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T17:53:40.381423Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2024-11-21T17:53:40.381426Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/0 2024-11-21T17:53:40.381430Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2024-11-21T17:53:40.381433Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2024-11-21T17:53:40.381438Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2024-11-21T17:53:40.381445Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:40.382946Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:40.382957Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State DELETING 2024-11-21T17:53:40.382962Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:40.382965Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete TxId 67890 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::OverwriteStoragePolicy >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] >> TOlapReboots::CreateStandaloneTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] Test command err: 2024-11-21T17:53:37.520409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792245275392895:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:37.520629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001155/r3tmp/tmp8PvlAz/pdisk_1.dat 2024-11-21T17:53:37.564908Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15992, node 1 2024-11-21T17:53:37.580913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:37.580930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:37.580932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:37.580969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:5074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:37.603363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:37.604396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:37.604417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:37.605018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:37.605064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:37.605072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:53:37.605478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:37.605493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:37.605578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:37.605881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:37.606936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211617654, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:37.606950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:37.607011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:37.607404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:37.607449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:37.607463Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:37.607473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:37.607486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:37.607499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:37.607832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:53:37.607855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:37.607859Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:37.607873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:37.620790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:37.620820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:37.622422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:38.374654Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792249490903797:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:38.374831Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/6fwh/001155/r3tmp/tmpTQxkkq/pdisk_1.dat 2024-11-21T17:53:38.385766Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8640, node 4 2024-11-21T17:53:38.402833Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:38.402846Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:38.402849Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:38.402893Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:38.474874Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:38.474918Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:38.476474Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:38.476941Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:38.477058Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:38.477068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:38.477527Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:38.477585Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:38.477594Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:53:38.477952Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:38.477960Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:38.478116Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:38.478288Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:38.479240Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211618522, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:38.479255Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:38.479351Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:38.479780Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:38.479851Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:38.479866Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:38.479883Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:38.479896Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:38.479913Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:38.480063Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 7205759404 ... PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:39.771219Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:39.771247Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:39.772857Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:39.774244Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:39.774335Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:39.774348Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:39.774831Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:39.774874Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:39.774882Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:53:39.775319Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:39.775332Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:39.775411Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:39.775642Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:39.776422Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211619824, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:39.776433Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:39.776500Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:39.776860Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:39.776905Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:39.776921Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:39.776939Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:39.776952Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:39.776970Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:39.777082Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:53:39.777103Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:39.777106Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:39.777116Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2024-11-21T17:53:39.943712Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T17:53:39.945342Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T17:53:39.947025Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {SUCCESS, 0} 2024-11-21T17:53:39.949056Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T17:53:39.950810Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T17:53:40.530130Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7439792261754108037:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:40.530277Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001155/r3tmp/tmpcmFjxP/pdisk_1.dat 2024-11-21T17:53:40.540417Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15958, node 13 2024-11-21T17:53:40.558582Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:40.558604Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:40.558606Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:40.558648Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31061 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:40.630334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:40.630372Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:40.631774Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:40.632255Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:40.632353Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:40.632363Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:40.632664Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:40.632706Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:40.632713Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:53:40.632972Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:40.632981Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:40.633126Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:40.633286Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:40.634016Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211620678, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:40.634026Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:40.634075Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:40.634412Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:40.634452Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:40.634466Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:40.634476Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:40.634488Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:40.634502Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:40.634643Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:53:40.634663Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:40.634666Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:40.634676Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStandaloneTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader 
for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:53:22.736380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:53:22.736428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:53:22.736435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:53:22.736440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:53:22.736456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:53:22.736460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:53:22.736476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:53:22.736604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:53:22.789102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:53:22.789128Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:53:22.791929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:53:22.792068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:53:22.792099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:53:22.811172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:53:22.811255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:53:22.831481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:22.840629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:53:22.852483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:22.892496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:22.892539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:22.892567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:53:22.892586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:53:22.892595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:53:22.892660Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:53:22.895876Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:53:22.980794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:53:22.993134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.993207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:53:22.993267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:53:22.993278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.993902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:22.993947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:53:22.994000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.994009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:53:22.994018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:53:22.994022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:53:22.994716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.994729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:53:22.994733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:53:22.995380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.995393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.995401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:22.995409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready 
parts: 1/1 2024-11-21T17:53:22.997829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:53:22.998730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:53:23.008975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:53:23.009260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:23.009311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:23.009327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:23.009399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:53:23.009409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:23.009441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:53:23.009455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:53:23.010694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:23.010714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:53:23.010773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:23.010779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:53:23.010883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:23.010893Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:53:23.010909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:53:23.010913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:53:23.010920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2024-11-21T17:53:23.010927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:53:23.010932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:53:23.010938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:53:23.010955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:53:23.010962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:53:23.010967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... tep: 5000003 2024-11-21T17:53:41.706670Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:41.706692Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 292057778277 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:41.706698Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TPropose operationId#1002:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2024-11-21T17:53:41.706765Z node 68 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 129 2024-11-21T17:53:41.706788Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:53:41.706799Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:53:41.708098Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:41.708108Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:53:41.708147Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:53:41.708173Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:41.708177Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T17:53:41.708181Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T17:53:41.708220Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:53:41.708225Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#1002:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:53:41.708253Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:53:41.708384Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:53:41.708394Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:53:41.708397Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:53:41.708401Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:53:41.708405Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:53:41.708530Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:53:41.708538Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:53:41.708540Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:53:41.708542Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:53:41.708544Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:53:41.708553Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T17:53:41.708815Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:53:41.709102Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:53:41.709262Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:53:41.720261Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T17:53:41.720288Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:53:41.720312Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T17:53:41.720320Z node 68 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:53:41.720405Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 
72075186233409546 TxId: 1002 2024-11-21T17:53:41.720408Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:53:41.720415Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2024-11-21T17:53:41.721032Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:53:41.721100Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:53:41.721120Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:53:41.721127Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:53:41.721141Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:53:41.721144Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:53:41.721148Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T17:53:41.721162Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:360:2340] message: TxId: 1002 2024-11-21T17:53:41.721168Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:53:41.721173Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:53:41.721177Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:53:41.721206Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:53:41.721516Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:53:41.721526Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:361:2341] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:53:41.721616Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:53:41.721673Z node 68 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 67us result status StatusSuccess 2024-11-21T17:53:41.721797Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> TTableProfileTests::WrongTableProfile >> TPQTest::TestCheckACL [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> THiveTest::TestLocalReplacement >> TPQTest::TestWriteTimeLag [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:24.450678Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.450704Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for 
TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.455528Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.458581Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2024-11-21T17:53:24.458874Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:24.459506Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:53:24.460358Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:53:24.460844Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.471275Z node 1 :PERSQUEUE INFO: new Cookie default|218834bd-750ff2a8-29b7099c-382bf89c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:216:2057] recipient: [1:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:219:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:220:2057] recipient: [1:218:2222] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:221:2223] sender: [1:222:2057] recipient: [1:218:2222] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.494226Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.494244Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:24.494373Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:272:2266] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.494955Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:273:2267] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.496522Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:273:2267] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:24.506739Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:272:2266] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:221:2223] sender: [1:299:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T17:53:24.708298Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.708325Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.714478Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.714668Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { 
PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "user1" Generation: 2 Important: true } 2024-11-21T17:53:24.714782Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T17:53:24.715313Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T17:53:24.715945Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T17:53:24.716489Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvRequest ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:203:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:206:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:207:2057] recipient: [2:205:2211] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:208:2212] sender: [2:209:2057] recipient: [2:205:2211] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.727139Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.727159Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:53:24.727279Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:263:2259] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.727938Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:264:2260] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:24.729748Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:263:2259] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:24.729849Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:264:2260] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! 
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:208:2212] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents: ... n: 0, State: StateIdle] Reading cookie 2. Send blob request. 2024-11-21T17:53:43.965635Z node 54 :PERSQUEUE DEBUG: No blob in L1. Partition 0 offset 12 actorID [54:530:2504] 2024-11-21T17:53:43.965642Z node 54 :PERSQUEUE DEBUG: Reading cookie 2. Have to read 1 of 1 from KV 2024-11-21T17:53:43.965674Z node 54 :PERSQUEUE DEBUG: PQ Cache (L2). Missed blob. tabletId '72057594037927937' partition 0 offset 12 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:43.966074Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:43.966136Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2024-11-21T17:53:43.966181Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 57 actor [54:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 57 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } 2024-11-21T17:53:43.969540Z node 54 :PERSQUEUE DEBUG: Got results. 1 of 1 from KV. Status 1 2024-11-21T17:53:43.969556Z node 54 :PERSQUEUE DEBUG: Got results. result 0 from KV. Status 0 2024-11-21T17:53:43.969565Z node 54 :PERSQUEUE DEBUG: Prefetched blob in L1. Partition 0 offset 12 count 6 size 6292734 actorID [54:530:2504] 2024-11-21T17:53:43.969604Z node 54 :PERSQUEUE DEBUG: FormAnswer 1 2024-11-21T17:53:43.969756Z node 54 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2024-11-21T17:53:43.969889Z node 54 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2024-11-21T17:53:43.969903Z node 54 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2024-11-21T17:53:43.969944Z node 54 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 12 size 6292734 2024-11-21T17:53:43.969954Z node 54 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user another1 readTimeStamp done, result 282 queuesize 0 startOffset 12 2024-11-21T17:53:43.970056Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:43.970061Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:631:2581], now have 1 active actors on pipe 2024-11-21T17:53:43.970379Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:43.970387Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:636:2585], now have 1 active actors on pipe 2024-11-21T17:53:43.970393Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:43.970456Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 58(current 57) received from actor [54:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 58 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 ReadRuleGenerations: 58 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } Consumers { Name: "another" Generation: 58 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:43.971271Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 58 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 ReadRuleGenerations: 58 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } Consumers { Name: "another" Generation: 58 Important: false } 2024-11-21T17:53:43.971281Z node 54 :PERSQUEUE 
NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:43.971327Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 58 done 2024-11-21T17:53:43.971364Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 58 done 2024-11-21T17:53:43.971384Z node 54 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:53:43.971453Z node 54 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:43.972496Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:43.972560Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2024-11-21T17:53:43.972795Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:43.972835Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2024-11-21T17:53:43.972872Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 58 actor [54:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 58 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 ReadRuleGenerations: 57 ReadRuleGenerations: 56 ReadRuleGenerations: 58 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "aaa" Generation: 55 Important: false } Consumers { Name: "another1" Generation: 57 Important: true } Consumers { Name: "important" Generation: 56 Important: true } Consumers { Name: "another" Generation: 58 Important: false } 2024-11-21T17:53:43.972984Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:43.972991Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:654:2600], now have 1 active actors on pipe 2024-11-21T17:53:43.973278Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:43.973284Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:659:2604], now have 1 active actors on pipe 2024-11-21T17:53:43.973296Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:53:43.973300Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T17:53:43.973318Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:43.973360Z node 54 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:43.973365Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:661:2606], now have 1 active actors on pipe 2024-11-21T17:53:43.973376Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:53:43.973379Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T17:53:43.973391Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:43.973435Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:43.973438Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:663:2608], now have 1 active actors on pipe 2024-11-21T17:53:43.973449Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:53:43.973454Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T17:53:43.973462Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:43.973511Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:43.973515Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [54:665:2610], now have 1 active actors on pipe 2024-11-21T17:53:43.973523Z node 54 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2024-11-21T17:53:43.973525Z node 54 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2024-11-21T17:53:43.973532Z node 54 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> TTableProfileTests::WrongTableProfile [GOOD] >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestLockTabletExecution >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::WrongTableProfile [GOOD] Test command err: 2024-11-21T17:53:36.102145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792244725918380:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:36.102209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001159/r3tmp/tmp6djXvX/pdisk_1.dat 2024-11-21T17:53:36.159653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1350, node 1 2024-11-21T17:53:36.176619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:36.176626Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:36.176627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:36.176658Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23775 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:36.196870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.197831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:36.197842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.198567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:36.198633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:36.198641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:53:36.199043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:36.199106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:36.199114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:36.199474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.200240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211616247, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:36.200252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:36.200296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:36.200608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:36.200644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:36.200653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:36.200661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:36.200668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:36.200684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:36.201033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:53:36.201056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:36.201059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:36.201088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:36.202400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:36.202423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:36.203834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23775 2024-11-21T17:53:36.223542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.223640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:36.223651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.224377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T17:53:36.224420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:36.224575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:36.224597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T17:53:36.224602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 waiting... 2024-11-21T17:53:36.224774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:36.224791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:36.224795Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:36.224833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:36.224835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:36.224838Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:53:36.224990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:53:36.225170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:36.225179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T17:53:36.225489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:36.226125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211616275, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:36.226134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet 72057594046644480 2024-11-21T17:53:36.226181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T17:53:36.226504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:36.226556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:36.226571Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T17:53:36.226581Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:53:36.226592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:53:36.226617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T17:53:36.226731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:36.226743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:36.226745Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:53:36.226767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:36.226774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:36.226775Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:53:36.226780Z node 1 :FLAT_TX_SC ... Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:53:43.443722Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:43.444290Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211623492, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:43.444300Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T17:53:43.444343Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:53:43.444644Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:43.444686Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:43.444699Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:53:43.444710Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:53:43.444720Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:53:43.444730Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:53:43.444883Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:43.444898Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:43.444901Z node 13 :FLAT_TX_SCHEMESHARD 
INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:53:43.444930Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:43.444938Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:43.444940Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:53:43.444945Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:53:43.445663Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:53:43.445768Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:43.445777Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:53:43.446079Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T17:53:43.446108Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 
2024-11-21T17:53:43.446350Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:53:43.447382Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:43.447427Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:43.447436Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T17:53:43.447671Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:43.947202Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439792273412005797:2234];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:43.948269Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:43.948292Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:43.949745Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T17:53:43.949955Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:43.952005Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TClient is connected to server localhost:19498 2024-11-21T17:53:43.965901Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: 
"compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } 
ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T17:53:43.985605Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T17:53:43.985742Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:53:44.195921Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:53:44.196076Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:7439792277706973237:2278], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> THiveTest::TestReCreateTablet >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> TPersQueueTest::DisableWrongSettings [GOOD] >> TPersQueueTest::DisableDeduplication >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2024-11-21T17:51:56.971990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791812353360484:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:56.972200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:56.973836Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791814247524041:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:56.973876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002031/r3tmp/tmpQCGWEJ/pdisk_1.dat 2024-11-21T17:51:57.006098Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:57.008544Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:57.028299Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1759, node 1 2024-11-21T17:51:57.051492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/002031/r3tmp/yandexFfZaEG.tmp 2024-11-21T17:51:57.051509Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/002031/r3tmp/yandexFfZaEG.tmp 2024-11-21T17:51:57.051580Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/002031/r3tmp/yandexFfZaEG.tmp 2024-11-21T17:51:57.051628Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:57.056582Z INFO: TTestServer started on Port 5117 GrpcPort 1759 TClient is connected to server localhost:5117 PQClient connected to localhost:1759 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:57.072208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.072249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.073990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:57.101732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.101764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.102096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.103230Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:57.103520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:51:57.115712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:57.269990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791816648328764:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.270015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791816648328775:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.270022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.271438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2024-11-21T17:51:57.274742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791816648328778:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T17:51:57.312867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.314738Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791818542491710:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.315288Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFkZWEwZjQtNjZiNzAyNTktZGNhNTQwOWEtMWQ2NWU3NDc=, ActorId: [2:7439791818542491671:2280], ActorState: ExecuteState, TraceId: 01jd7xm9vp5gx3v7bw5mq59h7f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.315693Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.376161Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791816648328979:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.376309Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODA5OTA1MTItYzAyZjlkMjQtYWFhY2UwODYtNGQyZDg0MDA=, ActorId: [1:7439791816648328761:2300], ActorState: ExecuteState, TraceId: 01jd7xm9tk0r49ar821bpj0xza, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.376449Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.380703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.430522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:57.511214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd7xma134gzwhzk9fsxjfx3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMyY2MzMjMtZDBkMTI1NWYtNWJlNGJhOGQtNzNmN2FkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791816648329271:3033] 2024-11-21T17:52:01.972474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791812353360484:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:01.972516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:52:01.973997Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791814247524041:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:01.974028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:1759 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2024-11-21T17:52:03.555144Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2024-11-21T17:52:03.555161Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2024-11-21T17:52:03.555164Z node 1 :PQ_METACACHE DEBUG: Describe all topics - send empty response 2024-11-21T17:52:03.555167Z node 1 :PQ_METACACHE DEBUG: Send describe all topics response with 0 topics 2024-11-21T17:52:03.555205Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:1759 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 10 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 ... 21T17:53:44.948706Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:53:44.948725Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 37 count 3 size 320 endOffset 40 max time lag 0ms effective offset 37 2024-11-21T17:53:44.948731Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 37 2024-11-21T17:53:44.948732Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 37 2024-11-21T17:53:44.948736Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2024-11-21T17:53:44.948737Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:53:44.948749Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 37 2024-11-21T17:53:44.948819Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16927602806946008342_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 37 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 39 WriteTimestampMS: 1732211624932 CreateTimestampMS: 1732211624932 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1732211624933 CreateTimestampMS: 1732211624933 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1732211624934 CreateTimestampMS: 1732211624934 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 60 RealReadOffset: 39 WaitQuotaTimeMs: 0 } Cookie: 37 } 2024-11-21T17:53:44.948834Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16927602806946008342_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset40 2024-11-21T17:53:44.948836Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_8948392538405831067_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 37 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 39 WriteTimestampMS: 1732211624932 CreateTimestampMS: 1732211624932 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1732211624933 CreateTimestampMS: 1732211624933 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1732211624934 CreateTimestampMS: 1732211624934 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 60 RealReadOffset: 39 WaitQuotaTimeMs: 0 } Cookie: 37 } 2024-11-21T17:53:44.948838Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16927602806946008342_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 40 ReadOffset 40 ReadGuid 6b4dced7-7cd4a47d-ab0f2cdd-72dcc68b has messages 1 2024-11-21T17:53:44.948850Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16927602806946008342_v1 read done: guid# 6b4dced7-7cd4a47d-ab0f2cdd-72dcc68b, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 522 2024-11-21T17:53:44.948856Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16927602806946008342_v1 response to read: guid# 6b4dced7-7cd4a47d-ab0f2cdd-72dcc68b 2024-11-21T17:53:44.948858Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_8948392538405831067_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset40 2024-11-21T17:53:44.948863Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_8948392538405831067_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 40 ReadOffset 40 ReadGuid 5df4f261-1b851c2b-a88e1660-6c0efe66 has messages 1 2024-11-21T17:53:44.948881Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_8948392538405831067_v1 read done: guid# 5df4f261-1b851c2b-a88e1660-6c0efe66, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 522 2024-11-21T17:53:44.948892Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_8948392538405831067_v1 response to read: guid# 5df4f261-1b851c2b-a88e1660-6c0efe66 2024-11-21T17:53:44.948902Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16927602806946008342_v1 Process answer. Aval parts: 0 Bytes readed: 522 Offset: 37 from session 1 2024-11-21T17:53:44.948956Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_8948392538405831067_v1 Process answer. 
Aval parts: 0 Offset: 38 from session 1 Offset: 39 from session 1 Bytes readed: 522 Offset: 37 from session 1 Offset: 38 from session 1 Offset: 39 from session 1 2024-11-21T17:53:44.949083Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_16927602806946008342_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 39 } } } } 2024-11-21T17:53:44.949091Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_16927602806946008342_v1 closed with error: reason# can't commit when reading without a consumer 2024-11-21T17:53:44.949124Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_8948392538405831067_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 39 } } } } 2024-11-21T17:53:44.949130Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_8948392538405831067_v1 closed with error: reason# can't commit when reading without a consumer 2024-11-21T17:53:44.949150Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_8948392538405831067_v1 is DEAD 2024-11-21T17:53:44.949170Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_16927602806946008342_v1 is DEAD 2024-11-21T17:53:44.949266Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949276Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949286Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758294:2534] destroyed 2024-11-21T17:53:44.949291Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949297Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949299Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758293:2533] destroyed 2024-11-21T17:53:44.949301Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949303Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949305Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758291:2530] destroyed 2024-11-21T17:53:44.949306Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949308Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949309Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758289:2529] destroyed 2024-11-21T17:53:44.949310Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949312Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949312Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949313Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758287:2526] destroyed 2024-11-21T17:53:44.949314Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949316Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949317Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949319Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758292:2532] destroyed 2024-11-21T17:53:44.949319Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949320Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949321Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949322Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949323Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758290:2531] destroyed 2024-11-21T17:53:44.949324Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_8948392538405831067_v1 2024-11-21T17:53:44.949324Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949326Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949327Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949328Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758288:2528] destroyed 2024-11-21T17:53:44.949329Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949330Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949331Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949333Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758286:2527] destroyed 2024-11-21T17:53:44.949333Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949334Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:44.949335Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949336Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16927602806946008342_v1 2024-11-21T17:53:44.949337Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792277208758284:2525] destroyed 2024-11-21T17:53:44.949338Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_16927602806946008342_v1 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> 
THiveTest::TestReassignUseRelativeSpace >> THiveTest::TestUpdateChannelValues >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> THiveTest::TestDrain >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestRestartsWithFollower |85.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> THiveTest::TestLocalDisconnect >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestRestartTablets >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestHiveRestart >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError >> HttpRequest::Probe [GOOD] >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD] >> TTopicYqlTest::CreateAndAlterTopicYql ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: 2024-11-21T17:51:34.438436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:34.438479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:34.438488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001697/r3tmp/tmpHM4vkS/pdisk_1.dat 2024-11-21T17:51:34.521262Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64395, node 1 2024-11-21T17:51:34.612754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:34.612769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:34.612771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:34.612831Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:34.617876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:34.692994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:34.693028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:34.704830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18212 2024-11-21T17:51:35.105043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:35.850676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.850705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.883618Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:35.884554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:35.936342Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:35.943776Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:51:35.943795Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:51:35.949423Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:51:35.949551Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:51:35.949572Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:51:35.949577Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:51:35.949583Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:51:35.949588Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:51:35.949594Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:51:35.949599Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:51:35.949708Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:51:36.131969Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:51:36.132001Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:51:36.133338Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:51:36.135658Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:51:36.135857Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:51:36.136613Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:51:36.142800Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:51:36.142818Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:51:36.142827Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:51:36.144315Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.144343Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.145739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:51:36.147620Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:51:36.147657Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:51:36.150772Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:51:36.163314Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.186840Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:51:36.333118Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:51:36.491506Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:51:37.228484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.228524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:37.231459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:51:37.277130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:37.277199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:37.277242Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:37.277265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:37.277290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:37.277310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:51:37.277330Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:51:37.277351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:51:37.277375Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:51:37.277395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:51:37.277415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:51:37.277434Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:51:37.291649Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:51:37.291709Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:51:37.291772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:51:37.291793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:51:37.291815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:51:37.291834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:53:47.208988Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T17:53:47.209034Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:53:47.792414Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T17:53:47.792441Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=O 2-1d 2024-11-21T17:53:47.792446Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T17:53:48.997201Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T17:53:48.997281Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:53:49.049580Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:53:49.049641Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T17:53:49.049646Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:53:49.049839Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T17:53:49.072832Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T17:53:49.072992Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T17:53:49.073009Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T17:53:49.073138Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T17:53:49.084296Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T17:53:49.084348Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T17:53:49.084566Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8583:6447], server id = [2:8588:6452], tablet id = 72075186224037899, status = OK 2024-11-21T17:53:49.084598Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8583:6447], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.084625Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8584:6448], server id = [2:8589:6453], tablet id = 72075186224037900, status = OK 2024-11-21T17:53:49.084630Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8584:6448], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.084780Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8585:6449], server id = [2:8590:6454], tablet id = 72075186224037901, status = OK 2024-11-21T17:53:49.084786Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8585:6449], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.084864Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T17:53:49.084923Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8586:6450], server id = [2:8591:6455], tablet id = 72075186224037902, status = OK 2024-11-21T17:53:49.084934Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8586:6450], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.085027Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8587:6451], server id = [2:8592:6456], tablet id = 72075186224037903, status = OK 2024-11-21T17:53:49.085034Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8587:6451], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.085050Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T17:53:49.085152Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8583:6447], server id = [2:8588:6452], tablet id = 72075186224037899 2024-11-21T17:53:49.085155Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.085170Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T17:53:49.085205Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8593:6457], server id = [2:8594:6458], tablet id = 72075186224037904, status = OK 2024-11-21T17:53:49.085211Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8593:6457], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.085221Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T17:53:49.085291Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T17:53:49.085303Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8584:6448], server id = [2:8589:6453], tablet id = 72075186224037900 2024-11-21T17:53:49.085304Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.085343Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8585:6449], server id = [2:8590:6454], tablet id = 72075186224037901 2024-11-21T17:53:49.085345Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.085371Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8595:6459], server id = [2:8597:6461], tablet id = 72075186224037905, status = OK 2024-11-21T17:53:49.085378Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8595:6459], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.085389Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T17:53:49.085437Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8586:6450], server id = [2:8591:6455], tablet id = 72075186224037902 2024-11-21T17:53:49.085439Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.085457Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8587:6451], server id = [2:8592:6456], tablet id = 72075186224037903 2024-11-21T17:53:49.085459Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.085474Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8596:6460], server id = [2:8599:6463], tablet id = 72075186224037906, status = OK 2024-11-21T17:53:49.085478Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8596:6460], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.085491Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8598:6462], server id = [2:8601:6465], tablet id = 72075186224037907, status = OK 2024-11-21T17:53:49.085494Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8598:6462], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.085519Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8600:6464], server id = [2:8602:6466], tablet id = 72075186224037908, status = OK 2024-11-21T17:53:49.085523Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8600:6464], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:53:49.085553Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T17:53:49.085614Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8593:6457], server id = [2:8594:6458], tablet id = 72075186224037904 2024-11-21T17:53:49.085617Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.085641Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T17:53:49.085652Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T17:53:49.085673Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T17:53:49.085677Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T17:53:49.085696Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T17:53:49.085717Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T17:53:49.085776Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T17:53:49.085806Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8595:6459], server id = [2:8597:6461], tablet id = 72075186224037905 2024-11-21T17:53:49.085807Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.086339Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8598:6462], server id = [2:8601:6465], tablet id = 72075186224037907 2024-11-21T17:53:49.086345Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.086432Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T17:53:49.086493Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8600:6464], server id = [2:8602:6466], tablet id = 72075186224037908 2024-11-21T17:53:49.086495Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.086515Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8596:6460], server id = [2:8599:6463], tablet id = 72075186224037906 2024-11-21T17:53:49.086517Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:53:49.099415Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2I5YmQyN2MtODM0MDQ4ZC1hZWNhZWE0Yy1mZjBiYmU3ZA==, TxId: 2024-11-21T17:53:49.099435Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2I5YmQyN2MtODM0MDQ4ZC1hZWNhZWE0Yy1mZjBiYmU3ZA==, TxId: 2024-11-21T17:53:49.099617Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:53:49.110891Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:53:49.110911Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=O 2-1d, ActorId=[1:3395:3252] 2024-11-21T17:53:49.111227Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:8620:5532]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T17:53:49.111274Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:53:49.111278Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T17:53:49.111756Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:53:49.111771Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T17:53:49.111781Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2024-11-21T17:53:49.114306Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> test.py::test[table_range-range_tables_with_view--Plan] [GOOD] >> test.py::test[table_range-range_tables_with_view--Results] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2024-11-21T17:53:14.742729Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792146738996795:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:14.742909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001196/r3tmp/tmp8lkFHI/pdisk_1.dat 2024-11-21T17:53:14.805533Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14393, node 1 2024-11-21T17:53:14.825067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:14.825081Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:14.825083Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:14.825125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2024-11-21T17:53:14.842800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:14.842829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:6968 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:53:14.844460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:14.855355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.856387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:14.856402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.857064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:14.857130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:14.857142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:53:14.857697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:53:14.857730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:14.857739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:53:14.858145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:14.859239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211594904, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:14.859256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:53:14.859320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:53:14.859738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:14.859807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:14.859824Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:53:14.859841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:53:14.859853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:53:14.859870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:53:14.860330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:53:14.860348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:53:14.860352Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:14.860366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:53:15.042270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792151033965007:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:15.042294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:15.068561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.068687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:53:15.068833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:15.068840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:15.069562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:53:15.069622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:15.069675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:15.069700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:53:15.069760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:53:15.069910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:15.069927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:15.069931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:15.069979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:15.069987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:15.069988Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:53:15.071337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:15.071361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T17:53:15.071804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:53:15.124520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:53:15.124531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:53:15.124558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T17:53:15.125042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:53:15.125743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211595170, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:15.125755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211595170 2024-11-21T17:53:15.125785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T17:53:15.126121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:15.126193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:15.126210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:53:15.126418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:53:15.126436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:53:15.126439Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [ ... INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:30.948214Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:53:30.948245Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:30.948247Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:30.948248Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T17:53:30.948254Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:53:30.949166Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439792218215089109:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T17:53:31.017033Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:53:31.017077Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:53:31.024621Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 2024-11-21T17:53:32.731426Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7439792205330186197:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:32.731468Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2024-11-21T17:53:34.470828Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439792232813241713:2124];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:34.470896Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001196/r3tmp/tmpgMnLiG/pdisk_1.dat 2024-11-21T17:53:34.491756Z node 10 :IMPORT WARN: Table profiles were not 
loaded TServer::EnableGrpc on GrpcPort 20554, node 10 2024-11-21T17:53:34.512002Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:34.512013Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:34.512015Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:34.512059Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:34.570730Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:34.570770Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:34.572327Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:34.578208Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:34.578324Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:34.578334Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:34.578772Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:34.578830Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:34.578842Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:53:34.579188Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:34.579200Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:34.579312Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:34.579546Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:34.580352Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211614623, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:34.580365Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:34.580424Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:34.580859Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:34.580906Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:34.580921Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:34.580934Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:34.580946Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:34.580958Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:53:34.581087Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:53:34.581107Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:34.581116Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:34.581127Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 2024-11-21T17:53:39.470870Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439792232813241713:2124];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:39.470904Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: 
Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 2024-11-21T17:53:49.488655Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:53:49.488669Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> TPersQueueTest::DisableDeduplication [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> TPersQueueTest::LOGBROKER_7820 [GOOD] >> TPersQueueTest::InflightLimit >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: (1,1): 1 on 2 (1,1): 1 on 1 RemoveNode 7 (1,1): 1 on 3 (1,3): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 0 (1,2): 1 on 9 RemoveNode 0 (1,3): 1 on 9 RemoveNode 2 (1,3): 1 on 3 (1,3): 1 on 4 (1,1): -1 on 0 (1,2): 1 on 6 RemoveNode 1 (1,3): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 8 (1,2): -1 on 6 (1,2): 1 on 6 (1,2): 1 on 8 (1,1): 1 on 1 (1,1): 1 on 3 AddNode 0 (1,1): 1 on 0 (1,3): 1 on 2 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 5 (1,3): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 4 (1,1): 1 on 1 (1,3): 1 on 8 (1,2): -1 on 6 RemoveNode 6 (1,1): 1 on 3 (1,1): 1 on 4 (1,1): -1 on 3 AddNode 2 (1,2): 1 on 8 RemoveNode 8 (1,1): 1 on 3 (1,2): 1 on 7 (1,1): 1 on 0 (1,1): 1 on 3 RemoveNode 5 (1,3): 1 on 8 RemoveNode 9 (1,3): 1 on 5 (1,1): 1 on 0 AddNode 6 (1,2): -1 on 7 (1,1): 1 on 2 (1,2): 1 on 9 AddNode 1 (1,1): 1 on 3 RemoveNode 6 (1,3): 1 on 7 (1,1): 1 on 3 (1,3): 1 on 0 (1,3): -1 on 5 (1,3): 1 on 3 (1,3): -1 on 3 (1,1): -1 on 1 (1,1): 1 on 0 (1,2): 1 on 7 (1,1): 1 on 3 (1,2): 1 on 8 (1,3): 1 on 8 (1,3): 1 on 3 (1,3): 1 on 0 (1,2): -1 on 7 (1,1): -1 on 4 (1,2): 1 on 7 RemoveNode 0 (1,1): 1 on 4 (1,3): 1 on 2 (1,3): 1 on 2 AddNode 5 (1,2): 1 on 9 (1,3): 1 on 1 (1,1): 1 on 3 RemoveNode 2 (1,3): -1 on 0 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): 1 on 2 RemoveNode 4 (1,3): 1 on 0 (1,1): 1 on 1 (1,2): 1 on 6 (1,2): 1 on 7 
(1,1): 1 on 0 (1,1): 1 on 1 AddNode 8 (1,1): -1 on 3 (1,1): -1 on 3 (1,2): 1 on 8 (1,2): 1 on 6 AddNode 2 (1,1): 1 on 1 (1,2): 1 on 6 RemoveNode 5 (1,3): -1 on 5 (1,1): 1 on 3 (1,2): 1 on 9 (1,3): 1 on 6 (1,2): 1 on 8 (1,3): 1 on 2 AddNode 4 (1,2): 1 on 9 (1,1): 1 on 0 (1,2): 1 on 5 (1,1): 1 on 4 AddNode 9 (1,3): -1 on 4 RemoveNode 4 (1,3): 1 on 0 (1,1): -1 on 3 (1,1): 1 on 3 (1,3): 1 on 9 (1,2): -1 on 8 (1,2): 1 on 5 AddNode 7 (1,3): 1 on 1 (1,3): 1 on 3 (1,1): 1 on 0 (1,3): 1 on 0 RemoveNode 1 (1,1): 1 on 2 RemoveNode 9 (1,1): -1 on 1 (1,3): -1 on 8 (1,2): 1 on 9 (1,1): 1 on 4 AddNode 4 (1,1): 1 on 2 AddNode 1 (1,1): 1 on 4 (1,3): -1 on 8 (1,1): 1 on 3 RemoveNode 2 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): -1 on 9 AddNode 2 (1,1): 1 on 3 (1,3): -1 on 8 (1,2): 1 on 6 (1,3): -1 on 1 (1,2): -1 on 8 RemoveNode 7 (1,2): 1 on 5 RemoveNode 4 (1,1): 1 on 4 (1,1): 1 on 0 (1,3): 1 on 8 (1,2): 1 on 8 (1,2): 1 on 7 RemoveNode 2 (1,1): 1 on 4 RemoveNode 3 (1,1): 1 on 0 (1,2): 1 on 5 AddNode 7 (1,1): 1 on 4 (1,1): 1 on 4 (1,1): -1 on 2 (1,3): 1 on 3 (1,1): 1 on 0 (1,3): 1 on 8 (1,2): 1 on 8 AddNode 9 (1,2): 1 on 6 AddNode 4 (1,1): 1 on 3 AddNode 0 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): 1 on 7 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): 1 on 1 (1,1): 1 on 1 (1,3): -1 on 0 AddNode 2 (1,3): -1 on 0 (1,1): 1 on 4 (1,1): 1 on 4 (1,3): 1 on 0 RemoveNode 0 (1,3): 1 on 3 (1,2): -1 on 8 (1,2): 1 on 6 (1,2): 1 on 8 (1,2): 1 on 7 (1,3): 1 on 6 (1,2): 1 on 7 (1,2): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 5 AddNode 6 (1,2): 1 on 7 RemoveNode 9 (1,1): 1 on 1 (1,1): 1 on 1 (1,2): 1 on 7 RemoveNode 8 (1,3): -1 on 6 RemoveNode 7 (1,1): 1 on 2 (1,2): 1 on 9 (1,1): 1 on 1 RemoveNode 4 (1,2): 1 on 5 RemoveNode 1 (1,1): 1 on 1 AddNode 8 (1,3): -1 on 8 (1,3): 1 on 0 AddNode 0 (1,3): 1 on 3 (1,2): 1 on 6 (1,2): -1 on 9 RemoveNode 2 (1,3): 1 on 2 (1,3): 1 on 1 (1,3): 1 on 8 RemoveNode 6 (1,1): -1 on 0 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 3 (1,3): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 8 AddNode 7 (1,3): 1 on 5 AddNode 5 (1,1): -1 on 3 RemoveNode 7 (1,3): -1 on 8 AddNode 7 (1,1): -1 on 3 (1,3): 1 on 3 RemoveNode 7 (1,2): 1 on 7 (1,3): 1 on 7 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 3 (1,2): -1 on 9 (1,1): -1 on 2 (1,2): 1 on 9 AddNode 7 (1,2): -1 on 8 AddNode 0 (1,1): 1 on 2 (1,3): 1 on 0 (1,2): 1 on 9 AddNode 2 (1,3): 1 on 0 RemoveNode 7 (1,3): 1 on 8 RemoveNode 2 (1,1): 1 on 4 (1,2): 1 on 8 (1,2): 1 on 7 (1,3): 1 on 0 (1,3): -1 on 1 AddNode 2 (1,3): 1 on 2 (1,3): -1 on 7 (1,3): 1 on 0 (1,1): 1 on 0 (1,3): 1 on 0 (1,2): 1 on 9 RemoveNode 5 (1,1): -1 on 3 (1,3): 1 on 7 (1,1): 1 on 1 (1,2): 1 on 7 AddNode 9 (1,2): 1 on 6 (1,1): 1 on 1 (1,3): 1 on 3 (1,1): 1 on 1 (1,1): -1 on 2 (1,2): -1 on 7 AddNode 4 (1,2): 1 on 8 (1,3): 1 on 5 (1,1): 1 on 0 (1,1): 1 on 4 (1,1): 1 on 1 (1,2): 1 on 7 (1,3): -1 on 2 (1,2): 1 on 9 (1,3): -1 on 5 (1,1): 1 on 0 (1,2): 1 on 8 (1,3): -1 on 0 (1,3): 1 on 7 (1,1): 1 on 0 (1,3): 1 on 1 (1,2): 1 on 6 (1,2): -1 on 7 (1,1): 1 on 2 (1,2): 1 on 6 (1,2): -1 on 9 RemoveNode 2 (1,3): 1 on 7 (1,3): 1 on 2 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): 1 on 9 (1,2): 1 on 6 (1,3): 1 on 2 (1,2): 1 on 5 (1,2): 1 on 6 RemoveNode 3 (1,1): 1 on 4 (1,2): 1 on 9 (1,2): -1 on 8 (1,3): -1 on 6 (1,3): 1 on 0 (1,1): 1 on 0 (1,3): 1 on 3 AddNode 1 (1,3): 1 on 4 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 0 RemoveNode 4 (1,2): 1 on 9 (1,3): 1 on 7 (1,1): 1 on 1 (1,2): -1 on 6 AddNode 5 (1,3): -1 on 0 (1,2): 1 on 9 (1,1): 1 on 2 (1,2): 1 on 9 AddNode 3 (1,3): -1 on 2 (1,3): 1 on 1 RemoveNode 8 (1,1): 1 on 0 (1,2): -1 on 5 AddNode 4 (1,3): -1 on 5 (1,3): 1 on 0 (1,3): -1 on 0 
(1,3): 1 on 7 (1,1): 1 on 0 RemoveNode 9 (1,1): -1 on 4 (1,3): 1 on 0 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 8 (1,2): 1 on 5 RemoveNode 1 (1,3): 1 on 4 (1,3): 1 on 6 (1,1): 1 on 0 (1,1): 1 on 1 AddNode 2 (1,2): -1 on 6 AddNode 1 (1,3): 1 on 6 (1,1): 1 on 4 (1,3): -1 on 8 (1,3): 1 on 3 RemoveNode 2 (1,1): 1 on 1 (1,1): 1 on 0 (1,3): 1 on 0 AddNode 9 (1,1): -1 on 4 (1,3): 1 on 7 (1,2): -1 on 9 (1,3): 1 on 7 RemoveNode 4 (1,3): -1 on 9 AddNode 8 (1,1): 1 on 1 (1,1): 1 on 0 (1,1): 1 on 4 (1,2): 1 on 5 (1,2): 1 on 9 RemoveNode 8 (1,2): 1 on 9 (1,3): 1 on 8 (1,2): 1 on 5 (1,3): 1 on 1 AddNode 7 (1,3): 1 on 4 AddNode 4 (1,1): 1 on 3 RemoveNode 7 (1,1): 1 on 4 (1,2): 1 on 7 (1,3): 1 on 7 (1,1): 1 on 4 (1,2): 1 on 8 AddNode 6 (1,1): 1 on 2 RemoveNode 6 (1,2): 1 on 6 (1,3): 1 on 1 (1,1): -1 on 3 AddNode 0 (1,1): 1 on 0 (1,1): -1 on 2 (1,3): 1 on 9 (1,2): -1 on 8 (1,1): 1 on 3 RemoveNode 3 (1,3): -1 on 0 (1,2): 1 on 5 RemoveNode 1 (1,2): 1 on 9 AddNode 3 (1,1): -1 on 3 (1,2): 1 on 7 (1,2): 1 on 6 AddNode 8 (1,3): 1 on 6 AddNode 1 (1,3): -1 on 3 (1,1): 1 on 3 (1,3): 1 on 4 (1,1): 1 on 4 (1,2): 1 on 6 (1,1): 1 on 3 (1,3): -1 on 7 (1,1): 1 on 4 (1,2): 1 on 8 RemoveNode 4 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): -1 on 0 (1,1): 1 on 4 (1,1): 1 on 0 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): 1 on 0 RemoveNode 5 (1,2): 1 on 8 (1,2): 1 on 8 RemoveNode 8 (1,1): -1 on 0 (1,3): 1 on 1 (1,2): 1 on 6 RemoveNode 0 (1,3): -1 on 2 (1,1): 1 on 0 (1,2): 1 on 8 (1,3): 1 on 6 (1,2): 1 on 6 (1,3): 1 on 8 RemoveNode 1 (1,2): 1 on 8 (1,1): 1 on 2 (1,1): 1 on 4 AddNode 2 (1,2): 1 on 6 (1,1): -1 on 2 (1,3): 1 on 5 (1,1): 1 on 4 (1,1): 1 on 3 (1,2): 1 on 5 (1,2): 1 on 9 (1,3): 1 on 1 RemoveNode 2 (1,3): 1 on 9 (1,1): 1 on 1 AddNode 2 (1,2): 1 on 8 (1,2): 1 on 6 AddNode 8 (1,2): 1 on 8 (1,3): 1 on 8 AddNode 0 (1,3): 1 on 8 (1,1): 1 on 4 (1,1): -1 on 2 RemoveNode 9 (1,1): 1 on 1 (1,1): 1 on 3 (1,1): -1 on 3 (1,3): 1 on 4 (1,3): 1 on 5 AddNode 1 (1,2): 1 on 6 (1,2): -1 on 9 (1,1): 1 on 4 (1,3): 1 on 9 (1,3): 1 on 1 (1,3): 1 on 7 (1,2): -1 on 8 (1,2): 1 on 6 (1,1): 1 on 0 (1,2): -1 on 9 (1,1): 1 on 1 (1,2): 1 on 5 (1,1): 1 on 3 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): -1 on 6 (1,1): 1 on 0 (1,1): 1 on 4 (1,2): 1 on 9 (1,3): 1 on 5 (1,3): 1 on 2 AddNode 5 (1,3): 1 on 8 (1,2): 1 on 9 (1,1): 1 on 0 RemoveNode 5 (1,2): -1 on 7 (1,2): 1 on 6 (1,2): 1 on 6 (1,2): -1 on 5 (1,1): 1 on 3 (1,3): 1 on 5 (1,3): 1 on 4 (1,3): 1 on 4 (1,3): -1 on 2 (1,2): -1 on 7 (1,1): 1 on 3 (1,3): -1 on 7 (1,2): 1 on 6 (1,1): 1 on 2 AddNode 6 (1,1): -1 on 0 (1,2): -1 on 5 (1,3): 1 on 6 (1,1): 1 on 1 AddNode 9 (1,1): 1 on 4 (1,1): 1 on 1 AddNode 7 (1,3): 1 on 3 (1,2): -1 on 7 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 4 (1,3): 1 on 2 (1,2): 1 on 5 RemoveNode 8 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 6 (1,2): 1 on 7 (1,3): 1 on 3 (1,3): 1 on 8 (1,1): 1 on 3 RemoveNode 4 (1,3): 1 on 4 (1,3): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 2 (1,2): 1 on 8 AddNode 8 (1,3): 1 on 9 (1,1): 1 on 3 (1,2): 1 on 9 AddNode 5 (1,1): 1 on 3 RemoveNode 8 (1,2): 1 on 9 RemoveNode 7 (1,3): 1 on 0 (1,2): -1 on 9 RemoveNode 1 (1,1): -1 on 1 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 2 (1,2): 1 on 7 (1,2): 1 on 8 (1,2): 1 on 9 (1,2): 1 on 7 (1,1): -1 on 4 (1,3): 1 on 1 (1,2): -1 on 5 (1,1): 1 on 3 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 4 (1,2): 1 on 8 RemoveNode 3 (1,2): 1 on 9 AddNode 1 (1,3): -1 on 2 (1,2): -1 on 6 (1,2): 1 on 9 (1,3): -1 on 2 AddNode 2 (1,3): 1 on 0 RemoveNode 0 (1,1): -1 on 3 (1,2): 1 on 6 (1,2): 1 on 9 (1,2): 1 on 9 AddNode 6 (1,2): -1 on 7 RemoveNode 4 (1,2): 1 on 6 AddNode 4 (1,2): 1 on 6 (1,1): 1 on 
4 AddNode 0 (1,3): 1 on 4 RemoveNode 9 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): 1 on 6 AddNode 8 (1,1): 1 on 0 (1,1): 1 on 0 AddNode 7 (1,1): 1 on 3 (1,3): 1 on 5 (1,3): -1 on 7 (1,1): -1 on 4 RemoveNode 6 (1,3): 1 on 0 RemoveNode 7 (1,1): 1 on 4 (1,3): 1 on 3 (1,3): 1 on 2 (1,1): 1 on 4 AddNode 9 (1,2): 1 on 8 (1,1): 1 on 0 RemoveNode 0 (1,2): -1 on 8 (1,2): 1 on 6 AddNode 7 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 5 (1,2): 1 on 5 AddNode 3 (1,1): 1 on 3 RemoveNode 7 (1,1): 1 on 2 (1,3): 1 on 0 RemoveNode 4 (1,1): 1 on 4 (1,2): -1 on 8 (1,2): 1 on 7 RemoveNode 1 (1,2): 1 on 9 (1,2): 1 on 7 (1,2): 1 on 7 (1,3): -1 on 5 AddNode 6 (1,3): 1 on 8 RemoveNode 9 (1,2): 1 on 5 (1,3): 1 on 9 (1,1): 1 on 2 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 8 (1,2): 1 on 9 (1,3): 1 on 6 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): -1 on 2 RemoveNode 6 (1,2): 1 on 6 RemoveNode 2 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 3 (1,1): 1 on 1 (1,3): 1 on 8 AddNode 0 (1,2): 1 on 8 RemoveNode 5 (1,3): 1 on 6 AddNode 8 (1,1): -1 on 1 (1,1): 1 on 4 AddNode 2 (1,1): 1 on 2 RemoveNode 2 (1,1): -1 on 4 (1,1): 1 on 1 AddNode 9 (1,2): 1 on 6 (1,2): 1 on 5 RemoveNode 8 (1,3): 1 on 3 AddNode 7 (1,2): 1 on 8 (1,2): 1 on 6 (1,2): 1 on 5 RemoveNode 3 (1,3): 1 on 3 AddNode 4 (1,2): 1 on 5 (1,3): 1 on 1 (1,3): -1 on 6 (1,2): 1 on 5 RemoveNode 4 (1,3): -1 on 6 (1,3): 1 on 2 (1,1): -1 on 4 (1,3): 1 on 9 (1,1): -1 on 0 (1,2): 1 on 7 (1,1): 1 on 1 (1,1): -1 on 1 (1,1): 1 on 1 (1,3): 1 on 6 (1,2): 1 on 8 AddNode 2 (1,3): 1 on 0 (1,2): 1 on 8 RemoveNode 7 (1,1): 1 on 3 (1,1): 1 on 1 AddNode 1 (1,1): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 1 (1,3): 1 on 7 AddNode 8 (1,1): 1 on 4 (1,3): -1 on 7 (1,2): 1 on 8 (1,1): 1 on 2 (1,2): 1 on 5 AddNode 7 (1,1): -1 on 3 (1,2): -1 on 7 (1,2): 1 on 5 AddNode 4 (1,2): -1 on 9 (1,2): -1 on 7 (1,1): -1 on 2 (1,2): 1 on 6 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 8 RemoveNode 1 (1,2): 1 on 7 RemoveNode 4 (1,2): 1 on 5 (1,1): -1 on 2 (1,1): 1 on 0 (1,3): -1 on 2 (1,2): 1 on 6 AddNode 6 (1,2): 1 on 5 (1,3): 1 on 6 (1,2): 1 on 5 AddNode 4 (1,3): 1 on 8 (1,2): 1 on 8 (1,3): 1 on 1 (1,3): -1 on 6 (1,2): 1 on 8 (1,3): -1 on 3 (1,2): 1 on 6 (1,1): 1 on 2 (1,3): -1 on 8 (1,2): 1 on 5 (1,3): 1 on 3 AddNode 3 (1,1): 1 on 0 RemoveNode 8 (1,2): 1 on 7 AddNode 8 (1,3): 1 on 3 (1,1): -1 on 0 RemoveNode 0 (1,2): 1 on 8 (1,2): 1 on 9 RemoveNode 3 (1,1): -1 on 2 RemoveNode 8 (1,1): 1 on 0 RemoveNode 7 (1,1): 1 on 4 (1,2): 1 on 8 (1,3): 1 on 9 (1,1): 1 on 2 (1,3): 1 on 3 AddNode 3 (1,2): 1 on 8 AddNode 1 (1,2): 1 on 7 RemoveNode 6 (1,2): 1 on 5 (1,2): -1 on 6 RemoveNode 9 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 0 (1,1): 1 on 4 AddNode 7 (1,3): 1 on 3 (1,1): 1 on 4 (1,3): 1 on 5 (1,1): 1 on 0 (1,3): 1 on 6 (1,3): -1 on 2 RemoveNode 0 (1,3): 1 on 7 AddNode 0 (1,1): 1 on 1 (1,1): -1 on 2 AddNode 6 (1,1): 1 on 3 (1,2): 1 on 5 RemoveNode 2 (1,3): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 1 (1,1): 1 on 0 (1,2): 1 on 7 AddNode 2 (1,2): 1 on 6 (1,3): 1 on 0 RemoveNode 4 (1,1): 1 on 0 (1,1): 1 on 3 (1,3): 1 on 6 (1,2): 1 on 8 (1,3): -1 on 8 (1,2): -1 on 7 (1,1): -1 on 1 (1,1): 1 on 0 (1,1): 1 on 1 RemoveNode 0 (1,1): 1 on 4 RemoveNode 7 (1,2): 1 on 5 RemoveNode 1 (1,1): 1 on 2 (1,2): -1 on 7 (1,3): -1 on 9 (1,1): -1 on 4 (1,1): 1 on 4 (1,3): 1 on 5 (1,1): 1 on 4 RemoveNode 3 (1,1): 1 on 4 (1,1): 1 on 4 (1,1): 1 on 4 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 3 (1,3): 1 on 0 (1,3): 1 on 1 (1,1): 1 on 2 (1,1): 1 on 1 (1,1): 1 on 0 AddNode 4 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 4 (1,1): 1 on 3 (1,2): 1 on 5 (1,2): 1 on 6 AddNode 7 (1,3): 1 on 
8 (1,1): 1 on 2 (1,3): 1 on 3 AddNode 8 (1,1 ... 4 (1,3): 1 on 3 (1,2): 1 on 8 AddNode 5 (1,2): 1 on 7 (1,2): -1 on 8 RemoveNode 9 (1,2): -1 on 6 (1,2): 1 on 6 (1,3): 1 on 9 RemoveNode 6 (1,1): 1 on 2 (1,3): -1 on 2 (1,1): -1 on 4 RemoveNode 5 (1,2): 1 on 8 (1,2): 1 on 5 AddNode 2 (1,1): 1 on 4 (1,3): 1 on 3 RemoveNode 2 (1,3): 1 on 6 (1,2): 1 on 6 (1,3): 1 on 2 RemoveNode 7 (1,1): 1 on 3 AddNode 2 (1,2): 1 on 6 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): 1 on 7 RemoveNode 4 (1,2): 1 on 5 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 3 (1,2): 1 on 7 (1,1): 1 on 4 (1,3): 1 on 6 (1,3): 1 on 4 (1,1): 1 on 1 (1,3): 1 on 5 (1,1): -1 on 0 RemoveNode 3 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): -1 on 1 RemoveNode 4 (1,3): 1 on 1 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 0 (1,3): 1 on 2 (1,2): 1 on 5 AddNode 1 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 4 (1,1): 1 on 2 (1,3): 1 on 0 AddNode 9 (1,3): -1 on 6 AddNode 7 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 4 (1,3): 1 on 6 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 8 (1,2): 1 on 6 (1,2): 1 on 9 RemoveNode 7 (1,3): 1 on 9 (1,3): 1 on 3 (1,3): 1 on 5 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 7 (1,3): -1 on 2 (1,2): -1 on 9 (1,1): -1 on 4 (1,2): 1 on 7 RemoveNode 9 (1,3): 1 on 0 RemoveNode 1 (1,1): 1 on 0 AddNode 7 (1,3): 1 on 8 (1,2): 1 on 6 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 0 (1,2): -1 on 6 (1,3): 1 on 5 AddNode 3 (1,1): -1 on 3 AddNode 4 (1,3): 1 on 1 (1,1): 1 on 2 (1,2): 1 on 5 AddNode 9 (1,1): 1 on 4 (1,2): 1 on 6 RemoveNode 7 (1,3): -1 on 5 (1,1): 1 on 1 (1,3): 1 on 6 RemoveNode 9 (1,3): 1 on 9 RemoveNode 8 (1,1): 1 on 2 AddNode 6 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 7 AddNode 8 (1,2): 1 on 5 AddNode 5 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 5 (1,3): 1 on 5 (1,1): 1 on 4 (1,2): -1 on 5 RemoveNode 4 (1,2): 1 on 5 (1,3): 1 on 2 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): -1 on 9 (1,2): -1 on 6 AddNode 4 (1,3): 1 on 9 RemoveNode 4 (1,3): -1 on 1 RemoveNode 0 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 2 (1,3): 1 on 1 (1,2): 1 on 6 AddNode 7 (1,2): 1 on 9 AddNode 1 (1,2): 1 on 9 (1,2): 1 on 8 (1,1): 1 on 0 (1,3): 1 on 9 RemoveNode 6 (1,2): 1 on 8 AddNode 6 (1,3): -1 on 7 (1,2): 1 on 8 (1,3): -1 on 5 (1,2): 1 on 8 AddNode 0 (1,1): 1 on 2 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 0 (1,3): -1 on 9 (1,3): 1 on 0 AddNode 0 (1,3): 1 on 8 RemoveNode 7 (1,2): -1 on 5 (1,1): 1 on 1 (1,1): -1 on 3 RemoveNode 2 (1,1): 1 on 0 (1,2): -1 on 7 (1,3): 1 on 2 (1,1): 1 on 2 (1,3): 1 on 1 (1,1): -1 on 1 (1,2): 1 on 6 (1,3): 1 on 4 (1,2): 1 on 9 (1,3): -1 on 4 RemoveNode 3 (1,2): 1 on 6 (1,3): 1 on 4 RemoveNode 5 (1,1): 1 on 0 (1,3): 1 on 3 RemoveNode 1 (1,3): -1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 0 (1,2): -1 on 5 AddNode 3 (1,2): 1 on 9 (1,1): 1 on 1 AddNode 2 (1,2): 1 on 8 RemoveNode 0 (1,3): 1 on 8 RemoveNode 2 (1,3): 1 on 0 RemoveNode 6 (1,2): 1 on 6 (1,3): 1 on 5 (1,1): 1 on 1 AddNode 7 (1,1): 1 on 0 (1,2): 1 on 6 (1,1): 1 on 3 (1,2): 1 on 5 RemoveNode 8 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 4 (1,2): 1 on 8 AddNode 8 (1,2): 1 on 6 RemoveNode 8 (1,3): 1 on 9 AddNode 9 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 9 AddNode 8 (1,2): 1 on 8 (1,3): -1 on 5 AddNode 6 (1,3): 1 on 3 RemoveNode 7 (1,3): 1 on 8 (1,2): 1 on 7 AddNode 0 (1,2): -1 on 9 (1,3): 1 on 4 (1,2): 1 on 7 (1,3): -1 on 5 (1,1): -1 on 1 (1,1): 1 on 2 (1,3): 1 on 6 AddNode 5 (1,3): 1 on 7 RemoveNode 3 (1,2): 1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,3): 1 on 5 (1,1): 1 on 4 (1,3): 1 on 8 (1,3): 1 on 7 (1,2): -1 on 8 AddNode 3 (1,1): 1 on 0 RemoveNode 0 (1,2): 1 on 6 (1,1): 1 on 2 (1,3): 1 on 8 RemoveNode 4 (1,3): 1 on 2 
(1,2): -1 on 6 (1,3): 1 on 3 AddNode 2 (1,3): 1 on 5 (1,1): 1 on 2 (1,3): 1 on 2 RemoveNode 3 (1,3): 1 on 3 (1,2): 1 on 6 RemoveNode 5 (1,2): 1 on 9 (1,3): -1 on 9 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 0 (1,2): 1 on 5 AddNode 3 (1,3): -1 on 4 (1,3): 1 on 7 RemoveNode 2 (1,1): 1 on 1 (1,3): 1 on 5 RemoveNode 8 (1,1): 1 on 2 (1,2): 1 on 7 (1,2): 1 on 9 RemoveNode 0 (1,1): -1 on 2 RemoveNode 6 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 4 (1,1): 1 on 2 (1,1): 1 on 2 (1,2): 1 on 5 (1,1): 1 on 0 (1,2): 1 on 6 (1,3): -1 on 8 (1,3): 1 on 5 (1,3): 1 on 1 (1,1): 1 on 3 AddNode 4 (1,1): -1 on 3 (1,1): 1 on 2 (1,3): -1 on 5 RemoveNode 4 (1,3): 1 on 2 (1,1): 1 on 0 (1,3): -1 on 6 (1,1): 1 on 1 (1,2): 1 on 6 (1,2): -1 on 6 (1,2): 1 on 7 (1,3): -1 on 3 AddNode 7 (1,2): 1 on 7 RemoveNode 0 (1,3): 1 on 0 (1,1): 1 on 2 (1,2): 1 on 6 (1,3): 1 on 7 (1,1): 1 on 2 (1,2): 1 on 9 RemoveNode 7 (1,2): -1 on 6 AddNode 7 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): -1 on 6 RemoveNode 3 (1,3): 1 on 9 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 1 (1,2): 1 on 5 RemoveNode 7 (1,3): -1 on 7 AddNode 7 (1,1): 1 on 1 (1,2): 1 on 9 (1,2): -1 on 6 AddNode 5 (1,1): 1 on 1 (1,3): 1 on 2 (1,3): 1 on 2 (1,2): 1 on 9 AddNode 4 (1,3): 1 on 9 (1,3): -1 on 2 RemoveNode 4 (1,3): -1 on 4 RemoveNode 7 (1,1): -1 on 4 (1,3): 1 on 2 (1,3): -1 on 2 (1,1): 1 on 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,3): 1 on 0 (1,2): -1 on 7 AddNode 9 (1,1): 1 on 4 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 3 AddNode 2 (1,3): 1 on 2 RemoveNode 0 (1,2): -1 on 7 (1,1): 1 on 2 (1,3): 1 on 7 (1,2): 1 on 6 (1,2): -1 on 6 AddNode 6 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): -1 on 6 (1,3): 1 on 0 AddNode 7 (1,1): 1 on 0 (1,3): -1 on 8 RemoveNode 9 (1,2): 1 on 7 (1,2): 1 on 5 (1,1): 1 on 2 (1,1): 1 on 0 (1,3): 1 on 4 (1,1): 1 on 0 AddNode 0 (1,1): 1 on 3 RemoveNode 7 (1,3): 1 on 0 (1,2): 1 on 7 (1,2): 1 on 9 (1,2): 1 on 5 AddNode 7 (1,3): 1 on 8 (1,1): 1 on 1 RemoveNode 0 (1,2): 1 on 9 (1,2): -1 on 5 AddNode 1 (1,2): 1 on 5 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): 1 on 3 (1,3): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 1 (1,1): 1 on 1 AddNode 9 (1,3): 1 on 2 RemoveNode 7 (1,2): 1 on 5 RemoveNode 1 (1,1): -1 on 1 (1,1): -1 on 3 (1,3): 1 on 2 AddNode 0 (1,2): 1 on 7 (1,3): -1 on 0 (1,1): 1 on 3 AddNode 8 (1,2): 1 on 7 (1,3): 1 on 5 (1,2): 1 on 6 (1,3): 1 on 2 (1,3): 1 on 2 RemoveNode 0 (1,2): 1 on 5 AddNode 0 (1,1): -1 on 1 RemoveNode 2 (1,1): 1 on 2 (1,1): -1 on 2 (1,3): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 6 (1,3): -1 on 8 (1,1): 1 on 2 RemoveNode 0 (1,1): -1 on 3 (1,2): 1 on 9 (1,1): 1 on 4 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 8 (1,2): 1 on 5 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 6 (1,2): 1 on 9 (1,1): 1 on 2 (1,2): 1 on 8 (1,3): 1 on 9 RemoveNode 5 (1,2): 1 on 7 (1,2): 1 on 5 (1,2): -1 on 9 (1,3): 1 on 5 (1,2): 1 on 8 (1,3): 1 on 8 RemoveNode 6 (1,2): -1 on 6 (1,3): 1 on 6 (1,3): 1 on 3 (1,2): 1 on 8 (1,1): 1 on 1 (1,3): 1 on 1 (1,1): 1 on 1 AddNode 6 (1,1): 1 on 4 AddNode 3 (1,2): 1 on 8 (1,1): 1 on 2 RemoveNode 9 (1,3): 1 on 1 AddNode 2 (1,1): 1 on 0 (1,3): 1 on 7 AddNode 9 (1,1): -1 on 2 AddNode 1 (1,1): -1 on 1 (1,2): 1 on 8 RemoveNode 2 (1,1): 1 on 3 (1,2): 1 on 7 (1,2): 1 on 7 (1,2): 1 on 9 AddNode 2 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): 1 on 3 RemoveNode 2 (1,1): 1 on 4 AddNode 7 (1,1): 1 on 1 RemoveNode 9 (1,2): 1 on 9 (1,3): 1 on 7 AddNode 4 (1,2): 1 on 6 (1,3): -1 on 7 (1,2): -1 on 6 (1,3): 1 on 5 (1,2): -1 on 8 (1,1): 1 on 3 AddNode 2 (1,1): 1 on 1 (1,2): 1 on 8 (1,3): 1 on 2 (1,1): 1 on 4 (1,3): -1 on 8 (1,1): 1 on 3 (1,1): 1 on 4 RemoveNode 8 (1,1): 1 on 3 RemoveNode 
4 (1,2): 1 on 8 (1,2): 1 on 9 (1,3): -1 on 2 (1,1): -1 on 0 (1,2): 1 on 5 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 0 (1,1): -1 on 4 (1,2): 1 on 9 (1,3): 1 on 7 (1,3): 1 on 8 (1,1): 1 on 1 (1,3): -1 on 4 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 RemoveNode 1 (1,3): 1 on 3 (1,3): 1 on 0 (1,1): 1 on 3 RemoveNode 2 (1,3): 1 on 5 (1,1): -1 on 2 (1,2): 1 on 8 (1,1): 1 on 1 RemoveNode 7 (1,3): -1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 3 (1,1): 1 on 3 (1,1): 1 on 0 AddNode 9 (1,3): -1 on 7 (1,1): 1 on 1 RemoveNode 0 (1,3): -1 on 9 AddNode 2 (1,1): 1 on 3 (1,1): -1 on 0 (1,1): 1 on 0 (1,3): -1 on 1 (1,2): 1 on 8 (1,2): -1 on 8 (1,2): 1 on 9 (1,1): -1 on 4 RemoveNode 2 (1,3): 1 on 2 (1,3): 1 on 3 (1,2): 1 on 8 (1,3): 1 on 5 (1,2): 1 on 9 AddNode 2 (1,2): -1 on 8 RemoveNode 9 (1,3): -1 on 3 (1,3): -1 on 1 RemoveNode 3 (1,1): 1 on 0 AddNode 5 (1,3): 1 on 4 RemoveNode 6 (1,2): 1 on 7 (1,1): 1 on 2 AddNode 1 (1,1): 1 on 1 RemoveNode 2 (1,3): -1 on 1 AddNode 2 (1,3): -1 on 7 (1,2): -1 on 5 (1,1): -1 on 1 (1,1): -1 on 1 (1,1): 1 on 1 AddNode 8 (1,1): 1 on 3 AddNode 3 (1,2): 1 on 9 (1,3): 1 on 5 (1,1): 1 on 2 (1,2): 1 on 6 (1,2): -1 on 7 AddNode 6 (1,3): 1 on 9 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 9 AddNode 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,3): 1 on 1 (1,2): -1 on 9 (1,1): -1 on 3 RemoveNode 7 (1,2): 1 on 8 (1,3): 1 on 0 RemoveNode 0 (1,3): 1 on 1 (1,1): -1 on 3 RemoveNode 1 (1,1): 1 on 4 (1,3): 1 on 0 (1,3): 1 on 5 (1,1): 1 on 0 (1,3): 1 on 8 (1,1): 1 on 2 AddNode 9 (1,3): 1 on 6 (1,3): 1 on 6 (1,1): -1 on 1 (1,1): 1 on 4 AddNode 7 (1,2): 1 on 9 (1,2): -1 on 8 (1,2): 1 on 8 (1,2): -1 on 5 AddNode 4 (1,1): 1 on 0 (1,3): 1 on 5 (1,3): 1 on 2 RemoveNode 4 (1,3): -1 on 9 (1,1): 1 on 4 (1,2): 1 on 9 RemoveNode 9 (1,2): 1 on 7 (1,1): 1 on 2 (1,1): 1 on 3 (1,2): 1 on 7 RemoveNode 6 (1,3): 1 on 2 (1,1): 1 on 2 (1,1): 1 on 0 (1,1): 1 on 0 (1,2): -1 on 5 (1,1): 1 on 2 (1,2): 1 on 9 (1,1): -1 on 3 (1,1): 1 on 1 RemoveNode 5 (1,1): 1 on 4 (1,2): 1 on 7 (1,2): 1 on 7 (1,3): 1 on 1 (1,2): 1 on 9 (1,1): 1 on 1 (1,3): 1 on 4 (1,3): 1 on 4 RemoveNode 2 (1,1): 1 on 0 (1,3): 1 on 0 AddNode 4 (1,1): 1 on 0 (1,1): 1 on 3 RemoveNode 4 (1,2): 1 on 8 (1,2): -1 on 9 (1,2): -1 on 7 AddNode 5 (1,1): 1 on 0 AddNode 0 (1,2): 1 on 7 (1,2): -1 on 5 (1,1): 1 on 2 (1,3): 1 on 8 (1,1): -1 on 2 RemoveNode 0 (1,2): 1 on 5 (1,2): -1 on 7 RemoveNode 5 (1,3): 1 on 5 AddNode 2 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 3 (1,1): 1 on 4 (1,2): 1 on 5 (1,3): 1 on 3 AddNode 9 (1,1): 1 on 0 (1,2): 1 on 6 RemoveNode 0 (1,1): 1 on 1 (1,3): 1 on 2 (1,1): -1 on 4 (1,3): 1 on 3 AddNode 5 (1,2): 1 on 9 (1,3): 1 on 2 (1,2): -1 on 5 (1,2): 1 on 6 AddNode 1 (1,1): -1 on 0 RemoveNode 1 (1,2): -1 on 7 AddNode 4 (1,1): 1 on 0 (1,2): 1 on 9 (1,1): 1 on 2 RemoveNode 3 (1,3): -1 on 6 RemoveNode 8 (1,2): 1 on 8 (1,3): 1 on 7 (1,3): 1 on 0 RemoveNode 5 (1,2): -1 on 7 (1,3): 1 on 3 AddNode 6 (1,1): 1 on 2 AddNode 5 (1,2): 1 on 6 AddNode 3 (1,3): 1 on 2 RemoveNode 4 (1,3): 1 on 3 (1,2): 1 on 9 (1,3): 1 on 4 AddNode 0 (1,1): -1 on 4 RemoveNode 0 (1,2): 1 on 6 RemoveNode 5 (1,1): 1 on 0 (1,1): -1 on 4 (1,3): 1 on 1 (1,1): 1 on 0 AddNode 8 (1,1): -1 on 2 (1,3): -1 on 0 (1,3): 1 on 6 (1,1): 1 on 2 (1,2): 1 on 7 AddNode 5 (1,2): 1 on 9 (1,1): 1 on 2 (1,3): 1 on 5 (1,3): 1 on 1 RemoveNode 9 (1,2): 1 on 6 AddNode 0 (1,1): 1 on 1 (1,3): 1 on 3 (1,2): 1 on 6 (1,1): -1 on 0 (1,2): 1 on 9 (1,3): 1 on 1 (1,2): 1 on 8 (1,1): 1 on 3 (1,1): 1 on 3 (1,3): 1 on 4 RemoveNode 5 (1,2): -1 on 6 (1,3): 1 on 4 (1,1): -1 on 1 (1,1): 1 on 3 AddNode 5 (1,1): 
1 on 4 (1,3): 1 on 3 (1,1): -1 on 2 (1,3): -1 on 1 (1,1): 1 on 1 (1,2): 1 on 9 (1,2): 1 on 7 (1,1): 1 on 0 (1,3): 1 on 1 RemoveNode 0 (1,2): 1 on 5 (1,3): -1 on 8 (1,2): 1 on 6 (1,1): 1 on 4 (1,1): -1 on 3 RemoveNode 6 (1,3): 1 on 9 AddNode 9 (1,1): 1 on 0 RemoveNode 5 (1,3): 1 on 0 RemoveNode 3 (1,3): -1 on 4 (1,2): 1 on 8 (1,2): 1 on 7 (1,2): -1 on 7 (1,3): -1 on 6 Final state: 403 387 397 417 400 0 0 0 0 0 0 0 0 0 0 359 427 442 433 410 192 199 174 233 198 205 200 154 185 175 - - + - - - - + + + Took 2.724745 seconds avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1499 ch.0 max = 1680 ch.0 std-dev = 33.52342465 ch.1 avg = 1600 ch.1 min = 1504 ch.1 max = 1686 ch.1 std-dev = 35.54096228 ch.2 avg = 1600 ch.2 min = 1518 ch.2 max = 1683 ch.2 std-dev = 34.59421917 avg = 1250 std-dev = 0 avg = 4800 min = 4800 max = 4800 std-dev = 0 ch.0 avg = 1600 ch.0 min = 1600 ch.0 max = 1600 ch.0 std-dev = 0 ch.1 avg = 1600 ch.1 min = 1600 ch.1 max = 1600 ch.1 std-dev = 0 ch.2 avg = 1600 ch.2 min = 1600 ch.2 max = 1600 ch.2 std-dev = 0 avg = 1250 std-dev = 0 avg = 4800 min = 4799 max = 4801 std-dev = 0.2449489743 ch.0 avg = 1600 ch.0 min = 1507 ch.0 max = 1671 ch.0 std-dev = 32.92962192 ch.1 avg = 1600 ch.1 min = 1534 ch.1 max = 1701 ch.1 std-dev = 34.8995702 ch.2 avg = 1600 ch.2 min = 1523 ch.2 max = 1698 ch.2 std-dev = 31.45155004 avg = 1250 std-dev = 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] Test command err: 2024-11-21T17:53:45.078366Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:45.079101Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:45.079170Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:53:45.079291Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:53:45.079485Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:53:45.079494Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:45.079620Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T17:53:45.079625Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:45.079659Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:45.079723Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:45.081544Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 
IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:45.081561Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:45.081839Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:45.081873Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:45.081898Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:45.081925Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:45.081957Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:45.081990Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:45.082021Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:45.082025Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:45.082034Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T17:53:45.082038Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T17:53:45.082043Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:45.082048Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:45.082151Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:45.084714Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:45.084731Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:45.084738Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:45.084990Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:45.085033Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:45.085069Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:45.085074Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:45.085079Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:45.085516Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:45.085581Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:53:45.085586Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:45.086034Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:45.086051Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:45.086054Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:45.086085Z 
node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T17:53:45.086097Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:45.086123Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T17:53:45.086128Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T17:53:45.086241Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:45.086292Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T17:53:45.086299Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T17:53:45.086303Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T17:53:45.086308Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:45.086338Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:45.086347Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:45.086371Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T17:53:45.086374Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T17:53:45.086379Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:45.086431Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:45.086461Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:45.086477Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:53:45.086889Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T17:53:45.087002Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:45.087020Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:45.087024Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T17:53:45.087738Z node 1 :PIPE_CLIENT DEBUG: 
TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T17:53:45.087747Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T17:53:45.087761Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:45.087952Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:45.088020Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:45.088036Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T17:53:45.088040Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T17:53:45.088043Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:45.088084Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T17:53:45.088093Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T17:53:45.088096Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T17:53:45.088104Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T17:53:45.088107Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:45.088114Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:45.088123Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:45.088132Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:45.088135Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T17:53:45.088141Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T17:53:45.088145Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T17:53:45.088152Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T17:53:45.088179Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 
7] 2024-11-21T17:53:53.987378Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] immediate retry [20:678:2477] 2024-11-21T17:53:53.987381Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [20:678:2477] 2024-11-21T17:53:53.987397Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T17:53:53.987416Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:53.987433Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:53.987457Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:53:53.987464Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:53:53.987469Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:53:53.987477Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T17:53:53.987496Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T17:53:53.987509Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:53:53.987514Z node 20 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2024-11-21T17:53:53.987520Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [20:639:2448] 2024-11-21T17:53:53.987531Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [20:678:2477] 2024-11-21T17:53:53.987536Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [20:678:2477] 2024-11-21T17:53:53.987547Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [21:682:2142] 2024-11-21T17:53:53.987551Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [21:682:2142] 2024-11-21T17:53:53.987562Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): TDomainLocal::TEvClientConnected for dc-1 shard 72057594046678944 2024-11-21T17:53:53.987582Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): Send resolve request for /dc-1/tenant1 to schemeshard 72057594046678944 2024-11-21T17:53:53.987599Z node 20 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [20:678:2477] 2024-11-21T17:53:53.987615Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [21:682:2142] 2024-11-21T17:53:53.987618Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [21:682:2142] 2024-11-21T17:53:53.987630Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [21:682:2142] 
2024-11-21T17:53:53.987641Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [20:678:2477] 2024-11-21T17:53:53.987645Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [20:678:2477] 2024-11-21T17:53:53.987680Z node 20 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [21:681:2142] EventType# 271122945 2024-11-21T17:53:53.987704Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2024-11-21T17:53:53.987711Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:53:53.987758Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:53:53.987765Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:53:53.987905Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): HandleResolve from schemeshard 72057594046678944: Status: StatusSuccess Path: "/dc-1/tenant1" PathDescription { Self { Name: "tenant1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: false CreateTxId: 101 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 18446744073709551615 PathId: 18446744073709551615 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 2024-11-21T17:53:53.987924Z node 21 :LOCAL DEBUG: TDomainLocal(dc-1): Binding tenant /dc-1/tenant1 to hive 72057594037927937 (allocated resources: ) 2024-11-21T17:53:53.987999Z node 21 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:53.988005Z node 21 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:53.988018Z node 21 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[21:688:2143] 2024-11-21T17:53:53.988071Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [21:688:2143] 2024-11-21T17:53:53.988075Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [21:688:2143] 2024-11-21T17:53:53.988096Z node 21 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:53.988102Z node 21 :TABLET_RESOLVER DEBUG: SelectForward node 21 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [20:316:2259] 2024-11-21T17:53:53.988146Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [21:688:2143] 2024-11-21T17:53:53.988170Z node 
21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 20 [21:688:2143] 2024-11-21T17:53:53.988211Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [21:688:2143] 2024-11-21T17:53:53.988216Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2024-11-21T17:53:53.988313Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [21:688:2143] 2024-11-21T17:53:53.988398Z node 20 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([21:688:2143]) [20:695:2480] 2024-11-21T17:53:53.988456Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [21:688:2143] 2024-11-21T17:53:53.988465Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [21:688:2143] 2024-11-21T17:53:53.988469Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [21:688:2143] 2024-11-21T17:53:53.988480Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2024-11-21T17:53:53.988494Z node 21 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594037927937 Status=OK ClientId=[21:688:2143] 2024-11-21T17:53:53.988554Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [21:686:2143] EventType# 268959744 2024-11-21T17:53:53.988592Z node 20 :HIVE DEBUG: HIVE#72057594037927937 Handle TEvLocal::TEvRegisterNode from [21:686:2143] HiveId: 72057594037927937 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2024-11-21T17:53:53.988605Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2024-11-21T17:53:53.988610Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:53:53.988617Z node 20 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxRegisterNode(21)::Execute 2024-11-21T17:53:53.988646Z node 20 :HIVE WARN: HIVE#72057594037927937 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:53.988658Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{24, redo 152b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2024-11-21T17:53:53.988664Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:53:53.988707Z node 20 :HIVE DEBUG: HIVE#72057594037927937 TEvInterconnect::TEvNodeInfo NodeId 21 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" 2024-11-21T17:53:53.988763Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [20:697:2482] 2024-11-21T17:53:53.988767Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [20:697:2482] 2024-11-21T17:53:53.988778Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:53.988783Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [20:316:2259] 2024-11-21T17:53:53.988789Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [20:697:2482] 
2024-11-21T17:53:53.988797Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [20:697:2482] 2024-11-21T17:53:53.988803Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [20:697:2482] 2024-11-21T17:53:53.988806Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [20:697:2482] 2024-11-21T17:53:53.988820Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [20:697:2482] 2024-11-21T17:53:53.988833Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [20:697:2482] 2024-11-21T17:53:53.988837Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [20:697:2482] 2024-11-21T17:53:53.988840Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [20:697:2482] 2024-11-21T17:53:53.988844Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [20:697:2482] 2024-11-21T17:53:53.988846Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [20:697:2482] 2024-11-21T17:53:53.988852Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [20:696:2481] EventType# 268697616 2024-11-21T17:53:53.988864Z node 20 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([20:697:2482]) [20:698:2483] >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> THiveTest::TestCreateTablet >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> PQCountersLabeled::Partition >> TTopicYqlTest::CreateAndAlterTopicYql [GOOD] >> TTopicYqlTest::BadRequests >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestOwnership >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> TPQTest::TestAlreadyWritten [GOOD] >> THiveTest::TestSkipBadNode [GOOD] >> TScaleRecommenderTest::BasicTest >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteOwnerTablets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestAlreadyWritten [GOOD] Test command err: 2024-11-21T17:53:29.659101Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:53:29.660167Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:53:29.660245Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T17:53:29.660260Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:53:29.660265Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T17:53:29.660270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:53:29.660278Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:29.660283Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:29.660286Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:29.663265Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:29.663287Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T17:53:29.663300Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:29.664900Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:29.665700Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:29.665717Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:29.666003Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:29.666037Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:29.666050Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitConfigStep 2024-11-21T17:53:29.666131Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:29.666212Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T17:53:29.666381Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T17:53:29.666389Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2024-11-21T17:53:29.666397Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:29.666507Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T17:53:29.666513Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:53:29.666546Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:29.666580Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:29.666672Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:53:29.666697Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2024-11-21T17:53:29.666784Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 1. Completed. 
2024-11-21T17:53:29.666787Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2024-11-21T17:53:29.666790Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:29.666872Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2024-11-21T17:53:29.666877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2024-11-21T17:53:29.666891Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:29.666910Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:29.666984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:53:29.667028Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:29.667662Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:29.667689Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:29.667750Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:29.667758Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2024-11-21T17:53:29.668092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:29.668101Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:203:2211], now have 1 active actors on pipe 2024-11-21T17:53:29.668325Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "client-1" Generation: 1 Important: false } Consumers { Name: "client-2" Generation: 1 Important: false } } BootstrapConfig { } } 2024-11-21T17:53:29.668355Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2024-11-21T17:53:29.668360Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:29.668364Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 
67891, NewState PREPARING 2024-11-21T17:53:29.668402Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-2" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:29.668422Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T17:53:29.668470Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67892 Data { Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "client-2" Path: "/topic" } Operations { PartitionId: 2 Begin: 0 End: 0 Consumer: "client-1" Path: "/topic" } Immediate: false } 2024-11-21T17:53:29.668479Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY ... up to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [23:232:2234] sender: [23:332:2057] recipient: [23:14:2061] 2024-11-21T17:53:55.264602Z node 23 :PERSQUEUE INFO: new Cookie default|c5992b4c-2e42042d-51177e36-c0dbdac5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] 2024-11-21T17:53:55.405208Z node 24 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:55.405231Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [24:147:2057] recipient: [24:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [24:147:2057] recipient: [24:145:2168] Leader for TabletID 72057594037927938 is [24:151:2172] sender: [24:152:2057] recipient: [24:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:177:2057] recipient: [24:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.408146Z node 24 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:55.408311Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 24 actor [24:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 24 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 24 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 24 Important: false } 2024-11-21T17:53:55.408406Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [24:184:2197] 2024-11-21T17:53:55.408786Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [24:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:55.409047Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [24:185:2198] 2024-11-21T17:53:55.409351Z node 24 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [24:185:2198] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.410490Z node 24 :PERSQUEUE INFO: new Cookie default|a6bc9fe3-ce823e1f-8f698b5e-207cbec2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.411123Z node 24 :PERSQUEUE INFO: new Cookie default|78810980-3215f13c-d5d994e0-45d2cc2_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.411670Z node 24 :PERSQUEUE INFO: new Cookie default|9f0aee03-ccd49f1c-4b25bc37-f5c5a79d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] 2024-11-21T17:53:55.643902Z node 25 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:55.643921Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [25:147:2057] recipient: [25:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [25:147:2057] recipient: [25:145:2168] Leader for TabletID 72057594037927938 is [25:151:2172] sender: [25:152:2057] recipient: 
[25:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:177:2057] recipient: [25:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.647005Z node 25 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:55.647166Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 25 actor [25:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 25 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 25 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 Important: false } 2024-11-21T17:53:55.647255Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [25:184:2197] 2024-11-21T17:53:55.647662Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [25:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:55.647924Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [25:185:2198] 2024-11-21T17:53:55.648173Z node 25 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [25:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.649723Z node 25 :PERSQUEUE INFO: new Cookie default|6f4478ae-8137fc8c-dbddc429-7bab03ef_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.650646Z node 25 :PERSQUEUE INFO: new Cookie default|661c9316-82280c11-527f04fc-3166a624_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.651238Z node 25 :PERSQUEUE INFO: new Cookie default|1d14dbb7-ff3cd252-7041b4e8-f529863c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] |85.2%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] Test command err: 2024-11-21T17:51:56.876393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791814691566086:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:56.876419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:56.878058Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791813356121424:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:56.878224Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:56.911391Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:56.909536Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0020da/r3tmp/tmpeq9J4y/pdisk_1.dat 2024-11-21T17:51:56.944311Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28582, node 1 2024-11-21T17:51:56.955884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/0020da/r3tmp/yandex6LfI71.tmp 2024-11-21T17:51:56.955897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/0020da/r3tmp/yandex6LfI71.tmp 2024-11-21T17:51:56.955955Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/0020da/r3tmp/yandex6LfI71.tmp 2024-11-21T17:51:56.955998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:56.959217Z INFO: TTestServer started on Port 4509 GrpcPort 28582 TClient is connected to server localhost:4509 PQClient connected to localhost:28582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:57.012899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.012931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.013075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.013086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.021068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:57.021129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:57.021661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:57.024564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:57.044913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:57.174517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791818986534214:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.174541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.174775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791818986534226:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.174516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791817651089074:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.174538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791817651089062:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.174545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.175405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2024-11-21T17:51:57.177044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791818986534270:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.177069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.180649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791818986534228:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T17:51:57.180691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791817651089081:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T17:51:57.199529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.255011Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791817651089122:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.255583Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDMyYzgwMDEtZGQyNDdhMDYtMjM0NzM4ZTQtNGM5OTQxYjE=, ActorId: [2:7439791817651089050:2280], ActorState: ExecuteState, TraceId: 01jd7xm9qn5h4ja3zmg560b7bg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.256088Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.258669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.258698Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791818986534451:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.258769Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NThmYTlkZmYtZTI2MzEzN2MtZDYzOWZkOWQtMjViYjE0N2Y=, ActorId: [1:7439791818986534211:2300], ActorState: ExecuteState, TraceId: 01jd7xm9qmdw88ert29jzp5ehk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.258908Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.281790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:57.319051Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd7xm9vk0fe5anzanwe3f3da, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JhMmRiOGEtODJmOWRmOGUtODc0YzZlOTYtZGUzNDMyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791818986534745:3040] 2024-11-21T17:52:01.875743Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791814691566086:2190];send_to=[0:7307199536658146131:7762515 ... Result: Partition=0, SeqNo=(NULL) 2024-11-21T17:53:56.168777Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783426:2478] (SourceId=12345678, PreferedPartition=(NULL)) Start idle 2024-11-21T17:53:56.168784Z node 23 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:53:56.168947Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:56.168962Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [23:7439792327808783488:2478], now have 1 active actors on pipe 2024-11-21T17:53:56.168989Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 24, Generation: 1 2024-11-21T17:53:56.169036Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T17:53:56.169048Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T17:53:56.169075Z node 24 :PERSQUEUE INFO: new Cookie 12345678|615c2aa2-efd7f64c-bdcad597-2868abb0_0 generated for partition 0 topic 'rt3.dc1--topic1' owner 12345678 2024-11-21T17:53:56.169111Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:53:56.169129Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:56.169305Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T17:53:56.169312Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T17:53:56.169326Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:56.169409Z node 23 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|615c2aa2-efd7f64c-bdcad597-2868abb0_0 2024-11-21T17:53:56.169774Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|615c2aa2-efd7f64c-bdcad597-2868abb0_0 grpc read done: success: 0 data: 2024-11-21T17:53:56.169782Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|615c2aa2-efd7f64c-bdcad597-2868abb0_0 grpc read failed 2024-11-21T17:53:56.169845Z node 23 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|615c2aa2-efd7f64c-bdcad597-2868abb0_0 2024-11-21T17:53:56.169854Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|615c2aa2-efd7f64c-bdcad597-2868abb0_0 is DEAD Finish: 0 2024-11-21T17:53:56.169922Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Random seed for debugging is 1732211636169920 2024-11-21T17:53:56.169932Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:53:56.170025Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:56.170040Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [23:7439792327808783488:2478] destroyed 2024-11-21T17:53:56.170044Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T17:53:56.171960Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Wait for "init_response" 2024-11-21T17:53:56.172045Z node 23 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T17:53:56.172055Z node 23 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2024-11-21T17:53:56.172166Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "test-message-group-id" } 2024-11-21T17:53:56.172188Z node 23 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "test-message-group-id" from ipv6:[::1]:34132 2024-11-21T17:53:56.172195Z node 23 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:34132 proto=v1 topic=topic1 durationSec=0 2024-11-21T17:53:56.172198Z node 23 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:53:56.172539Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2024-11-21T17:53:56.172565Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T17:53:56.172571Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:53:56.172573Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:53:56.172577Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:53:56.172983Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:53:56.174422Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T17:53:56.174468Z node 23 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [23:7439792327808783506:2493] connected; active server actors: 1 2024-11-21T17:53:56.174489Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=test-message-group-id 2024-11-21T17:53:56.174493Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Update the table 2024-11-21T17:53:56.174534Z node 23 :PERSQUEUE_READ_BALANCER INFO: 
[72075186224037893][rt3.dc1--topic1] pipe [23:7439792327808783506:2493] disconnected; active server actors: 1 2024-11-21T17:53:56.174541Z node 23 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [23:7439792327808783506:2493] disconnected no session 2024-11-21T17:53:56.176852Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:53:56.176862Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:53:56.176864Z node 23 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [23:7439792327808783491:2493] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Start idle 2024-11-21T17:53:56.176868Z node 23 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:53:56.177002Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:56.177014Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [23:7439792327808783516:2493], now have 1 active actors on pipe 2024-11-21T17:53:56.177043Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 24, Generation: 1 2024-11-21T17:53:56.177068Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T17:53:56.177077Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T17:53:56.177101Z node 24 :PERSQUEUE INFO: new Cookie test-message-group-id|f040441-d11313b5-8cabec90-9ddbe451_0 generated for partition 0 topic 'rt3.dc1--topic1' owner test-message-group-id 2024-11-21T17:53:56.177137Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T17:53:56.177157Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:56.177498Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T17:53:56.177503Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T17:53:56.177515Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:56.177562Z node 23 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group-id|f040441-d11313b5-8cabec90-9ddbe451_0 Init response: status: SUCCESS init_response { session_id: "test-message-group-id|f040441-d11313b5-8cabec90-9ddbe451_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP } 2024-11-21T17:53:56.177807Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Session ID is "test-message-group-id|f040441-d11313b5-8cabec90-9ddbe451_0" 2024-11-21T17:53:56.177975Z :Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError INFO: Wait for session to die 2024-11-21T17:53:56.178372Z node 23 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group-id|f040441-d11313b5-8cabec90-9ddbe451_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:53:56.178389Z node 23 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: bad write request - 'blocks_headers' at position 0 is invalid: given codec (id 3) is not configured for the topic. Configured codecs are raw (id 0), gzip (id 1), lzop (id 2) sessionId: test-message-group-id|f040441-d11313b5-8cabec90-9ddbe451_0 2024-11-21T17:53:56.178462Z node 23 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group-id|f040441-d11313b5-8cabec90-9ddbe451_0 is DEAD status: BAD_REQUEST issues { message: "bad write request - \'blocks_headers\' at position 0 is invalid: given codec (id 3) is not configured for the topic. Configured codecs are raw (id 0), gzip (id 1), lzop (id 2)" issue_code: 500003 severity: 1 } 2024-11-21T17:53:56.178543Z node 23 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:53:56.178631Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:56.178647Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [23:7439792327808783516:2493] destroyed 2024-11-21T17:53:56.178652Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> test.py::test[table_range-range_tables_with_view--Results] [GOOD] >> test.py::test[tpch-q15-default.txt-Analyze] >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> TOlapReboots::CreateDropStandaloneTable [GOOD] >> TOlapReboots::AlterTtlSettings >> test.py::test[tpch-q15-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q15-default.txt-Debug] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2024-11-21T17:53:23.305739Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:53:23.307543Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:53:23.307641Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T17:53:23.307659Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:53:23.307663Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T17:53:23.307669Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:53:23.307677Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:23.307684Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:23.307699Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2024-11-21T17:53:23.316155Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:23.316189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2210], now have 1 active actors on pipe 2024-11-21T17:53:23.316208Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:23.318641Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 
Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:23.321015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:23.321042Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:23.321341Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:23.321391Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitConfigStep 2024-11-21T17:53:23.321485Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:23.321577Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:213:2217] 2024-11-21T17:53:23.321771Z node 1 :PERSQUEUE DEBUG: Initializing topic 'topic' partition 0. Completed. 2024-11-21T17:53:23.321778Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:213:2217] 2024-11-21T17:53:23.321787Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:23.321925Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2024-11-21T17:53:23.321931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:53:23.321973Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:23.322012Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:23.322080Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:23.323034Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:23.323129Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:23.323137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:220:2222], now have 1 active actors on pipe 2024-11-21T17:53:23.323467Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:23.323476Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:225:2226], now have 1 active actors on pipe 2024-11-21T17:53:23.323686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2024-11-21T17:53:23.323695Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T17:53:23.323715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2024-11-21T17:53:23.323720Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:23.323728Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2024-11-21T17:53:23.323756Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:23.323772Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:23.324566Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:23.324580Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2024-11-21T17:53:23.324585Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2024-11-21T17:53:23.324634Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2024-11-21T17:53:23.324640Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2024-11-21T17:53:23.324647Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2024-11-21T17:53:23.324651Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2024-11-21T17:53:23.324654Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2024-11-21T17:53:23.324673Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 137 MaxStep: 30137 
PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:23.324684Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:23.325346Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T17:53:23.325357Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARING 2024-11-21T17:53:23.325361Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARED Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:23.326874Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67891 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 100 2024-11-21T17:53:23.326893Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARED 2024-11-21T17:53:23.326899Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PLANNING 2024-11-21T17:53:23.326904Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67891 2024-11-21T17:53:23.326932Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PLANNED MinStep: 137 MaxStep: 30137 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2024-11-21T17:53:23.326943Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T17:53:23.327000Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 175 RawX2: 4294969486 } } Step: 200 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:23.327617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKey ... 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2205 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:57.468854Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-1200 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:57.468859Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-66 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:57.468864Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-1377 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:57.476308Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:57.509052Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:57.629566Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> test.py::test[tpch-q15-default.txt-Debug] [GOOD] >> test.py::test[tpch-q15-default.txt-ForceBlocks] >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> TDelayedResponsesTests::Test [GOOD] >> TFetchRequestTests::CheckAccess [GOOD] >> PQCountersSimple::PartitionWriteQuota ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] Test command err: 2024-11-21T17:53:28.939307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792209579465535:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:28.939615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:28.941698Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792208882079041:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:28.941809Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:28.963476Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:28.965583Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d41/r3tmp/tmpjRgDD3/pdisk_1.dat 2024-11-21T17:53:28.995108Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1990, node 1 2024-11-21T17:53:29.010008Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000d41/r3tmp/yandexmmd966.tmp 2024-11-21T17:53:29.010022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000d41/r3tmp/yandexmmd966.tmp 2024-11-21T17:53:29.010082Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000d41/r3tmp/yandexmmd966.tmp 2024-11-21T17:53:29.010126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:53:29.014774Z INFO: TTestServer started on Port 13459 GrpcPort 1990 TClient is connected to server localhost:13459 PQClient connected to localhost:1990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:53:29.039386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:29.039426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:29.040890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:29.071156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:29.071181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:29.073168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:29.075082Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:53:29.075240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:53:29.094871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:53:29.244507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792213874433812:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:29.244535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792213874433817:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:29.244543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:29.245099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792213874433848:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:29.245135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:29.245444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:53:29.250026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792213874433826:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T17:53:29.279203Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439792213177046709:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:29.279292Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmQ4YzM5NmItYTIxYzM0YjUtNTc1MDhmMDktNTRjYTZjMmY=, ActorId: [2:7439792213177046685:2280], ActorState: ExecuteState, TraceId: 01jd7xq3nj20gv8at0tv2921bp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:29.279771Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:29.279855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:53:29.308875Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792213874434014:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:29.308959Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWRjZGU1YmMtNzUwOTdmZWUtMWY1Zjg2NjgtNmU4ZTEwMWQ=, ActorId: [1:7439792213874433809:2300], ActorState: ExecuteState, TraceId: 01jd7xq3mvc7gzgahnx3mpnqqp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:29.309107Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:29.339445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:53:29.354935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:53:29.382015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xq3rr57f50vwfdqrvkkr9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI2Y2U3MmMtYWMyYzNiYTYtMjAxYjU2MjYtZGJjZDIxNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439792213874434331:3041] 2024-11-21T17:53:33.939572Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439792209579465535:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:33.939614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:53:33.942093Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439792208882079041:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:33.942121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:53:34.492990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2024-11-21T17:53:34.583682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T17:53:34.639771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part pro ... [9:7439792313981886117:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:58.956404Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:53:58.956895Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439792316582584401:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:58.956924Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:53:59.346835Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:53:59.448864Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:53:59.492636Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:53:59.542630Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:53:59.587915Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T17:53:59.644818Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3445495608623639121, "Root", "00415F536F757263655F36", 1732211639688, 1732211639688, 0, 13); 2024-11-21T17:53:59.698418Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715695. Ctx: { TraceId: 01jd7xr1ca1sg7hrxwvr8nv6th, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZDIxZDMzMzMtZWY3M2VjZTktMTdmZGEwMjgtMWIxNzI5ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:53:59.701590Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:53:59.701605Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:53:59.701616Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:53:59.701623Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2024-11-21T17:53:59.701650Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439792339751692561:3727], Recipient [9:7439792339751691908:3317]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:53:59.701674Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7439792339751692560:3727], Recipient [9:7439792339751691908:3317]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_6" 2024-11-21T17:53:59.701687Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7439792339751691908:3317], Recipient [9:7439792339751692560:3727]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T17:53:59.701693Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_6 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2024-11-21T17:53:59.701711Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439792339751692560:3727], Recipient [9:7439792339751691908:3317]: NActors::TEvents::TEvPoison 2024-11-21T17:53:59.701765Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7439792313981886111:2049], Recipient [9:7439792339751692560:3727]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T17:53:59.701782Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:53:59.702138Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7439792313981886213:2137], Recipient [9:7439792339751692560:3727]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=9&id=NDcyN2U0NDQtMzk0ZDQzNTktZTE5MWMxYTctYTZhNDRmMWM=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T17:53:59.702149Z node 9 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:53:59.715249Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7439792313981886213:2137], Recipient [9:7439792339751692560:3727]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=NDcyN2U0NDQtMzk0ZDQzNTktZTE5MWMxYTctYTZhNDRmMWM=" PreparedQuery: "1afbcd56-9916fa63-1b4e5218-a630255b" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd7xr1d1574p9e8wm049k921" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1732211639688 } items { uint64_value: 1732211639688 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 7 2024-11-21T17:53:59.715289Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2024-11-21T17:53:59.715296Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) GetOldSeqNo 2024-11-21T17:53:59.715341Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439792339751692587:3727], Recipient [9:7439792339751691907:3316]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:53:59.716327Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271187968, Sender [9:7439792339751692560:3727], Recipient [9:7439792339751691907:3316]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000A_Source_6" } PipeClient { RawX1: 7439792339751692587 RawX2: 38654709391 } } 2024-11-21T17:53:59.716376Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) OnPartitionChosen 2024-11-21T17:53:59.716398Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439792339751692560:3727], Recipient [9:7439792339751691907:3316]: NActors::TEvents::TEvPoison 2024-11-21T17:53:59.716408Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439792339751692588:3727], Recipient [9:7439792339751691908:3317]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:53:59.716424Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7439792339751692560:3727], Recipient [9:7439792339751691908:3317]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2024-11-21T17:53:59.716435Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [9:7439792339751691908:3317], Recipient [9:7439792339751692560:3727]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T17:53:59.716446Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Update the table 
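For reference, the two YQL statements that TTableHelper prints on a single line in the PQ_PARTITION_CHOOSER trace above are restated below with line breaks only; the statement text is copied verbatim from the trace:

    --!syntax_v1
    DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

    --!syntax_v1
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
    UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
        (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

The ProducerId literal 00415F536F757263655F36 used to seed this table in the "Run query" UPSERT above appears to be the hex encoding of "\0A_Source_6", matching the SourceId "\000A_Source_6" in the CmdGetMaxSeqNo request above.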
2024-11-21T17:53:59.716490Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439792339751692560:3727], Recipient [9:7439792339751691908:3317]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 2024-11-21T17:53:59.726843Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7439792313981886213:2137], Recipient [9:7439792339751692560:3727]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=NDcyN2U0NDQtMzk0ZDQzNTktZTE5MWMxYTctYTZhNDRmMWM=" PreparedQuery: "9ad6e7bb-62efe80e-2abb051b-534a85da" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 5 2024-11-21T17:53:59.726858Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:53:59.726867Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=157 2024-11-21T17:53:59.726876Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792339751692560:3727] (SourceId=A_Source_6, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3445495608623639121 AND Topic = "Root" AND ProducerId = "00415F536F757263655F36" 2024-11-21T17:53:59.739004Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715698. Ctx: { TraceId: 01jd7xr1dg9f4ywxad2tt5zk8j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YjE2MzhkOWItZGQ5NTgyYjMtOWNlMjM3NzgtYjVhNzNmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] >> TPQTest::TestChangeConfig [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> test.py::test[tpch-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q15-default.txt-Plan] >> TPersQueueTest::SetupReadSession [GOOD] >> TPersQueueTest::TestBigMessage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:38.827028Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:53:38.827780Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:53:38.827830Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2024-11-21T17:53:38.827835Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:53:38.827838Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2024-11-21T17:53:38.827841Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:53:38.827847Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:38.827851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] empty tx queue 2024-11-21T17:53:38.827854Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:178:2057] recipient: [1:14:2061] 2024-11-21T17:53:38.830606Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.830624Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2024-11-21T17:53:38.830641Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:38.832517Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:38.833257Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 
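For readability, the configuration that TestChangeConfig first applies through TEvUpdateConfig (version 1, txId 12345, logged above on one line) is restated here as an indented text proto; the field values are copied from the log, and only selected fields are shown:

    CacheSize: 10485760
    PartitionConfig {
      MaxCountInPartition: 20000000
      MaxSizeInPartition: 104857600
      LifetimeSeconds: 0
      ImportantClientId: "user1"
      LowWatermark: 6291456
      SourceIdLifetimeSeconds: 3600
      WriteSpeedInBytesPerSecond: 102400
      BurstSize: 102400
      MaxWriteInflightSize: 90000000
    }
    TopicName: "rt3.dc1--asdfgs--topic"
    Version: 1
    Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false }
    Consumers { Name: "user1" Generation: 1 Important: true }

Later in the same test output the tablet on node 26 applies versions 22 and 23 of this config, shrinking MaxCountInPartition to 100 and then 5, raising the partition count from 5 to 10, and replacing the important consumers (aaa, then bbb and ccc), which appears to be the configuration change the test name refers to.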
2024-11-21T17:53:38.833280Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:38.833456Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2024-11-21T17:53:38.833471Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitConfigStep 2024-11-21T17:53:38.833526Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:38.833599Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2024-11-21T17:53:38.833951Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Completed. 2024-11-21T17:53:38.833956Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2024-11-21T17:53:38.833961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:38.834323Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2024-11-21T17:53:38.834329Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:53:38.834332Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2024-11-21T17:53:38.834334Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2024-11-21T17:53:38.834363Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:38.834366Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:38.834396Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:38.834460Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:38.834874Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T17:53:38.834929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.834933Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:192:2203], now have 1 active actors on pipe 2024-11-21T17:53:38.836403Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.836413Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:197:2207], now have 1 active actors on pipe 2024-11-21T17:53:38.836426Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T17:53:38.836431Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2024-11-21T17:53:38.836710Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2024-11-21T17:53:38.837057Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2024-11-21T17:53:38.837394Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2024-11-21T17:53:38.837729Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2024-11-21T17:53:38.837764Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2024-11-21T17:53:38.837767Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2024-11-21T17:53:38.837791Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2024-11-21T17:53:38.837808Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2024-11-21T17:53:38.837811Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2024-11-21T17:53:38.837845Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.837848Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2209], now have 1 active actors on pipe 2024-11-21T17:53:38.837856Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T17:53:38.837859Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 
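As a size check on the write above: the five client PART messages for SourceId 'sourceid0' (four of 511957 bytes plus one of 49324 bytes) add up to

    4 * 511957 + 49324 = 2047828 + 49324 = 2097152 bytes = 2 MiB

so this looks like a single 2 MiB payload being handed to partition 0 in roughly 512 KB chunks; the total is an inference from the sizes in the log, not a value the test prints itself.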
2024-11-21T17:53:38.837872Z node 1 :PERSQUEUE INFO: new Cookie default|55c0706-9e9ce644-de3909d0-5561ec5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T17:53:38.837890Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T17:53:38.837902Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:38.837936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:38.837940Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2211], now have 1 active actors on pipe 2024-11-21T17:53:38.837950Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2024-11-21T17:53:38.837953Z node 1 :PERSQUEUE DEBUG: [PQ: 720575 ... s { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 20 Important: false } Consumers { Name: "bbb" Generation: 21 Important: true } Consumers { Name: "ccc" Generation: 21 Important: true } 2024-11-21T17:54:00.221748Z node 25 :PERSQUEUE INFO: new Cookie default|ae907c08-2c566e03-5b3a3134-a4f5a694_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.222660Z node 25 :PERSQUEUE INFO: new Cookie default|c62076d2-f682a018-c9edfb48-9f4e3251_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.223589Z node 25 :PERSQUEUE INFO: new Cookie default|3fbb9f0-36edba7f-534a1b8f-613fb865_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] 2024-11-21T17:54:00.442889Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:00.442907Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [26:147:2057] recipient: [26:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [26:147:2057] recipient: [26:145:2168] Leader for TabletID 72057594037927938 is [26:151:2172] sender: [26:152:2057] recipient: [26:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:177:2057] recipient: [26:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.446104Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:00.446362Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 22 actor [26:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 22 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 22 ReadRuleGenerations: 22 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 Important: false } Consumers { Name: "aaa" Generation: 22 Important: true } 2024-11-21T17:54:00.446526Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [26:184:2197] 2024-11-21T17:54:00.446933Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [26:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:00.447319Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [26:185:2198] 2024-11-21T17:54:00.447587Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [26:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:00.447976Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [26:186:2199] 2024-11-21T17:54:00.448316Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [26:186:2199] 2024-11-21T17:54:00.448699Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [26:187:2200] 2024-11-21T17:54:00.448950Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [26:187:2200] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:00.449306Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [26:188:2201] 2024-11-21T17:54:00.449554Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [26:188:2201] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.451314Z node 26 :PERSQUEUE INFO: new Cookie 
default|44c78560-e0637fbd-232fac06-f7ad83fc_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.452569Z node 26 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:00.453608Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] bootstrapping 6 [26:238:2238] 2024-11-21T17:54:00.453924Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [26:238:2238] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:00.454380Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] bootstrapping 7 [26:239:2239] 2024-11-21T17:54:00.454658Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [26:239:2239] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:00.455137Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] bootstrapping 8 [26:240:2240] 2024-11-21T17:54:00.455414Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [26:240:2240] 2024-11-21T17:54:00.455799Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] bootstrapping 9 [26:241:2241] 2024-11-21T17:54:00.456044Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [26:241:2241] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:00.456654Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] bootstrapping 5 [26:237:2237] 2024-11-21T17:54:00.457071Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [26:237:2237] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.462265Z node 26 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 23 actor [26:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 
5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 ImportantClientId: "bbb" ImportantClientId: "ccc" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 23 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } ReadRuleGenerations: 22 ReadRuleGenerations: 23 ReadRuleGenerations: 23 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 Important: false } Consumers { Name: "bbb" Generation: 23 Important: true } Consumers { Name: "ccc" Generation: 23 Important: true } 2024-11-21T17:54:00.462728Z node 26 :PERSQUEUE INFO: new Cookie default|b7db8b98-e92690a5-fcf1a668-41cb57b7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.463324Z node 26 :PERSQUEUE INFO: new Cookie default|5cfa33e1-2e3a4388-d05bae70-8b43b402_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:00.463895Z node 26 :PERSQUEUE INFO: new Cookie default|424ab93b-8895906d-3f06643a-c365ce39_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> test.py::test[tpch-q15-default.txt-Plan] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] >> TTopicYqlTest::BadRequests [GOOD] >> ResourceBrokerConfigValidatorTests::TestMinConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoDefaultQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> TOlapReboots::CreateDropTable |85.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2024-11-21T17:51:57.011227Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791815676855371:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.011251Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.020225Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791816875805088:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.040077Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002028/r3tmp/tmptBz7U2/pdisk_1.dat 2024-11-21T17:51:57.043625Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:57.044747Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.068022Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28287, node 1 2024-11-21T17:51:57.085183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/002028/r3tmp/yandexw3HMzl.tmp 2024-11-21T17:51:57.085196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/002028/r3tmp/yandexw3HMzl.tmp 2024-11-21T17:51:57.085251Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/002028/r3tmp/yandexw3HMzl.tmp 2024-11-21T17:51:57.085298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:57.090286Z INFO: TTestServer started on Port 8684 GrpcPort 28287 TClient is connected to server localhost:8684 PQClient connected to localhost:28287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:57.111528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.111549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.113172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:57.144670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.144700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.145895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.145993Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:57.146204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:51:57.165394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:57.341851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791816875805277:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.341873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791816875805296:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.341905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.347152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T17:51:57.350545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791815676856374:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.350661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791815676856361:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.350679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.371012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791815676856377:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:51:57.371045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791816875805306:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T17:51:57.401832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.419787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.484303Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791815676856674:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.484696Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE4YjM2ZjgtNmJkMDRlMWUtZTliMjhiYWItYTBmOThhMGE=, ActorId: [1:7439791815676856344:2300], ActorState: ExecuteState, TraceId: 01jd7xm9x53faxgvs8rd1j46b4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.485155Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.497360Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791816875805378:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.497785Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTk3ZjBlZC1mNWJlMTA4Zi04MmQ0NDgyLTFkNWFmMmEz, ActorId: [2:7439791816875805275:2280], ActorState: ExecuteState, TraceId: 01jd7xm9wxbppnh1w09ffvygfs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.497940Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.522166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:57.556744Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd7xma31dqv68md9dyb51kf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQzOWQ1NmYtYjE3ZWZjYzItN2JiN2Q5NzctYmJhNzkyNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791815676856895:3055] 2024-11-21T17:52:02.011735Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791815676855371:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:02.011769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:52:02.015868Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791816875805088:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:02.015907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initializa ... t proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:53:56.169004Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:53:56.195700Z node 25 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd7xqxykdz1va1x0tah4x7xk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=Y2EzM2IxYy02NjdiMTMyMC1jOTNmMDk1OC1jMjg4OTUyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [25:7439792327011220961:3023] 2024-11-21T17:54:00.799842Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7439792322716252169:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:00.799873Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:54:00.800845Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7439792326002310309:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:00.800875Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:54:01.227403Z node 25 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [25:7439792348486058018:3358] connected; active server actors: 1 2024-11-21T17:54:01.227549Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T17:54:01.227794Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T17:54:01.228102Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] BALANCER INIT DONE for rt3.dc1--legacy--topic1: (0, 72075186224037892) 2024-11-21T17:54:01.228165Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72057594046644480, NodeId 25, Generation 2 2024-11-21T17:54:01.228292Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T17:54:01.228373Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T17:54:01.229106Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T17:54:01.229173Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T17:54:01.229185Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T17:54:01.229188Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T17:54:01.229194Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T17:54:01.229212Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:01.229222Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T17:54:01.229224Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T17:54:01.229335Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:01.229359Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [25:7439792348486058042:2440], now have 1 active actors on pipe 2024-11-21T17:54:01.229396Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--legacy--topic1] TEvClientConnected TabletId 72075186224037892, NodeId 26, Generation 1 2024-11-21T17:54:01.276678Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:01.276698Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [25:7439792348486058016:3356], now have 1 active actors on pipe 2024-11-21T17:54:01.276709Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:54:01.277003Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [25:7439792322716252530:2194] txId 281474976715677 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T17:54:01.278552Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply 
new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T17:54:01.278596Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:01.278727Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. 
Step TInitConfigStep 2024-11-21T17:54:01.278750Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892] Config applied version 0 actor [25:7439792322716252530:2194] txId 281474976715677 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T17:54:01.278823Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:54:01.278909Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [26:7439792351772114920:2377] 2024-11-21T17:54:01.279459Z node 26 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--legacy--topic1' partition 0. Completed. 
2024-11-21T17:54:01.279476Z node 26 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--legacy--topic1' partition 0 generation 1 [26:7439792351772114920:2377] 2024-11-21T17:54:01.279488Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--legacy--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:54:01.279906Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:54:01.279915Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--legacy--topic1' partition 0 user c2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:54:01.279976Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> test.py::test[tpch-q15-default.txt-Results] [GOOD] >> test.py::test[tpch-q19-default.txt-Analyze] >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> KqpDataIntegrityTrails::Upsert-LogEnabled >> TScaleRecommenderTest::BasicTest [GOOD] >> TStorageBalanceTest::TestScenario1 >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> TSchemeShardSubDomainTest::SchemeQuotas >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTable >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> test.py::test[tpch-q19-default.txt-Analyze] [GOOD] >> test.py::test[tpch-q19-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2024-11-21T17:53:24.552220Z :HappyWay INFO: Random seed for debugging is 1732211604552213 2024-11-21T17:53:24.674551Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792190152085493:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:24.675116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:24.678291Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792189137096688:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:24.708217Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000d80/r3tmp/tmpnmDM9S/pdisk_1.dat 2024-11-21T17:53:24.713164Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:24.714434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:24.736066Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63740, node 1 2024-11-21T17:53:24.759850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000d80/r3tmp/yandexnv44oc.tmp 2024-11-21T17:53:24.759864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000d80/r3tmp/yandexnv44oc.tmp 2024-11-21T17:53:24.759931Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000d80/r3tmp/yandexnv44oc.tmp 2024-11-21T17:53:24.759986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:53:24.769400Z INFO: TTestServer started on Port 18243 GrpcPort 63740 TClient is connected to server localhost:18243 PQClient connected to localhost:63740 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:53:24.807985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:24.808013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:24.809794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:24.809912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:24.809922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:24.810727Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:53:24.810958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:24.818121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T17:53:24.997437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792190152086267:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:24.997460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792190152086241:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:24.997526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:24.998224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T17:53:24.998364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792190152086297:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:24.998396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:25.002402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792190152086270:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T17:53:25.032196Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439792193432064140:2283], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:25.032326Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDRjYWQ2NS0xN2ZhMDA1NC01Y2NlODM5ZS0yNWVlYjMyZQ==, ActorId: [2:7439792193432064099:2277], ActorState: ExecuteState, TraceId: 01jd7xpzga5gfs64y36va7v8tt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:25.032818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:53:25.032881Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:25.069502Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792194447053726:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:25.069613Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2Y3NDRlMmEtYTc5NjlhNDAtOTMyNjUzZTUtNGE3ODgzNWM=, ActorId: [1:7439792190152086238:2299], ActorState: ExecuteState, TraceId: 01jd7xpzg517f4v50vwfnhzvyb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:25.069802Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:25.111436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:53:25.141715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:63740", true, true, 1000); 2024-11-21T17:53:25.178999Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xpzn52vj2zb7ypatj0ra7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUxMzgxZjEtZjAxYjA1NmMtNDlhYWRjNjItYWZjYjA5Nzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439792194447054024:2925] 2024-11-21T17:53:29.674400Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439792190152085493:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:29.674432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:53:29.677999Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439792189137096688:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:29.678035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T17:53:30.265693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:63740 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T17:53:30.284744Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:63740 MetaRequest { CmdCreateTopic { ... keup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:01.265717Z node 7 :PERSQUEUE INFO: new Cookie default|cb904f2c-2ec64585-2539733a-fb46fa5c_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:01.500817Z node 7 :PERSQUEUE INFO: new Cookie default|a4c819a3-56668618-6f3cc5ae-9e23a076_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured kesus quota request event from [7:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR **** Total histogram: ****
Interval=0ms: 1
Interval=10000ms: 0
Interval=1000ms: 3
Interval=100ms: 0
Interval=10ms: 0
Interval=1ms: 0
Interval=20ms: 0
Interval=2500ms: 2
Interval=5000ms: 0
Interval=500ms: 0
Interval=50ms: 0
Interval=5ms: 0
Interval=999999ms: 0
**** **** **** **** 2024-11-21T17:54:01.908528Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:01.908551Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:01.912120Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:01.912322Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:01.912468Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:185:2198] 2024-11-21T17:54:01.913232Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [8:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:01.914423Z node 8 :PERSQUEUE INFO: new Cookie default|48d245b5-2d00ee2a-6a606c31-c5493ffb_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:01.915491Z node 8 :PERSQUEUE INFO: new Cookie default|7d1f73e3-d5d306e6-13968bb-96b0940d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:02.231879Z node 8 :PERSQUEUE INFO: new Cookie default|be32affe-1ab82bc7-94977cdb-44fbac65_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured kesus quota request event from [8:203:2213] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:02.476662Z node 8 :PERSQUEUE INFO: new Cookie default|b81bd29-156c32cc-4707432-d573aeb_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:02.722981Z node 8 :PERSQUEUE INFO: new Cookie default|58e57257-d8bc20e0-4b6112d7-d0df54ea_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured 
kesus quota request event from [8:203:2213] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:02.948325Z node 8 :PERSQUEUE INFO: new Cookie default|81745029-739b185b-159a54ca-1581b7ce_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [8:203:2213] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:24.437097Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.437123Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.441457Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.444086Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 
1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2024-11-21T17:53:24.444336Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:24.444914Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:53:24.445658Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:53:24.446129Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.451184Z node 1 :PERSQUEUE INFO: new Cookie default|2416fecc-3dbdc708-b1eb0b32-a3a15d1f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:175:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T17:53:24.707706Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.707733Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:179:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:182:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:183:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.717712Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.717733Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:184:2194] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup 
to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:261:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:26.268741Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:26.268988Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "aaa" Generation: 2 Important: true } 2024-11-21T17:53:26.269199Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2260] 2024-11-21T17:53:26.269870Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:268:2260] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:26.270588Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:269:2261] 2024-11-21T17:53:26.271092Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:269:2261] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup ... 
ration 8 [65:803:2661] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.324964Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T17:54:03.329722Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:838:2057] recipient: [65:14:2061] 2024-11-21T17:54:03.334692Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:845:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:848:2057] recipient: [65:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:742:2608] sender: [65:849:2057] recipient: [65:847:2688] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:851:2057] recipient: [65:847:2688] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:03.341589Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:03.341608Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:54:03.341755Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:913:2744] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:03.342460Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:914:2745] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:03.344004Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [65:914:2745] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.347462Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [65:913:2744] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.352422Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T17:54:03.357201Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:948:2057] recipient: [65:14:2061] 2024-11-21T17:54:03.362203Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:955:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:958:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [65:850:2689] sender: [65:959:2057] recipient: [65:957:2771] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:961:2057] recipient: [65:957:2771] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:03.369119Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:03.369134Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:54:03.369223Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1025:2829] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:03.369782Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1026:2830] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:03.371033Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [65:1026:2830] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.373853Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [65:1025:2829] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.378053Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T17:54:03.382143Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1060:2057] recipient: [65:14:2061] 2024-11-21T17:54:03.386300Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1067:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1070:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [65:960:2772] sender: [65:1071:2057] recipient: [65:1069:2856] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1073:2057] recipient: [65:1069:2856] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:03.392983Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:03.392999Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:54:03.393113Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1139:2916] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:03.393655Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1140:2917] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:03.395016Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [65:1140:2917] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.397670Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [65:1139:2916] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.401750Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T17:54:03.405772Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [65:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1176:2057] recipient: [65:14:2061] 2024-11-21T17:54:03.410308Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1183:2057] recipient: [65:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1186:2057] recipient: [65:1185:2945] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1072:2857] sender: [65:1187:2057] recipient: [65:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [65:1188:2946] sender: [65:1189:2057] recipient: [65:1185:2945] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:03.417550Z node 65 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:03.417565Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:54:03.417682Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [65:1257:3007] 2024-11-21T17:54:03.418175Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [65:1258:3008] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:03.419686Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [65:1258:3008] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.422611Z node 65 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [65:1257:3007] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:03.427204Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 size 8364507 2024-11-21T17:54:03.431433Z node 65 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8364507 |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/kqprun |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |85.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled [GOOD] >> test.py::test[tpch-q19-default.txt-Debug] [GOOD] >> test.py::test[tpch-q19-default.txt-ForceBlocks] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> TSchemeShardSubDomainTest::DiskSpaceUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:04.022522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:04.022563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.022570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:04.022584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:04.028150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:04.028165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:04.028183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.046374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:04.135124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:04.135142Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:04.146759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:04.147358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:04.147379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:04.148875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:04.149033Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:04.173852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.191476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:04.203726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:04.268296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.268304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:04.268330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.276613Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:04.328847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:04.340147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.340259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:04.340326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:04.340333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:04.341237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:04.341248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:04.341251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T17:54:04.341602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:04.341921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.356278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.356861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:04.357480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:04.366973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:04.367257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.367312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:04.367323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.367391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:04.367401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.367421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.367433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:04.368072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.368107Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:04.368171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368177Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:04.368186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:04.368190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.368195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:04.368199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.368203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:04.368206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:04.368217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:04.368222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:04.368246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:04.369414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.369429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.369432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:04.369435Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:04.369438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.369452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:54:04.369981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:54:04.370045Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2024-11-21T17:54:04.370705Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:54:04.371534Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:54:04.371946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" 
TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:04.371977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.372033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2024-11-21T17:54:04.372652Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:54:04.373149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:04.373173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2024-11-21T17:54:04.373249Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2024-11-21T17:54:04.373651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:04.373668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.373674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2024-11-21T17:54:04.373986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:04.373998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T17:54:04.374029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:04.374040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 
2024-11-21T17:54:04.374052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:04.374054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:04.374092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:04.374111Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:04.374116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:04.374119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:285:2277] 2024-11-21T17:54:04.374129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:04.374131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:285:2277] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:04.374169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:04.374185Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 19us result status StatusPathDoesNotExist 2024-11-21T17:54:04.374211Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:04.374361Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:04.374373Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 14us result status StatusPathDoesNotExist 2024-11-21T17:54:04.374383Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 
SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:04.374425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:04.374436Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 11us result status StatusSuccess 2024-11-21T17:54:04.374514Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:04.022496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:04.022539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.022545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:04.022550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:04.028165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:04.028216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:04.028262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.046371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:04.135123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:04.135142Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:04.146754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:04.147270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:04.147295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:04.148553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:04.148767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:04.173860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.191501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:04.203673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:04.268299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.268307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:04.268330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.276511Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:04.328857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:04.340146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.340269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:04.340344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:04.340354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:54:04.341126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:04.341199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:04.341210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:04.341213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:04.341588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:04.341855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.356278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.356791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:04.357480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:04.366962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:04.367255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.367296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:04.367305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.367383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T17:54:04.367390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.367414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.367425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:04.368029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.368074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:04.368149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368155Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:04.368168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:04.368172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.368178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:04.368183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.368188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:04.368191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:04.368205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:04.368211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:04.368215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:04.369379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.369408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.369413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:04.369416Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:04.369421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T17:54:04.369432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... calPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:04.381196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:04.381198Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:04.381201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:04.381407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:04.381415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:04.381418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:04.381420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:04.381423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T17:54:04.381429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2024-11-21T17:54:04.381432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:273:2265] 2024-11-21T17:54:04.381742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:04.381752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:04.381754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:04.381756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:04.381760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:04.381763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:04.381954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:04.382217Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2024-11-21T17:54:04.382268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted 
for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T17:54:04.382432Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:54:04.382457Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T17:54:04.382475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:04.382525Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2024-11-21T17:54:04.382546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:04.382595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:04.382629Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T17:54:04.382644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:04.382658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:04.382685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:274:2266] 2024-11-21T17:54:04.382695Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T17:54:04.382729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:04.382778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:04.382839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for 
PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:04.382867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:04.382929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:04.382932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:04.382939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.383235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:54:04.383251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:04.383677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:04.383698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:54:04.383706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:04.383718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:04.383744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:04.383767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2024-11-21T17:54:04.383853Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:04.383877Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusPathDoesNotExist 2024-11-21T17:54:04.383904Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:04.383969Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:04.383983Z node 1 :SCHEMESHARD_DESCRIBE 
INFO: Tablet 72057594046678944 describe path "/MyRoot" took 15us result status StatusSuccess 2024-11-21T17:54:04.384027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueTest::TestWriteStat [GOOD] >> TPersQueueTest::TestWriteSessionsConflicts ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled [GOOD] Test command err: Trying to start YDB, gRPC: 61261, MsgBus: 3289 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001edd/r3tmp/tmpH0eRnO/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61261, node 1 TClient is connected to server localhost:3289 TClient is connected to server localhost:3289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
>> test.py::test[pg-select_win_partition_sort-default.txt-Debug] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDeleteTablet >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:04.636207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:04.636223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.636250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:04.636256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:04.636268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:04.636271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:04.636277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.636330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:04.643261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:04.643278Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:04.645453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:04.646151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:04.646185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:04.647333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:04.647446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:04.647512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.647572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:04.648171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2024-11-21T17:54:04.648422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.648432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.648458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:04.648464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.648469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:04.648478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.649421Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:04.659906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:04.659968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.660016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:04.660079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:04.660084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.660711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.660728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:04.660759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.660765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:04.660768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:04.660771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:04.661031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.661038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:04.661040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:04.661256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:54:04.661261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.661264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.661268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.661627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:04.661971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:04.662006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:04.662135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.662151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:04.662155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.662188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:04.662192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.662212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.662220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:04.662490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.662494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.662525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.662528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:04.662600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.662604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2024-11-21T17:54:04.662611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:04.662614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.662617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:04.662620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.662623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:04.662625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:04.662633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:04.662636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:04.662639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:04.662852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.662862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.662865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:04.662868Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:04.662871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.662879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
BUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:04.699344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:04.699519Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T17:54:04.699851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:54:04.699906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T17:54:04.699975Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:04.700082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.700105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:04.700212Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:04.700326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:04.700351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2024-11-21T17:54:04.700550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:04.700609Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2024-11-21T17:54:04.700677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:54:04.700709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:04.700764Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:04.700855Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:54:04.700955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:04.700979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:04.701018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:04.701035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409551 2024-11-21T17:54:04.701123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:04.701128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:04.701149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T17:54:04.701469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:04.701479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:04.701491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.701739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:54:04.701747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:54:04.701776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:04.701780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:04.701868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T17:54:04.702273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:04.702280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:04.702295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:54:04.702298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:54:04.702312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T17:54:04.702321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:04.702325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:04.702374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:04.702379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:04.702646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
2024-11-21T17:54:04.702660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T17:54:04.702718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:04.702735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T17:54:04.702750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:04.702753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:04.702814Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:04.702830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:04.702835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:611:2518] 2024-11-21T17:54:04.702848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:04.702860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:04.702863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:611:2518] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:04.702923Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:04.702952Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 40us result status StatusPathDoesNotExist 2024-11-21T17:54:04.702991Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:04.703043Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:04.703062Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 21us result status 
StatusSuccess 2024-11-21T17:54:04.703120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |85.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> TSchemeShardSubDomainTest::RedefineErrors >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> test.py::test[tpch-q19-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q19-default.txt-Plan] [GOOD] >> test.py::test[tpch-q19-default.txt-Results] >> TGRpcNewCoordinationClient::CheckUnauthorized ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:04.022494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:04.022536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.022540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:04.022544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:04.028170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T17:54:04.028197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:04.028220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.046372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:04.135123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:04.135142Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:04.146755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:04.147338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:04.147359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:04.148510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:04.148720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:04.173837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.191494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:04.203978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.268278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:04.268296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.268304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:04.268330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.276501Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:04.328848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:04.340158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.340259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:04.340334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:04.340344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:04.341199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:04.341210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:04.341213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:04.341537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:04.341878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341885Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.341888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.356278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.356800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:04.357481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:04.366967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:04.367256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.367307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:04.367318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T17:54:04.367380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:04.367386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.367414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.367425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:04.368050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.368098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:04.368173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.368181Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:04.368191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:04.368195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.368200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:04.368205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.368209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:04.368213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:04.368241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:04.368248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:04.368251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:04.369448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.369469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.369473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:04.369478Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:04.369483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.369500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 37 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:05.581167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944 2024-11-21T17:54:05.581177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944 2024-11-21T17:54:05.581253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2024-11-21T17:54:05.581274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2024-11-21T17:54:05.581282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2024-11-21T17:54:05.581298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2024-11-21T17:54:05.581302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2 2024-11-21T17:54:05.581420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:05.581427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2024-11-21T17:54:05.581443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2024-11-21T17:54:05.581452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2024-11-21T17:54:05.581938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2024-11-21T17:54:05.581966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2024-11-21T17:54:05.582007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:05.582012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:05.582049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2024-11-21T17:54:05.582061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:05.582065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:1015:2885], at schemeshard: 72057594046678944, txId: 137, path id: 2 2024-11-21T17:54:05.582069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1015:2885], at schemeshard: 72057594046678944, txId: 137, path id: 10 2024-11-21T17:54:05.582156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2024-11-21T17:54:05.582163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet72057594046678944 2024-11-21T17:54:05.582217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2024-11-21T17:54:05.582366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T17:54:05.582375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T17:54:05.582378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2024-11-21T17:54:05.582384Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2024-11-21T17:54:05.582388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2024-11-21T17:54:05.582481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T17:54:05.582489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2024-11-21T17:54:05.582492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2024-11-21T17:54:05.582495Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2024-11-21T17:54:05.582498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2024-11-21T17:54:05.582506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2024-11-21T17:54:05.583156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2024-11-21T17:54:05.583186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2024-11-21T17:54:05.583191Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2024-11-21T17:54:05.583299Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2024-11-21T17:54:05.583342Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2024-11-21T17:54:05.583368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2024-11-21T17:54:05.583372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2024-11-21T17:54:05.583385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2024-11-21T17:54:05.583390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T17:54:05.583395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2024-11-21T17:54:05.583411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3 2024-11-21T17:54:05.583698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2024-11-21T17:54:05.583818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2024-11-21T17:54:05.584359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2024-11-21T17:54:05.584407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2024-11-21T17:54:05.584414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#137:0 ProgressState at tabletId# 72057594046678944 2024-11-21T17:54:05.584423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2024-11-21T17:54:05.584489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId#137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 965 RawX2: 4294970141 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2024-11-21T17:54:05.585128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2024-11-21T17:54:05.585163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> test.py::test[pg-select_win_partition_sort-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_partition_sort-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_partition_sort-default.txt-Results] >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateTabletChangeToExternal [GOOD] Test command err: 2024-11-21T17:53:55.129429Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:55.130259Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:55.130340Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:53:55.130494Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:53:55.130746Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:53:55.130757Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:55.130925Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 
72057594037932033 2024-11-21T17:53:55.130930Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:55.130965Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:55.131032Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:55.132996Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:55.133015Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:55.133322Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:55.133356Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:55.133378Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:55.133400Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:55.133424Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:55.133449Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:55.133479Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:55.133483Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:55.133496Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T17:53:55.133499Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T17:53:55.133505Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:55.133511Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:55.133614Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:55.136774Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:55.136808Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:55.136820Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:55.137206Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:55.137276Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:55.137330Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:55.137342Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:55.137351Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:55.137952Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:55.138060Z node 1 :BS_NODE DEBUG: 
{NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:53:55.138072Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:55.138571Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:55.138608Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:55.138612Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:55.138651Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T17:53:55.138666Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:55.138698Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T17:53:55.138702Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T17:53:55.138828Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:55.138872Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T17:53:55.138879Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T17:53:55.138882Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T17:53:55.138888Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:55.138917Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:55.138925Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:55.138944Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T17:53:55.138947Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T17:53:55.138951Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:55.139001Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:55.139031Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:55.139043Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:53:55.139350Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T17:53:55.139433Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:55.139448Z node 1 
:TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:55.139452Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T17:53:55.140172Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T17:53:55.140180Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T17:53:55.140195Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:55.140423Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:55.140500Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:55.140519Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T17:53:55.140524Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T17:53:55.140528Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:55.140595Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T17:53:55.140607Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T17:53:55.140611Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T17:53:55.140619Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T17:53:55.140624Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:55.140634Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:55.140644Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:55.140668Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:55.140673Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T17:53:55.140682Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T17:53:55.140686Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T17:53:55.140695Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T17:53:55.140730Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 
4b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:05.448086Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute Owner: 72057594037927937 OwnerIdx: 0 TabletType: Dummy TabletBootMode: TABLET_BOOT_MODE_EXTERNAL BindedChannels { StoragePoolName: "def1" } BindedChannels { StoragePoolName: "def2" } BindedChannels { StoragePoolName: "def3" } 2024-11-21T17:54:05.448093Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 State: ReadyToWork 2024-11-21T17:54:05.448100Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Tablet(Dummy.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 18) 2024-11-21T17:54:05.448109Z node 18 :HIVE TRACE: HIVE#72057594037927937 Node(18, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T17:54:05.448121Z node 18 :HIVE TRACE: HIVE#72057594037927937 UpdateTotalResources: ObjectId (72057594037927937,0): {Memory: 1048576} -> {} 2024-11-21T17:54:05.448128Z node 18 :HIVE TRACE: HIVE#72057594037927937 UpdateTotalResources: Type Dummy: {Memory: 1048576} -> {} 2024-11-21T17:54:05.448135Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Sending TEvStopTablet(Dummy.72075186224037888.Leader.1 gen 1) to node 18 2024-11-21T17:54:05.448173Z node 18 :HIVE DEBUG: HIVE#72057594037927937 CreateTabletFollowers Tablet Dummy.72075186224037888.Leader.1 2024-11-21T17:54:05.448181Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute Existing tablet Dummy.72075186224037888.Leader.1 has been successfully updated 2024-11-21T17:54:05.448185Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T17:54:05.448198Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{10, redo 442b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2024-11-21T17:54:05.448204Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:8} Tx{15, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:05.458518Z node 18 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] bootstrap ActorId# [18:387:2349] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:8:0:0:230:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:54:05.458552Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Id# [72057594037927937:2:8:0:0:230:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:54:05.458558Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] restore Id# [72057594037927937:2:8:0:0:230:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:54:05.458567Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:8:0:0:230:1] Marker# BPG33 2024-11-21T17:54:05.458581Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:8:0:0:230:1] Marker# BPG32 2024-11-21T17:54:05.458605Z node 18 :BS_PROXY DEBUG: Send to queueActorId# [18:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:8:0:0:230:1] FDS# 230 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:54:05.458965Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] received {EvVPutResult Status# OK ID# [72057594037927937:2:8:0:0:230:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 22 } Cost# 81811 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# 
Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 23 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:54:05.458985Z node 18 :BS_PROXY_PUT DEBUG: [a55b41de52eb2a08] Result# TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T17:54:05.458992Z node 18 :BS_PROXY_PUT INFO: [a55b41de52eb2a08] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:54:05.459020Z node 18 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:230:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T17:54:05.459042Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2024-11-21T17:54:05.459057Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Complete (72057594037927937,0) TabletId: 72075186224037888 SideEffects: {Notifications: 0x10080003 [18:49:2090],0x10040207 [18:383:2345],0x10040201 [18:383:2345]} 2024-11-21T17:54:05.459085Z node 18 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvStopTablet TabletId:(72075186224037888,0) Generation:1 2024-11-21T17:54:05.459097Z node 18 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [18:49:2090], reason = ReasonStop Marker# TSYS29 2024-11-21T17:54:05.459102Z node 18 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2024-11-21T17:54:05.459133Z node 18 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2024-11-21T17:54:05.459139Z node 18 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2024-11-21T17:54:05.459160Z node 18 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2024-11-21T17:54:05.459242Z node 18 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvTablet::TEvTabletDead tabletId:72075186224037888 generation:1 reason:33 2024-11-21T17:54:05.459253Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [18:380:2343] 2024-11-21T17:54:05.459256Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [18:380:2343] 2024-11-21T17:54:05.459273Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [18:50:2090] 2024-11-21T17:54:05.459276Z node 18 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [18:50:2090] 2024-11-21T17:54:05.459281Z node 18 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [18:49:2090] EventType# 268960257 2024-11-21T17:54:05.459296Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T17:54:05.459306Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2024-11-21T17:54:05.459309Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:05.459318Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Execute for tablet Dummy.72075186224037888.Leader.1 status 5 reason ReasonPill generation 1 follower 0 from local [18:49:2090] 2024-11-21T17:54:05.459325Z node 18 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 
2024-11-21T17:54:05.459329Z node 18 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - sending 2024-11-21T17:54:05.459340Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:54:05.459346Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{16, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:05.459353Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10080004 [18:49:2090]} 2024-11-21T17:54:05.459363Z node 18 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvDeadTabletAck TabletId:(72075186224037888,0) 2024-11-21T17:54:05.459372Z node 18 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - executing 2024-11-21T17:54:05.459376Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T17:54:05.459378Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:05.459381Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:54:05.459384Z node 18 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2024-11-21T17:54:05.459387Z node 18 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:54:05.459390Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:54:05.459393Z node 18 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{17, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:05.459395Z node 18 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:54:05.459435Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [18:389:2351] 2024-11-21T17:54:05.459439Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [18:389:2351] 2024-11-21T17:54:05.459455Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:05.459463Z node 18 :TABLET_RESOLVER DEBUG: SelectForward node 18 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [18:312:2292] 2024-11-21T17:54:05.459474Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [18:389:2351] 2024-11-21T17:54:05.459479Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [18:389:2351] 2024-11-21T17:54:05.459488Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [18:389:2351] 2024-11-21T17:54:05.459491Z node 18 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [18:389:2351] 2024-11-21T17:54:05.459495Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T17:54:05.459546Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:54:05.459564Z 
node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:54:05.459572Z node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:54:05.459576Z node 18 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:54:05.459583Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T17:54:05.459590Z node 18 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T17:54:05.459600Z node 18 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [18:312:2292] CurrentLeaderTablet: [18:329:2304] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2024-11-21T17:54:05.459612Z node 18 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:06.013405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:06.013431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:06.013437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:06.013441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:06.013455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:06.013459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:06.013467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:06.013546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:06.023297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:06.023318Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:06.025755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:06.026263Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:06.026287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:06.027322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:06.027450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:06.027512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.027576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:06.028438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.028664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.028670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.028697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:06.028702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:06.028707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:06.028718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.029751Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:06.044885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:06.044973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.045032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:06.045099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:06.045106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.045891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.045914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:06.045955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.045964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:06.045968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:06.045972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:06.046310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.046319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:06.046323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:06.046561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.046567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.046585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.046592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.047110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:06.047503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:06.047548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:06.047709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.047732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:06.047738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.047783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:06.047789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.047814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:06.047824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:06.048164Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.048171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:06.048210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.048214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:06.048305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.048312Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:06.048324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:06.048327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.048332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:06.048336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.048340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:06.048343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:06.048352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:06.048357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:06.048360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:06.048616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:06.048627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:06.048630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:06.048634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:06.048638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:06.048647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
n, txId: 107, at schemeshard: 72057594046678944 2024-11-21T17:54:06.084310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:54:06.084315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:568:2523] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T17:54:06.084955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:06.084992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.085036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:06.085074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:06.085080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.085531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:06.085557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2024-11-21T17:54:06.085586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.085594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:06.085598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2024-11-21T17:54:06.085603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2024-11-21T17:54:06.086048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.086060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#108:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:06.086066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2024-11-21T17:54:06.086414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.086425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.086430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet 72057594046678944 2024-11-21T17:54:06.086436Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2024-11-21T17:54:06.086463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:06.086781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2024-11-21T17:54:06.086808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2024-11-21T17:54:06.086870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.086887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:06.086893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet 72057594046678944 2024-11-21T17:54:06.086947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2024-11-21T17:54:06.086954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet 72057594046678944 2024-11-21T17:54:06.086981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:06.086994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2024-11-21T17:54:06.087419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.087429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:06.087465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.087471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 108, path id: 2 2024-11-21T17:54:06.087543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.087549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2024-11-21T17:54:06.087559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2024-11-21T17:54:06.087563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T17:54:06.087567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: 
false 2024-11-21T17:54:06.087572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T17:54:06.087577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2024-11-21T17:54:06.087581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2024-11-21T17:54:06.087592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:06.087597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2024-11-21T17:54:06.087601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2024-11-21T17:54:06.087689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:54:06.087700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:54:06.087705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2024-11-21T17:54:06.087711Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T17:54:06.087714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:06.087728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2024-11-21T17:54:06.088302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T17:54:06.088366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T17:54:06.088374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T17:54:06.088450Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T17:54:06.088467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T17:54:06.088473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:593:2548] TestWaitNotification: OK eventTxId 108 2024-11-21T17:54:06.088553Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:06.088580Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 35us result status StatusSuccess 2024-11-21T17:54:06.088650Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" 
PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestFollowers >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::CreateDropDescribe >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay_yt/query_replay_yt |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |85.3%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:06.586212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:06.586232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:06.586236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:06.586240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:06.586252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:06.586255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:06.586260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:06.586321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:06.593157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:06.593172Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:06.595134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:06.595777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:06.595812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:06.597198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:06.597361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:06.597433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.597500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:06.598457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.598760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.598774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.598811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:06.598818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:06.598824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:06.598839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.600065Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:06.612187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:06.612295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.612371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:06.612443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:06.612451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.613410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.613440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:06.613493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.613505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:06.613508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:06.613514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:06.614037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.614050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:06.614056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:06.614447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.614456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.614464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.614470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.614943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:06.615302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:06.615338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:06.615497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.615517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:06.615523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.615566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:06.615571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.615597Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:06.615606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:06.615944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.615948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:06.615980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.615983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:06.616049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.616053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:06.616061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:06.616064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.616068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:06.616072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.616074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:06.616077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:06.616083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:06.616087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:06.616090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:06.616338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:06.616349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:06.616353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:06.616356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:06.616359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:06.616369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T17:54:06.619822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:06.619863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:06.620057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.620062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.620067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:06.620070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T17:54:06.620087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:06.620334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:54:06.620352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T17:54:06.620392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.620403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:06.620406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:06.620460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:54:06.620464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:06.620478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:06.620483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:06.620488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:06.621073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.621094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:06.621143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:06.621167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.621173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:54:06.621179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:06.621254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.621263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:06.621278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:06.621287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:06.621294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:06.621301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:06.621306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:06.621310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:06.621334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:06.621340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:06.621345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:06.621349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:06.621512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:06.621524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:06.621529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:06.621534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:06.621540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:06.621676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T17:54:06.621686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:06.621689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:06.621692Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:06.621696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:06.621705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:06.622701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:06.622772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T17:54:06.623581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:06.623626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.623674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2024-11-21T17:54:06.624254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:06.624287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2024-11-21T17:54:06.624343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:06.624360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T17:54:06.624375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:06.624378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:54:06.624450Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:06.624476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:06.624481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:310:2302] 2024-11-21T17:54:06.624498Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:06.624515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:06.624518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:310:2302] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Analyze] >> test.py::test[pg-select_win_partition_sort-default.txt-Results] [GOOD] >> test.py::test[pg-select_win_rank-default.txt-Debug] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowerPromotion >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:07.089154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:07.089173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:07.089176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:07.089179Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:07.089189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:07.089191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:07.089197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:07.089253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:07.096335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:07.096352Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:07.098315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:07.098826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:07.098857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:07.100085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:07.100267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:07.100352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.100438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:07.101230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.101449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:07.101456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.101482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:07.101486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:07.101491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:07.101499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.102411Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:07.112207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:07.112285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.112335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:07.112384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, 
propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:07.112389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.112991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.113010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:07.113043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.113049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:07.113051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:07.113054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:07.113332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.113338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:07.113340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:07.113570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.113575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.113578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.113582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.113960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:07.114262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:07.114298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:07.114414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.114428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T17:54:07.114433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.114466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:07.114470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.114492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:07.114500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:07.114790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:07.114796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:07.114829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.114832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:07.114895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.114900Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:07.114907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:07.114910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.114914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:07.114916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.114919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:07.114921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:07.114928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:07.114931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:07.114934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:07.115117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:07.115127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:07.115131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:07.115135Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:07.115139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:07.115148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... lete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.122119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:07.122123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T17:54:07.122175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.122180Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#101:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:07.122187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T17:54:07.122206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:07.122291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.122300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.122304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:07.122308Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:54:07.122312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:07.122670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.122680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.122683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:07.122687Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:54:07.122690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:07.122700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 
0/1, is published: true 2024-11-21T17:54:07.122808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:54:07.122827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T17:54:07.123067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.123081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:07.123086Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:54:07.123100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:54:07.123116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:07.123120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:07.123381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:07.123509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:07.123812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:07.123817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:07.123837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:07.123847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.123850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:07.123863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T17:54:07.123907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.123911Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:07.123919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:07.123921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2024-11-21T17:54:07.123924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:07.123927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:07.123930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:07.123932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:07.123942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:07.123946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:07.123948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:54:07.123950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:54:07.124010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.124020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.124023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:07.124025Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:54:07.124027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:07.124089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.124094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:07.124096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:07.124099Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:07.124100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:07.124105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:07.124543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:07.124741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 
TestModificationResults wait txId: 102 2024-11-21T17:54:07.125287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:07.125347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.125358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2024-11-21T17:54:07.125374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2024-11-21T17:54:07.125802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:07.125826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::CreateAlter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2024-11-21T17:54:03.505606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792359960835234:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:03.505805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001141/r3tmp/tmp8hHNj4/pdisk_1.dat 2024-11-21T17:54:03.549850Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27329, node 1 2024-11-21T17:54:03.569190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:03.569206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:03.569208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:03.569251Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21738 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:03.589429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:03.590134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:03.590147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:03.590734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:03.590784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:03.590791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:54:03.591126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:54:03.591158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:03.591166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:54:03.591442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:03.592022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211643638, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:03.592031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:54:03.592071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:54:03.592483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:03.592535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:03.592552Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:54:03.592562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:54:03.592569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:54:03.592583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:54:03.592876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:54:03.592889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:54:03.592892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:03.592900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:54:03.599111Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jd7xr56e9p55ez172aff35mk, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42356, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.999130s 2024-11-21T17:54:03.602204Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jd7xr56g3x3xa1s9yrw9x5sp, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42356, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T17:54:03.605814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:03.605842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:03.607327Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:03.734594Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jd7xr5ap0zpmma0rc89y3cfe, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42356, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T17:54:03.734728Z node 1 :TX_PROXY DEBUG: actor# [1:7439792359960835451:2137] Handle TEvProposeTransaction 2024-11-21T17:54:03.734740Z node 1 :TX_PROXY DEBUG: actor# [1:7439792359960835451:2137] TxId# 281474976710658 ProcessProposeTransaction 2024-11-21T17:54:03.734751Z node 1 :TX_PROXY DEBUG: actor# [1:7439792359960835451:2137] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7439792359960836148:2590] 2024-11-21T17:54:03.740958Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:42356" 2024-11-21T17:54:03.741083Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:54:03.741102Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:54:03.741150Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:54:03.741188Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:54:03.741203Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2024-11-21T17:54:03.741259Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 HANDLE EvClientConnected 2024-11-21T17:54:03.741341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:03.741469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:54:03.741650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:03.741658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:03.742317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: 
/Root/Test 2024-11-21T17:54:03.742343Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2024-11-21T17:54:03.742357Z node 1 :TX_PROXY DEBUG: Actor# [1:7439792359960836148:2590] txid# 281474976710658 SEND to# [1:7439792359960836147:2296] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2024-11-21T17:54:03.742366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:03.742407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:03.742423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:03.742570Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T17:54:03.742605Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T17:54:03.742616Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T17:54:03.742626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:03.742637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:03.742644Z node 1 :GRPC_SERVER DEBUG: Can't upd ... ASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2024-11-21T17:54:06.929757Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2024-11-21T17:54:06.929761Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2024-11-21T17:54:06.929800Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2024-11-21T17:54:06.929803Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:54:06.929805Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2024-11-21T17:54:06.929806Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2024-11-21T17:54:06.929808Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2024-11-21T17:54:06.929812Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2024-11-21T17:54:06.929927Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7439792372305445216:2083], Recipient [10:7439792372305444310:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T17:54:06.929933Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T17:54:06.929945Z node 10 :READ_TABLE_API DEBUG: [10:7439792372305445196:2360] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2024-11-21T17:54:06.929951Z node 10 :READ_TABLE_API DEBUG: [10:7439792372305445196:2360] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2024-11-21T17:54:06.929997Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 
281474976715681, MessageQuota: 5 2024-11-21T17:54:06.930050Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2024-11-21T17:54:06.930068Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2024-11-21T17:54:06.930077Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2024-11-21T17:54:06.930088Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2024-11-21T17:54:06.930096Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2024-11-21T17:54:06.930099Z node 10 :READ_TABLE_API DEBUG: [10:7439792372305445196:2360] got stream part, size: 75, RU required: 128 rate limiter absent 2024-11-21T17:54:06.930115Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7439792372305445197:2360], Recipient [10:7439792372305444310:2307]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2024-11-21T17:54:06.930121Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2024-11-21T17:54:06.930124Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2024-11-21T17:54:06.930131Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2024-11-21T17:54:06.930145Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7439792372305445197:2360], Recipient [10:7439792372305444310:2307]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2024-11-21T17:54:06.930151Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2024-11-21T17:54:06.930162Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439792372305445197:2360], Recipient [10:7439792372305444310:2307]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732211646977 TxId: 281474976715680 2024-11-21T17:54:06.930192Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7439792372305444310:2307], Recipient [10:7439792372305444310:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:54:06.930198Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:54:06.930201Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2024-11-21T17:54:06.930203Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2024-11-21T17:54:06.930208Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2024-11-21T17:54:06.930211Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2024-11-21T17:54:06.930215Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2024-11-21T17:54:06.930220Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2024-11-21T17:54:06.930226Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2024-11-21T17:54:06.930228Z node 10 
:TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2024-11-21T17:54:06.930231Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2024-11-21T17:54:06.930237Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is DelayCompleteNoMoreRestarts 2024-11-21T17:54:06.930244Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2024-11-21T17:54:06.930245Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2024-11-21T17:54:06.930246Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2024-11-21T17:54:06.930252Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2024-11-21T17:54:06.930253Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2024-11-21T17:54:06.930255Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2024-11-21T17:54:06.930257Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:54:06.930258Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2024-11-21T17:54:06.930260Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2024-11-21T17:54:06.930262Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2024-11-21T17:54:06.930266Z node 10 :READ_TABLE_API DEBUG: [10:7439792372305445196:2360] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T17:54:06.930417Z node 10 :READ_TABLE_API NOTICE: [10:7439792372305445196:2360] Finish grpc stream, status: 400000 2024-11-21T17:54:06.931243Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e8500] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931336Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6d5900] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931375Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6f0c00] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931379Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e9900] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931412Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6f0200] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931438Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6dd600] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931448Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2024-11-21T17:54:06.931453Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2024-11-21T17:54:06.931458Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2024-11-21T17:54:06.931467Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6d3600] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 
2024-11-21T17:54:06.931476Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2024-11-21T17:54:06.931480Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e3a00] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931498Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e8f00] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931522Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6d7200] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931539Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6efd00] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931559Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6d2c00] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931580Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6f1b00] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931631Z node 10 :GRPC_SERVER DEBUG: [0x16e7dc441e00] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931672Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6d4500] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931710Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e4e00] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931716Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e7b00] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931743Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6d3b00] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931747Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6d6300] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931775Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e6200] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931780Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e5800] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931809Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6c2800] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931811Z node 10 :GRPC_SERVER DEBUG: [0x16e7fec87e80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T17:54:06.931838Z node 10 :GRPC_SERVER DEBUG: [0x16e7ff6e4900] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:07.432779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:07.432807Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:07.432812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:07.432817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:07.432834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:07.432837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:07.432846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:07.432935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:07.442512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:07.442533Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:07.445203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:07.445829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:07.445857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:07.446965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:07.447148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:07.447222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.447294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:07.448246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.448481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:07.448488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.448517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:07.448523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:07.448527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:07.448537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.449580Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:07.462788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:07.462857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.462913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:07.462969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:07.462974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.463643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.463665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:07.463705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.463713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:07.463716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:07.463720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:07.464015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.464022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:07.464025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:07.464317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.464323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.464327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.464331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.464757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:07.465093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:07.465130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:07.465273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions 
count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.465289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:07.465296Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.465341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:07.465346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.465368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:07.465377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:07.465705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:07.465711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:07.465743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.465747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:07.465814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.465819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:07.465830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:07.465834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.465839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:07.465845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.465848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:07.465851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:07.465858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:07.465862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:07.465865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:07.466091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:07.466104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:07.466109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:07.466112Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:07.466115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:07.466124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 72057594046678944 2024-11-21T17:54:07.492599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:07.492651Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2024-11-21T17:54:07.492763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:54:07.492777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T17:54:07.492840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:07.492851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:07.492924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:07.492942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:07.493001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:07.493005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:07.493021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:07.493341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:54:07.493349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:54:07.493360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T17:54:07.493377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T17:54:07.493384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:07.493387Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:07.493730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:07.493746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:07.493749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:07.493756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:07.493771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:07.493774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:07.493805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:54:07.493808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:54:07.493814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:07.493816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:07.493854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:07.493857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:07.493868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2024-11-21T17:54:07.494208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:07.494227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T17:54:07.494240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:07.494242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T17:54:07.494247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:07.494249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:54:07.494296Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494308Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 
72057594046678944 2024-11-21T17:54:07.494314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:07.494317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:626:2531] 2024-11-21T17:54:07.494326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:07.494333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:626:2531] 2024-11-21T17:54:07.494344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:07.494346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:626:2531] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2024-11-21T17:54:07.494390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494408Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 26us result status StatusPathDoesNotExist 2024-11-21T17:54:07.494438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494483Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494490Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 9us result status StatusPathDoesNotExist 2024-11-21T17:54:07.494500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:07.494536Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 10us result status StatusSuccess 2024-11-21T17:54:07.494588Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> test.py::test[pg-select_win_rank-default.txt-Debug] [GOOD] >> test.py::test[pg-select_win_rank-default.txt-Plan] [GOOD] >> test.py::test[pg-select_win_rank-default.txt-Results] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> test.py::test[type_v3-decimal_yt_llvm--Analyze] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Debug] >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowersCrossDC_Easy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:07.534408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:07.534429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:07.534432Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:07.534437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:07.534448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:07.534451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:07.534459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:07.534529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:07.541643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:07.541663Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:07.544481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:07.545172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:07.545207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:07.546541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:07.546708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:07.546803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.546896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:07.547713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.547979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:07.547990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.548027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:07.548034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:07.548041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:07.548053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.549236Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:07.565279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:07.565362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.565428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:07.565499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:07.565508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.566311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.566340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:07.566387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.566398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:07.566402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:07.566407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:07.566771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.566782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:07.566786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:07.567066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.567075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.567079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.567086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.567639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:07.568039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:07.568089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:07.568285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:07.568309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:07.568316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.568364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:07.568370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:07.568399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:07.568411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:07.568806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:07.568813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:07.568853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:07.568857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:07.568940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.568946Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:07.568957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:07.568960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.568965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:07.568970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:07.568974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:07.568977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:07.568988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:07.568993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:07.568997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:07.569273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:07.569287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:07.569292Z node 
1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:07.569297Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:07.569301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:07.569313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.717479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:54:07.717483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T17:54:07.717777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:07.717797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:07.718069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718134Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T17:54:07.718142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:54:07.718145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:07.718148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:54:07.718157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:488:2439] message: TxId: 103 2024-11-21T17:54:07.718161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:07.718164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:54:07.718166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:54:07.718180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:54:07.718445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:07.718452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:489:2440] TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:07.718526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718559Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 
39us result status StatusSuccess 2024-11-21T17:54:07.718646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718725Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 16us result status StatusSuccess 2024-11-21T17:54:07.718779Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718837Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 10us result status StatusSuccess 2024-11-21T17:54:07.718860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718883Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:07.718893Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 11us result status StatusSuccess 2024-11-21T17:54:07.718920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable 
CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TPersQueueTest::TestBigMessage [GOOD] >> TPersQueueTest::SetMeteringMode |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay/ydb_query_replay |85.3%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> test.py::test[pg-select_win_rank-default.txt-Results] [GOOD] >> test.py::test[pg-str_lookup_pg-default.txt-Debug] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas >> test.py::test[type_v3-decimal_yt_llvm--Debug] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] >> TSchemeShardSubDomainTest::CreateDropNbs >> DataShardTxOrder::RandomPoints_DelayData [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:08.577333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T17:54:08.577352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:08.577355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:08.577358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:08.577368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:08.577371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:08.577377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:08.577441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:08.584536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:08.584552Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:08.586445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:08.586926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:08.586949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:08.587840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:08.587960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:08.588026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.588088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:08.588745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.588953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:08.588959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.588986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:08.588990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:08.588994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:08.589003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.589941Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:08.600657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T17:54:08.600721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.600771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:08.600824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:08.600829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.601437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.601452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:08.601482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.601487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:08.601490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:08.601493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:08.601753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.601758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:08.601761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:08.601950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.601954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.601958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.601962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.602339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:08.602726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:08.602778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:08.602917Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.602940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:08.602945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.602986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:08.602991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.603014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:08.603024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:08.603358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:08.603364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:08.603397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.603400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:08.603475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.603480Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:08.603487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:08.603490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.603494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:08.603497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.603500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:08.603502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:08.603510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:08.603513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:08.603516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:08.603737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:08.603747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:08.603750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:08.603753Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:08.603755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:08.603763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... UG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:54:08.702470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:08.703760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:08.703769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:08.703803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:08.703817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.703821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:08.703824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T17:54:08.703884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.703889Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:08.703898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:08.703901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:08.703905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:08.703908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:08.703912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:08.703915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:08.703942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T17:54:08.703947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2024-11-21T17:54:08.703950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 
2024-11-21T17:54:08.703952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:54:08.704026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:08.704033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:08.704036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:08.704038Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T17:54:08.704041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:08.704299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:08.704309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:08.704312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:08.704316Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:08.704319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:54:08.704328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2024-11-21T17:54:08.704331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:908:2742] 2024-11-21T17:54:08.704781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:08.704833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:08.704842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:08.704846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:909:2743] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:08.704924Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:08.704949Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 31us result status StatusSuccess 
2024-11-21T17:54:08.705015Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:08.705068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:08.705078Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 10us result status StatusSuccess 2024-11-21T17:54:08.705098Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T17:54:08.705134Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:08.705144Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 12us result status StatusSuccess 2024-11-21T17:54:08.705173Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] |85.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:09.013918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:09.013941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:09.013946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:09.013951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:09.013968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:09.013972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:09.013980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:09.014050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:09.023781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:09.023806Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:09.027515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:09.028482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:09.028537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:09.033122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:09.033377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:09.033498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:09.033604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:09.034541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:09.034848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:09.034858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:09.034895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:09.034902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:09.034908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:09.034921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.036175Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:09.052772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:09.052860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.052925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T17:54:09.052998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:09.053005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.053784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:09.053811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:09.053855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.053864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:09.053869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:09.053873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:09.054277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.054293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:09.054297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:09.054669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.054678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.054682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:09.054688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:09.055103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:09.055480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:09.055524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:09.055661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:09.055680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:09.055684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:09.055726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:09.055730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:09.055753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:09.055762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:09.056102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:09.056109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:09.056149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:09.056154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:09.056225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.056243Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:09.056252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:09.056254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:09.056259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:09.056262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:09.056265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:09.056267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:09.056274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:09.056278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:09.056280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:09.056482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:09.056491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:09.056494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2024-11-21T17:54:09.056498Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:09.056500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:09.056509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... rd: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:09.125318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:09.125320Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:54:09.125322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:09.125407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:09.125412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:09.125414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:09.125416Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:09.125418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:09.125422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:54:09.125643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:09.125648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:09.125652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:09.125654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:09.125836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:09.125899Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:09.126076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:09.126115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:09.126159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409546 2024-11-21T17:54:09.126289Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:09.126309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:09.126328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:09.126347Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:09.126361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:09.126371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2024-11-21T17:54:09.126754Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:54:09.126853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:09.126885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2024-11-21T17:54:09.126968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:09.126971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:09.126978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:09.127046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:09.127050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:09.127065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:09.127097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:09.127448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:09.127457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 
2024-11-21T17:54:09.127468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:09.127470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:09.127477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:09.127479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:09.127712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:09.127717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:09.127743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:09.127751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:09.127758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:09.127761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:09.127768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:09.127979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:54:09.128019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:09.128024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:54:09.128070Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:09.128081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:09.128085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:536:2491] TestWaitNotification: OK eventTxId 102 2024-11-21T17:54:09.129224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:09.129261Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 49us result status StatusPathDoesNotExist 2024-11-21T17:54:09.129296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: 
"/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:09.129346Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:09.129354Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 9us result status StatusPathDoesNotExist 2024-11-21T17:54:09.129363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[pg-str_lookup_pg-default.txt-Debug] [GOOD] >> test.py::test[pg-str_lookup_pg-default.txt-Plan] >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage >> test.py::test[pg-str_lookup_pg-default.txt-Plan] [GOOD] >> test.py::test[pg-str_lookup_pg-default.txt-Results] >> test.py::test[type_v3-decimal_yt_llvm--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Plan] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:06.082546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:06.082581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:06.082586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:06.082591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T17:54:06.082605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:06.082608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:06.082615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:06.082680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:06.089590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:06.089606Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:06.091926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:06.092430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:06.092455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:06.093502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:06.093652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:06.093725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.093798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:06.094555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.094766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.094772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.094798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:06.094802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:06.094806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:06.094814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.095698Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:06.106989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:06.107059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.107116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:06.107174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2024-11-21T17:54:06.107179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.107828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.107849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:06.107883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.107890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:06.107893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:06.107897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:06.108334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.108354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:06.108362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:06.108860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.108872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.108878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.108885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.109507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:06.109833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:06.109871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:06.110004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:06.110019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:06.110024Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.110063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:06.110068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:06.110092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:06.110100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:06.110415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:06.110421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:06.110453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:06.110456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:06.110521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:06.110526Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:06.110536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:06.110538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.110543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:06.110546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:06.110549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:06.110552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:06.110559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:06.110564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:06.110566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:06.110799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:06.110812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:06.110817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:06.110821Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:06.110825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:06.110836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... poseLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 190 } } 2024-11-21T17:54:09.354009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:54:09.354017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2024-11-21T17:54:09.354030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:54:09.354036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:54:09.354139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.354147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:54:09.354156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:54:09.354161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2024-11-21T17:54:09.354172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:54:09.354200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2024-11-21T17:54:09.354219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:09.354230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:09.354885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.354937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.354977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:09.354982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:09.355024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:09.355049Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:09.355055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T17:54:09.355059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T17:54:09.355155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.355164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:54:09.355175Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.355179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:54:09.355182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T17:54:09.355373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:09.355385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:09.355389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:54:09.355394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T17:54:09.355399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:09.355653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:09.355664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:09.355668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:54:09.355672Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:54:09.355677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:09.355688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T17:54:09.356100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:09.356107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 
103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:09.356180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:09.356213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:54:09.356218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:09.356223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:54:09.356251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2024-11-21T17:54:09.356257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:09.356262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:54:09.356268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:54:09.356281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:09.356373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:09.356376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:09.356469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:09.356676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:09.356929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:09.356936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T17:54:09.356952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:09.356957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:662:2597] 2024-11-21T17:54:09.357075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:09.357211Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:09.357243Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 38us result status StatusSuccess 2024-11-21T17:54:09.357329Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy |85.3%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> test.py::test[pg-str_lookup_pg-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_order_all_corr-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] Test command err: 2024-11-21T17:54:06.196864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792373018179614:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:06.197103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00113d/r3tmp/tmpj3cSp6/pdisk_1.dat 2024-11-21T17:54:06.242973Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11176, node 1 2024-11-21T17:54:06.260982Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:06.260993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:06.260994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:06.261028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15648 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:06.281507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.282421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:06.282437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.283103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:06.283155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:06.283165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:54:06.283520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:54:06.283592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:06.283601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:54:06.283902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.284581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211646333, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:06.284599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:54:06.284663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:54:06.285122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:06.285159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:06.285175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:54:06.285185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:54:06.285200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:54:06.285210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:54:06.285590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:54:06.285607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:54:06.285610Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:06.285622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:54:06.296720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:06.296750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:06.298153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:06.300832Z node 1 :TX_PROXY ERROR: Access denied for bad@builtin with access CreateTable to path Root 2024-11-21T17:54:06.846874Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792373337443145:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:06.846989Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00113d/r3tmp/tmp98PpX1/pdisk_1.dat 2024-11-21T17:54:06.862118Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18817, node 4 2024-11-21T17:54:06.876222Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:06.876257Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:06.876262Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:06.876316Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:06.946962Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:06.946993Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:06.948651Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:06.949510Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.949615Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:06.949628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.949950Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:06.950001Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:06.950009Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:54:06.950322Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:06.950333Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:06.950476Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:06.950717Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.956058Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211647005, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:06.956075Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:06.956131Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:06.956555Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:06.956608Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:06.956620Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:06.956630Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:06.956637Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:06.956649Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subs ... 2024-11-21T17:54:09.186374Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00113d/r3tmp/tmpscoqEj/pdisk_1.dat 2024-11-21T17:54:09.197663Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1419, node 13 2024-11-21T17:54:09.211862Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:09.211875Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:09.211877Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:09.211933Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9281 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:09.286340Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:09.286371Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:09.287945Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:09.288300Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:09.288388Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:09.288397Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:09.288764Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:09.288803Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:09.288810Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:54:09.289089Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:09.289096Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:09.289312Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:09.289360Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:09.290064Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211649336, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:09.290075Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:09.290118Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:09.290397Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:09.290449Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:09.290465Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:09.290484Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:09.290498Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:09.290515Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:09.290665Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:09.290677Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:09.290679Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:09.290689Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:09.300250Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:09.300303Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:09.300419Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:09.300431Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:09.301106Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 
2024-11-21T17:54:09.301165Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:09.301227Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:09.301251Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T17:54:09.301309Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:09.301475Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:09.301487Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:09.301491Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:09.301532Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:09.301540Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:09.301541Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:09.302843Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:09.302870Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:09.355491Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:54:09.356091Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:09.357157Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211649406, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:09.357172Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T17:54:09.357208Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:54:09.357923Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:09.358007Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:09.358028Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:09.358049Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:09.358064Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:54:09.358099Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
281474976715658, publications: 2, subscribers: 1 2024-11-21T17:54:09.358342Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:09.358365Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:09.358370Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:09.358418Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:09.358428Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:09.358429Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:09.358436Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 >> test.py::test[type_v3-decimal_yt_llvm--Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Analyze] >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> YdbTableBulkUpsert::Nulls >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] >> TOlapReboots::DropMultipleTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:08.103442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:08.103463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:08.103467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:08.103471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:08.103482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:08.103485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:08.103491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:08.103561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T17:54:08.113305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:08.113329Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:08.116173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:08.116925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:08.116953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:08.118203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:08.118325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:08.118390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.118451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:08.119052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.119303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:08.119314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.119351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:08.119358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:08.119364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:08.119376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.120322Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:08.130376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:08.130446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.130494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:08.130551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:08.130556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.131193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.131209Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:08.131242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.131249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:08.131251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:08.131255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:08.131529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.131536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:08.131538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:08.131767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.131771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.131781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.131786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.132158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:08.132483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:08.132523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:08.132650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.132665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:08.132669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.132703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:08.132707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.132729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 1 2024-11-21T17:54:08.132737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:08.133023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:08.133028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:08.133059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.133062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:08.133121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.133126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:08.133133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:08.133135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.133139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:08.133142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.133145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:08.133148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:08.133155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:08.133159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:08.133161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:08.133343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:08.133351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:08.133354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:08.133357Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:08.133359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:08.133367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
poseLatency: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 122 } } 2024-11-21T17:54:10.341772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:54:10.341777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:10.341789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:54:10.341793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2024-11-21T17:54:10.342231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:10.342241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet72075186233409546 2024-11-21T17:54:10.342259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:54:10.342263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2024-11-21T17:54:10.342272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet72075186233409546 2024-11-21T17:54:10.342297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2024-11-21T17:54:10.342330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2024-11-21T17:54:10.342339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T17:54:10.342643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:10.342718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:10.342999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T17:54:10.343007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T17:54:10.343046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2024-11-21T17:54:10.343069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T17:54:10.343073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 1 2024-11-21T17:54:10.343078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 2 2024-11-21T17:54:10.343131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:10.343136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2024-11-21T17:54:10.343150Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:10.343154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2024-11-21T17:54:10.343158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T17:54:10.343312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:10.343323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:10.343326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T17:54:10.343330Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2024-11-21T17:54:10.343335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T17:54:10.343525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:10.343535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:10.343538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T17:54:10.343544Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:10.343548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T17:54:10.343558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:54:10.344116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:10.344126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T17:54:10.344201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T17:54:10.344246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#104:0 progress is 1/1 2024-11-21T17:54:10.344251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:10.344256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T17:54:10.344267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2494] message: TxId: 104 2024-11-21T17:54:10.344272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:10.344276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:54:10.344279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:54:10.344293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T17:54:10.344365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T17:54:10.344370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T17:54:10.344408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T17:54:10.344707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T17:54:10.344922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T17:54:10.344929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 0, path id: 1 2024-11-21T17:54:10.344968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:54:10.344974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:821:2742] 2024-11-21T17:54:10.345077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2024-11-21T17:54:10.345278Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T17:54:10.345303Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 33us result status StatusSuccess 2024-11-21T17:54:10.345385Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |85.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test.py::test[pg-sublink_order_all_corr-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_order_all_corr-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_order_all_corr-default.txt-Results] >> test.py::test[type_v3-insert_struct_v3_wo_native--Analyze] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Debug] >> YdbYqlClient::TestColumnOrder >> THiveTest::TestFollowersReconfiguration >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple >> YdbYqlClient::TestDescribeDirectory >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> TOlapReboots::DropMultipleStandaloneTables [GOOD] |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] Test command err: 2024-11-21T17:53:40.150018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792258249004570:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:40.150182Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:40.152722Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792258856831041:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:40.152739Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000cd0/r3tmp/tmp7Ax0jD/pdisk_1.dat 2024-11-21T17:53:40.175695Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:40.176104Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created TServer::EnableGrpc on GrpcPort 12023, node 1 2024-11-21T17:53:40.201699Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:53:40.204081Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000cd0/r3tmp/yandexrKK2JN.tmp 2024-11-21T17:53:40.204089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000cd0/r3tmp/yandexrKK2JN.tmp 2024-11-21T17:53:40.204129Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000cd0/r3tmp/yandexrKK2JN.tmp 2024-11-21T17:53:40.204160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:53:40.208028Z INFO: TTestServer started on Port 63040 GrpcPort 12023 TClient is connected to server localhost:63040 PQClient connected to localhost:12023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:53:40.250203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:40.250229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:40.251566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:40.274938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:40.274959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:40.275576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:40.275938Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:53:40.276218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:53:40.288159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:53:40.405052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792258249005556:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:40.405079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792258249005548:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:40.405150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:40.405609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792258249005591:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:40.405623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:40.405758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2024-11-21T17:53:40.409209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792258249005562:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2024-11-21T17:53:40.430219Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439792258856831410:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:40.430307Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTAwZGRkMWEtNzQyYWU4NmEtNjBmNmExOWEtZWZiYjA1NDQ=, ActorId: [2:7439792258856831371:2280], ActorState: ExecuteState, TraceId: 01jd7xqehq080wk7vsy2a0ekzr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:40.430812Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:40.431015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:53:40.468045Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792258249005754:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:40.468139Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNlNTJhZTItNWRhMzgyZjktNTE1MjE4ZTgtN2VmOTlkN2I=, ActorId: [1:7439792258249005545:2300], ActorState: ExecuteState, TraceId: 01jd7xqehjfd9gqhxrm109g9kv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:40.468304Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:40.489676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T17:53:40.556150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:53:40.585485Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jd7xqeps1w4cnvykztrcza1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY1YzFkNi1hZjczNDVlNi00YWM2ZjA2LTVkZTA3ZGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439792258249006075:3037] 2024-11-21T17:53:45.150342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439792258249004570:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:45.150372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:53:45.152900Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439792258856831041:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:45.152921Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:53:45.597253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720679:1, at schemeshard: 72057594046644480 2024-11-21T17:53:45.659482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720682:0, at schemeshard: 72057594046644480 2024-11-21T17:53:45.702308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part prop ... 003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:54:06.013474Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.072568Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.089215Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:06.097487Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439792371776934093:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:54:06.097572Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NTI1ZWJmNzgtMmZjYTg1ODQtYWNhYmMwNjItZDNiNDFiMjI=, ActorId: [10:7439792367481966685:2280], ActorState: ExecuteState, TraceId: 01jd7xr7hfd2tpwc6jknqqdke6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:54:06.097703Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:54:06.117328Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xr7mnbzzvgqeevryd8x7k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZTlkYTg4ODgtNTMyMDk1NTQtZWQ0OWE5MGUtNWNmNDYwN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7439792370150385048:3032] 2024-11-21T17:54:10.711976Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7439792365855416249:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:10.712027Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:54:10.712966Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439792367481966354:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:10.712992Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:54:11.155222Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:54:11.221488Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.279219Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.346721Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.403107Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.475015Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1732211651538, 1732211651538, 0, 13); 2024-11-21T17:54:11.553456Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715695. Ctx: { TraceId: 01jd7xrcymfkjx18b66qdhyg41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MWRjYWM0YjgtZTZjMTBiYTctOGIzYWMyM2YtZDk5OTBiOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:54:11.556689Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:54:11.556702Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:54:11.556704Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:54:11.556710Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792391625222698:3734] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2024-11-21T17:54:11.556736Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7439792391625222699:3734], Recipient [9:7439792391625222049:3325]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T17:54:11.556762Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7439792391625222698:3734], Recipient [9:7439792391625222049:3325]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2024-11-21T17:54:11.556779Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7439792391625222049:3325], Recipient [9:7439792391625222698:3734]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2024-11-21T17:54:11.556793Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792391625222698:3734] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2024-11-21T17:54:11.556817Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7439792391625222698:3734], Recipient [9:7439792391625222049:3325]: NActors::TEvents::TEvPoison 2024-11-21T17:54:11.556832Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7439792365855416243:2049], Recipient [9:7439792391625222698:3734]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T17:54:11.556843Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792391625222698:3734] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2024-11-21T17:54:11.557299Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7439792365855416384:2170], Recipient [9:7439792391625222698:3734]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=9&id=YjQyNGRjY2ItMWI5MzY5ZTgtOWFmYjNkMWItZmFiODkwMGM=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T17:54:11.557311Z node 9 
:PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792391625222698:3734] (SourceId=A_Source_10, PreferedPartition=1) Select from the table 2024-11-21T17:54:11.571412Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7439792365855416384:2170], Recipient [9:7439792391625222698:3734]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=YjQyNGRjY2ItMWI5MzY5ZTgtOWFmYjNkMWItZmFiODkwMGM=" PreparedQuery: "d2d62a13-a4426880-d486e60e-eaa307c3" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd7xrczh7w581b5at11esq0t" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1732211651538 } items { uint64_value: 1732211651538 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 8 2024-11-21T17:54:11.571451Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792391625222698:3734] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2024-11-21T17:54:11.571456Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792391625222698:3734] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen 2024-11-21T17:54:11.571463Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7439792391625222698:3734] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Received TEvChooseError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2024-11-21T17:54:11.587497Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715698. Ctx: { TraceId: 01jd7xrcznczseeh62pca8cf5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ODQ5ZGNhOTktYWFkZjI3MjctNzI1YzMyMDgtMjAzOWYxZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |85.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestHiveBalancer |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |85.4%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test.py::test[pg-sublink_order_all_corr-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_where_all-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] Test command err: 2024-11-21T17:53:39.558980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792254586354742:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:39.559002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:53:39.561768Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792253600892841:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:39.561842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000cef/r3tmp/tmpWfArzX/pdisk_1.dat 2024-11-21T17:53:39.583720Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:39.585334Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:53:39.596970Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12547, node 1 2024-11-21T17:53:39.608831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/000cef/r3tmp/yandex1fzcIy.tmp 2024-11-21T17:53:39.608842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/000cef/r3tmp/yandex1fzcIy.tmp 2024-11-21T17:53:39.608897Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/000cef/r3tmp/yandex1fzcIy.tmp 2024-11-21T17:53:39.608931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:53:39.612036Z INFO: TTestServer started on Port 15064 GrpcPort 12547 TClient is connected to server localhost:15064 PQClient connected to localhost:12547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:39.659110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:39.659135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:39.660818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:39.680301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:39.680320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:39.680913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:39.681241Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:53:39.681516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2024-11-21T17:53:39.693125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:53:39.820407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792253600893188:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:39.820424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:39.820430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792253600893196:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:39.821200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2024-11-21T17:53:39.824473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439792253600893203:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2024-11-21T17:53:39.858273Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792254586355785:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:39.858370Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjNlN2VkMGMtNjQ0ZGZhZjktYWJiN2NmNzItNzExZWM1MmQ=, ActorId: [1:7439792254586355744:2300], ActorState: ExecuteState, TraceId: 01jd7xqdzyakx4vdr6b29e1zab, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:39.858707Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:39.858982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:53:39.906613Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439792253600893244:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:53:39.906705Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjZhOGQ0NDEtMjM5NzMzYzctYzAzMTZjZTktMjIwODY1Njc=, ActorId: [2:7439792253600893172:2280], ActorState: ExecuteState, TraceId: 01jd7xqdzc1rs59zmjkph973j2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:53:39.906844Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:53:39.917339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T17:53:39.936067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:53:39.964761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jd7xqe3c07z53r8hbq5cjrq9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBiOTkwYmMtN2VhZjA1YzMtMmZhNzM0Y2YtZDFiNDliZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439792254586356205:3025] 2024-11-21T17:53:44.559291Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439792254586354742:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:44.559316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:53:44.561856Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439792253600892841:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:44.561883Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:53:44.993417Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:53:44.993427Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:53:44.993430Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; ... 6720657 completed, doublechecking } 2024-11-21T17:54:05.212086Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7439792366281697393:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:54:05.212256Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MTVhMDQwOWYtOTkzOTUzOTAtODQyOTQxMy1mZGE4OGE2NA==, ActorId: [9:7439792366281697352:2300], ActorState: ExecuteState, TraceId: 01jd7xr6rrb6bqt9j1e0n3k2vk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:54:05.212421Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:54:05.212950Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:05.255230Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439792367982565153:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:54:05.255356Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZGYwZjM1YzEtMzdjZWQyMy1mMTg2MjMxMC1iZjc3ODBl, ActorId: [10:7439792367982565079:2280], ActorState: ExecuteState, TraceId: 01jd7xr6qx4n9xvh3b81dhas5n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:54:05.255491Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:54:05.271323Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:05.335785Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:54:05.363625Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xr6x44jdjynwk4c7f92e8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZGM2ZjgzYTUtYmUxMGMwYy1iNzQyY2M1LTRmYTg4YThk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7439792366281697818:3026] 2024-11-21T17:54:09.891034Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7439792361986729031:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:09.891070Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:54:09.892012Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439792363687597454:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:09.892039Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T17:54:11.394760Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:54:11.394774Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:54:11.394776Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:54:11.394780Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) InitTable: SourceId=A_Source TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2024-11-21T17:54:11.395639Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:1, at schemeshard: 72057594046644480 2024-11-21T17:54:11.462311Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.522818Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.592414Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.650368Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.715556Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.771774Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7439792361986729019:2049], Recipient [9:7439792392051502222:3375]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2024-11-21T17:54:11.771789Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) StartKqpSession 2024-11-21T17:54:11.772291Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7439792361986729186:2188], Recipient [9:7439792392051502222:3375]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=NmU2Y2E4ODgtZGNjZGQyODQtNWE1NDRiZDMtYWY4NDYzMjE=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2024-11-21T17:54:11.772302Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) Select from the table 2024-11-21T17:54:11.783386Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7439792361986729186:2188], Recipient [9:7439792392051502222:3375]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=NmU2Y2E4ODgtZGNjZGQyODQtNWE1NDRiZDMtYWY4NDYzMjE=" PreparedQuery: "5790e136-f48744d4-726f0fa4-85112e76" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jd7xrd66dznsbtxpdd4g1mgz" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 6 2024-11-21T17:54:11.783418Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) Selected from table PartitionId=(NULL) SeqNo=(NULL) 2024-11-21T17:54:11.783424Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) OnPartitionChosen 2024-11-21T17:54:11.783428Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) Update the table 2024-11-21T17:54:11.793406Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7439792361986729186:2188], Recipient [9:7439792392051502222:3375]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=NmU2Y2E4ODgtZGNjZGQyODQtNWE1NDRiZDMtYWY4NDYzMjE=" PreparedQuery: "4a6944a1-c91df1b4-39157aee-9ca0eed9" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 4 Received TEvChooseResult: 2024-11-21T17:54:11.793425Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) HandleUpdate PartitionPersisted=0 Status=SUCCESS 0 2024-11-21T17:54:11.793438Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T17:54:11.793447Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7439792392051502222:3375] (SourceId=A_Source, PreferedPartition=0) Start idle |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |85.3%| 
[LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/benchmark/benchmark |85.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/benchmark |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/benchmark |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/pgwire/pgwire >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestBusySession |85.4%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleStandaloneTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:53:22.732362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:53:22.732401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:53:22.732407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:53:22.732413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:53:22.732430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:53:22.732434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:53:22.732443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:53:22.732573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:53:22.790362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:53:22.790384Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:53:22.792875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:53:22.792981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:53:22.793003Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:53:22.810653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:53:22.810753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:53:22.831273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:22.840882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:53:22.860958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:22.893862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:22.893889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:22.893907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:53:22.893919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:53:22.893926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:53:22.893969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:53:22.904809Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:53:22.980450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:53:22.992352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.992449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:53:22.992506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:53:22.992514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.994430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:22.994459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T17:53:22.994492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.994499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:53:22.994504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:53:22.994507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:53:22.996186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.996200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:53:22.996206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:53:22.996630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.996642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:22.996648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:22.996655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:53:22.997304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:53:22.998278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:53:23.007805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:53:23.008218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:23.008289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:23.008311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:23.008415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:53:23.008426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:23.008471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T17:53:23.008488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:53:23.012308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:23.012325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:53:23.012371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:23.012378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:53:23.012465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:23.012474Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:53:23.012501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:53:23.012506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:53:23.012513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:53:23.012518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:53:23.012523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:53:23.012527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:53:23.012544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:53:23.012551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:53:23.012555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
07 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:54:11.886491Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:54:11.886595Z node 107 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:54:11.886602Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:54:11.886604Z node 107 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:54:11.886606Z node 107 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2024-11-21T17:54:11.886608Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T17:54:11.886616Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T17:54:11.886650Z node 107 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:54:11.886716Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:11.886752Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:11.887744Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2024-11-21T17:54:11.887850Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:11.887855Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:11.887865Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:54:11.888273Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:54:11.888339Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:54:11.888547Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:11.888556Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:11.888566Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, 
at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2024-11-21T17:54:11.899539Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1004 MinStep: 0 Step: 5000006 2024-11-21T17:54:11.899569Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T17:54:11.899600Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1004 MinStep: 0 Step: 5000006 2024-11-21T17:54:11.899611Z node 107 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 1004 MinStep: 0 Step: 5000006 FAKE_COORDINATOR: Erasing txId 1004 2024-11-21T17:54:11.899747Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2024-11-21T17:54:11.899754Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2024-11-21T17:54:11.899765Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2024-11-21T17:54:11.899796Z node 107 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 130 2024-11-21T17:54:11.900555Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:54:11.900639Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:54:11.900665Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:54:11.900673Z node 107 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId#1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:11.900697Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:54:11.900728Z node 107 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:54:11.900733Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:54:11.900740Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T17:54:11.900755Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [107:356:2336] message: TxId: 1004 2024-11-21T17:54:11.900761Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:54:11.900767Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:54:11.900771Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:54:11.900801Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:54:11.901253Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 
72057594037968897 at ss 72057594046678944 2024-11-21T17:54:11.901284Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:54:11.901291Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [107:519:2486] 2024-11-21T17:54:11.901391Z node 107 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:11.901440Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:11.901522Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:54:11.902713Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:11.902727Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:54:11.902746Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:11.903833Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:11.903862Z node 107 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:11.903960Z node 107 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2024-11-21T17:54:11.904087Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:11.904136Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable1" took 56us result status StatusPathDoesNotExist 2024-11-21T17:54:11.904178Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:11.904278Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:11.904297Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable2" took 21us result status StatusPathDoesNotExist 2024-11-21T17:54:11.904312Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[type_v3-insert_struct_v3_wo_native--Debug] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersSimple::Partition >> YdbYqlClient::TestDescribeDirectory [GOOD] >> YdbYqlClient::TestDoubleKey |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |85.4%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionFirstClass >> TGRpcNewCoordinationClient::SessionMethods >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> test.py::test[pg-sublink_where_all-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_all-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_where_all-default.txt-Results] >> TDatabaseQuotas::DisableWritesToDatabase >> PQCountersSimple::PartitionFirstClass [GOOD] >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> TOlapReboots::CreateStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> PQCountersSimple::PartitionFirstClass [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2024-11-21T17:53:55.792533Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:55.792554Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Captured 
TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.796343Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:55.798228Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:55.798438Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:55.798847Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:53:55.799155Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:53:55.799400Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR 2024-11-21T17:53:55.801325Z node 1 :PERSQUEUE INFO: new Cookie default|e5400f26-92016f10-4ce806fd-f30ef13d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.802138Z node 1 :PERSQUEUE INFO: new Cookie default|8c22507d-b424e4ea-25534597-536fb3ec_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:55.802759Z node 1 :PERSQUEUE INFO: new Cookie default|cfcbebb-571f3d10-a07c2c04-6a9af93a_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER 
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", 
"topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": ... 
NDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2024-11-21T17:54:12.991671Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:12.991696Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:12.995242Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:12.995406Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [4:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } 2024-11-21T17:54:12.995493Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:184:2197] 2024-11-21T17:54:12.995891Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:12.996138Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:185:2198] 2024-11-21T17:54:12.996510Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] 
init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:12.997697Z node 4 :PERSQUEUE INFO: new Cookie default|e46f618-75369d38-f684add2-5f19dc5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:12.998355Z node 4 :PERSQUEUE INFO: new Cookie default|fbed5997-eab42351-d8af2643-386317a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:12.998840Z node 4 :PERSQUEUE INFO: new Cookie default|a36a1d11-4ccd6d15-b3d20d21-3c2e8f67_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:12.999319Z node 4 :PERSQUEUE INFO: new Cookie default|b4c1ac30-1f01cec-90526af9-417b3554_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T17:54:12.999430Z node 4 :PERSQUEUE INFO: new Cookie default|1650f8f4-243540a5-cfb13ed2-b82b4a1_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:54:13.239854Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:13.239876Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:13.243182Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:13.243336Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2024-11-21T17:54:13.243462Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:184:2197] 2024-11-21T17:54:13.243637Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:13.243799Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:185:2198] 
2024-11-21T17:54:13.243912Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [5:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:13.245005Z node 5 :PERSQUEUE INFO: new Cookie default|49421cc5-84458ccc-b0e746e1-a63929b6_0 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:13.245788Z node 5 :PERSQUEUE INFO: new Cookie default|13fb61e4-a7c80eb3-e79732c1-ff6794b4_1 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:13.246461Z node 5 :PERSQUEUE INFO: new Cookie default|f9be49e6-c208daba-efa5f1cc-1cd35459_2 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:13.247145Z node 5 :PERSQUEUE INFO: new Cookie default|2bb55184-eb23ddc3-b06033c6-15f113d3_3 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:04.939774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:04.939793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.939796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:04.939799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:04.939809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:04.939812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:04.939817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:04.939884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:04.947052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:04.947072Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:04.949755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:04.950380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:04.950405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T17:54:04.951736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:04.951870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:04.951949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.952018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:04.952696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.952931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.952938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.952967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:04.952972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.952977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:04.952987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.953907Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:04.966191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:04.966271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.966338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:04.966397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:04.966405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.967194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.967214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:04.967249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.967257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:04.967260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T17:54:04.967264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:04.967601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.967610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:04.967613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:04.967871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.967877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.967880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.967884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.968412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:04.968837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:04.968890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:04.969039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:04.969057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:04.969062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.969104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:04.969108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:04.969132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.969141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:04.969536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:04.969543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:04.969576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:04.969580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:04.969644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:04.969649Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:04.969670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:04.969676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.969682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:04.969686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:04.969690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:04.969694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:04.969703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:04.969709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:04.969711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:04.969956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.969966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:04.969969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:04.969972Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:04.969975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:04.969984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
mplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1728 DataSize: 1728 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:13.188655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:478:2438] sender: [1:754:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:478:2438] sender: [1:757:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:478:2438] sender: [1:758:2058] recipient: [1:756:2681] Leader for TabletID 72057594046678944 is [1:759:2682] sender: [1:760:2058] recipient: [1:756:2681] 2024-11-21T17:54:13.194208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:13.194228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:13.194233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:13.194236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:13.194240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:13.194242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:13.194248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:13.194296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:13.195055Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:13.195351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:13.195394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:13.195421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:13.195427Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:13.195495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:13.195574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:13.195599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:13.195608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:54:13.195704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T17:54:13.195732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:13.195769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:13.195772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:13.195788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.195994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.196009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.196018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.196022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.196026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.197930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:13.197947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:13.197973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:13.197981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:13.197986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:13.198009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:759:2682] sender: [1:812:2058] recipient: [1:15:2062] 2024-11-21T17:54:13.228802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:13.228885Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 87us result status StatusSuccess 2024-11-21T17:54:13.229009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1728 DataSize: 1728 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> test.py::test[pg-sublink_where_all-default.txt-Results] [GOOD] >> test.py::test[pg-sublink_where_exists-default.txt-Debug] |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |85.4%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TPersQueueTest::TestWriteSessionsConflicts [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2024-11-21T17:52:11.351686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:52:11.351706Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:11.351719Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T17:52:11.353792Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T17:52:11.353912Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T17:52:11.353951Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:52:11.354555Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T17:52:11.360341Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:52:11.360435Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T17:52:11.360539Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T17:52:11.360555Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T17:52:11.360560Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T17:52:11.360590Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInit::Complete 2024-11-21T17:52:11.362460Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T17:52:11.362490Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T17:52:11.362527Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T17:52:11.362531Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T17:52:11.362533Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T17:52:11.362536Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:11.362588Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:11.362593Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:11.362604Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T17:52:11.362614Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T17:52:11.362647Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:11.362652Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T17:52:11.362655Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T17:52:11.362659Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T17:52:11.362662Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T17:52:11.362665Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T17:52:11.362668Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:52:11.367830Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.367847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.367853Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T17:52:11.368126Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T17:52:11.368133Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T17:52:11.368145Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T17:52:11.368163Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T17:52:11.368169Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T17:52:11.368175Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 
9437184 2024-11-21T17:52:11.368179Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:11.368182Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T17:52:11.368184Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T17:52:11.368187Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:11.368245Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T17:52:11.368250Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T17:52:11.368253Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T17:52:11.368256Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:11.368265Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T17:52:11.368268Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T17:52:11.368271Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T17:52:11.368274Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:11.368278Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T17:52:11.389384Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T17:52:11.389410Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T17:52:11.389416Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T17:52:11.389436Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T17:52:11.389447Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T17:52:11.389541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.389546Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:52:11.389551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T17:52:11.389565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T17:52:11.389568Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T17:52:11.389592Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T17:52:11.389601Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:11.389603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T17:52:11.389606Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T17:52:11.390103Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 
at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T17:52:11.390109Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:52:11.390156Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:11.390159Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T17:52:11.390164Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T17:52:11.390170Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T17:52:11.390172Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T17:52:11.390177Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T17:52:11.390180Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T17:52:11.390185Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:11.390187Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T17:52:11.390190Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T17:52:11.390192Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T17:52:11.390224Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T17:52:11.390227Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:11.390229Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T17:52:11.390232Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T17:52:11.390234Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T17:52:11.390241Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T17:52:11.390243Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T17:52:11.390245Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T17:52:11.390247Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T17:52:11.390255Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T17:52:11.390258Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T17:52:11.390260Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T17:52:11.390264Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T17:52:11.390266Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T17:52:11.390268Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
437184 consumer 9437184 txId 521 2024-11-21T17:54:09.050913Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2024-11-21T17:54:09.050915Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.050917Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2024-11-21T17:54:09.050926Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2024-11-21T17:54:09.050928Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.050930Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2024-11-21T17:54:09.050943Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2024-11-21T17:54:09.050945Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.050947Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2024-11-21T17:54:09.050958Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T17:54:09.050960Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.050962Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2024-11-21T17:54:09.050973Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2024-11-21T17:54:09.050975Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.050977Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2024-11-21T17:54:09.050993Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2024-11-21T17:54:09.050994Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.050996Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2024-11-21T17:54:09.051005Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2024-11-21T17:54:09.051007Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2024-11-21T17:54:09.051009Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2024-11-21T17:54:09.051021Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2024-11-21T17:54:09.051024Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051027Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2024-11-21T17:54:09.051046Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2024-11-21T17:54:09.051048Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051049Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2024-11-21T17:54:09.051060Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2024-11-21T17:54:09.051062Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051065Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2024-11-21T17:54:09.051084Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T17:54:09.051086Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051088Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2024-11-21T17:54:09.051099Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2024-11-21T17:54:09.051100Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051102Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2024-11-21T17:54:09.051121Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2024-11-21T17:54:09.051124Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051127Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2024-11-21T17:54:09.051140Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2024-11-21T17:54:09.051143Z 
node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051146Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2024-11-21T17:54:09.051156Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2024-11-21T17:54:09.051158Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051159Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2024-11-21T17:54:09.051173Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2024-11-21T17:54:09.051174Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051176Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2024-11-21T17:54:09.051191Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2024-11-21T17:54:09.051193Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051195Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2024-11-21T17:54:09.051207Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2024-11-21T17:54:09.051210Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051212Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2024-11-21T17:54:09.051227Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T17:54:09.051229Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.051231Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2024-11-21T17:54:09.063567Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:54:09.063589Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2024-11-21T17:54:09.063608Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:97:2132], exec latency: 1 ms, propose latency: 2 ms 2024-11-21T17:54:09.063621Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T17:54:09.063627Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2024-11-21T17:54:09.063677Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T17:54:09.063681Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2024-11-21T17:54:09.063687Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [16:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T17:54:09.063691Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T17:54:09.063744Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:227:2222], Recipient [16:329:2302]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2024-11-21T17:54:09.063750Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T17:54:09.063755Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 31 29 29 28 23 22 27 21 30 27 30 21 15 29 11 30 21 27 27 - 30 30 4 - - - - - - - - - actual 31 29 29 28 23 22 27 21 30 27 30 21 15 29 11 30 21 27 27 - 30 30 4 - - - - - - - - - interm 28 29 29 28 23 22 27 21 30 27 30 21 15 29 11 30 21 27 27 - 30 30 4 - - - - - - - - - >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> YdbYqlClient::SimpleColumnFamilies [GOOD] >> YdbYqlClient::TestConstraintViolation [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition >> test.py::test[type_v3-insert_struct_v3_wo_native--ForceBlocks] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Plan] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::Overload >> YdbYqlClient::TestReadTableOneBatch >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning |85.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::SimpleColumnFamilies [GOOD] Test command err: 2024-11-21T17:53:20.622053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792175284619117:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:53:20.622098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00117d/r3tmp/tmpPjuN8a/pdisk_1.dat 2024-11-21T17:53:20.690019Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23325, node 1 2024-11-21T17:53:20.720486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:53:20.720514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:53:20.722383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:53:20.724378Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:53:20.724387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:53:20.724390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:53:20.724420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:53:20.763274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.764355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:20.764364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.764971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:53:20.765019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:53:20.765022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:53:20.765443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:53:20.765449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:53:20.765907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.767604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211600812, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:20.767615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:53:20.767667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:53:20.770400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:20.770447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046644480 2024-11-21T17:53:20.770456Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:53:20.770465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:53:20.770472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:53:20.770487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T17:53:20.771243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:53:20.771256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:53:20.771260Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:53:20.771273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2024-11-21T17:53:20.784375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:53:20.950413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792175284619905:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:20.950445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:53:20.990511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.990604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:53:20.990719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:53:20.990723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:53:20.991682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:53:20.991736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:20.991785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:20.991799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:53:20.992262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:20.992270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:20.992274Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:53:20.992323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:20.992325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:20.992327Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:53:20.993553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:53:20.996932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:53:20.996966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:53:21.001538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:53:21.027618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:53:21.027631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:53:21.027661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:53:21.032651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:53:21.033800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211601078, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:53:21.033813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211601078 2024-11-21T17:53:21.033845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:53:21.034323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:53:21.034398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:53:21.034408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:53:21.034849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:53:21.034856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:53:21.034859Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, p ... 588Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719634. Ctx: { TraceId: 01jd7xrf5b1hyvxrn8p9k28aeg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.822842Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719635. Ctx: { TraceId: 01jd7xrf5pawhmrt1h5emngfmf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.833527Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719636. Ctx: { TraceId: 01jd7xrf61eeatfsxdcc7jw47p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.844698Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719637. Ctx: { TraceId: 01jd7xrf6c5hk078gfeb3qm50z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.855927Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719638. Ctx: { TraceId: 01jd7xrf6rbjprer3j3kmgt75t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.867169Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719639. 
Ctx: { TraceId: 01jd7xrf739wxz0k4zz3641js6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.878085Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719640. Ctx: { TraceId: 01jd7xrf7e187s811pr6zvgvdq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.889768Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719641. Ctx: { TraceId: 01jd7xrf7s5zcmcpt3agbefkn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.903498Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719642. Ctx: { TraceId: 01jd7xrf854ejkd1rpjn72qep4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.914268Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719643. Ctx: { TraceId: 01jd7xrf8jbqp7sgy70rz4bc5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.925714Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719644. Ctx: { TraceId: 01jd7xrf8xbpypzn213s43d3bg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.936413Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719645. Ctx: { TraceId: 01jd7xrf98533qmnj7zab96nrc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.947445Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719646. Ctx: { TraceId: 01jd7xrf9k2hj5twkfrn8195gg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.959924Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719647. Ctx: { TraceId: 01jd7xrf9yatkb5dffmq75e7t7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.970387Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719648. Ctx: { TraceId: 01jd7xrfab0ydmk88j8wt528g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.980898Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719649. 
Ctx: { TraceId: 01jd7xrfanfvks9mdjr3bdpj0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:13.992103Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719650. Ctx: { TraceId: 01jd7xrfaz71v2717mtjwhgqxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.002372Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719651. Ctx: { TraceId: 01jd7xrfbb5ena6yctbege1z5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.014341Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719652. Ctx: { TraceId: 01jd7xrfbn1ydz7vhpszmny1th, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.023603Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719653. Ctx: { TraceId: 01jd7xrfc17vpknsxx93snq8fy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.033496Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719654. Ctx: { TraceId: 01jd7xrfcaeydqe6crzf914p6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.044099Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719655. Ctx: { TraceId: 01jd7xrfcmfmjgesqtn7fkwxxc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.055412Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719656. Ctx: { TraceId: 01jd7xrfcy7yyzcjk8j5kd2mck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.067029Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719657. Ctx: { TraceId: 01jd7xrfd9ajpz10m81p6z12zy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.077498Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719658. Ctx: { TraceId: 01jd7xrfdpcr18957m3tbbb92b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.087109Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719659. 
Ctx: { TraceId: 01jd7xrfe0110cnd2g0wmc22rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.097713Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719660. Ctx: { TraceId: 01jd7xrfe9f1x0fhqc99j8gmf7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.107861Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719661. Ctx: { TraceId: 01jd7xrfemb5c9vp842frds1p1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.119157Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719662. Ctx: { TraceId: 01jd7xrfey7pzcnmfyj7vf78wx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.130415Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719663. Ctx: { TraceId: 01jd7xrffaffpwzfvy4zek3a2e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.141950Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719664. Ctx: { TraceId: 01jd7xrffnazx7yfp2xjy1gykf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.153717Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719665. Ctx: { TraceId: 01jd7xrfg1aasqnwp9zw1ywm7f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.165798Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719666. Ctx: { TraceId: 01jd7xrfgc77r62zqpjyzwmx2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.175902Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719667. Ctx: { TraceId: 01jd7xrfgra44mejmqrg4q3qng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:14.187605Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976719668. Ctx: { TraceId: 01jd7xrfh3891whjgrfcmwhww6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjU3YWIxMDMtNTY0OGZjZDMtZDhiODEzZjEtODY2YWIxNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T17:54:14.193361Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T17:54:14.193640Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected >> test.py::test[pg-sublink_where_exists-default.txt-Debug] [GOOD] >> test.py::test[pg-sublink_where_exists-default.txt-Plan] [GOOD] >> test.py::test[pg-sublink_where_exists-default.txt-Results] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> YdbTableBulkUpsert::ValidRetry |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] >> test.py::test[pg-sublink_where_exists-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Debug] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[type_v3-type_subset--Analyze] [SKIPPED] >> test.py::test[type_v3-type_subset--Debug] [SKIPPED] >> test.py::test[type_v3-type_subset--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-type_subset--Plan] [SKIPPED] >> test.py::test[type_v3-type_subset--Results] [SKIPPED] >> test.py::test[union-union_column_extention-default.txt-Analyze] >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes |85.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |85.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> YdbYqlClient::TestReadTableOneBatch [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2024-11-21T17:54:11.512880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792390846761656:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:11.512929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001116/r3tmp/tmpRm01Bx/pdisk_1.dat 2024-11-21T17:54:11.572632Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17985, node 1 2024-11-21T17:54:11.594829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:11.594848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:11.594851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T17:54:11.594900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:54:11.613064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:11.613099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:17878 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:54:11.614583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:11.653418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.654503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:11.654525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.655150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:11.655195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:11.655203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:54:11.655517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:11.655526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:11.655625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:11.656415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.657346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211651702, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:11.657354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:11.657421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:11.657827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:11.657876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:11.657890Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:11.657904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:11.657916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:11.657932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:11.658352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:11.658375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:11.658385Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:11.658400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:11.857839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792390846762579:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:11.857881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:11.889786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.889932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:11.890075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:11.890087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:11.890636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:54:11.890690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:11.890742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:11.890759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:11.890829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:11.890971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:11.890982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:11.890985Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:11.891007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:11.891010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:11.891010Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:11.892703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:11.892724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:11.893171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:54:11.945408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:54:11.945421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:54:11.945446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:54:11.946014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:54:11.946839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211651996, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:11.946855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211651996 2024-11-21T17:54:11.946883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:54:11.947294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:11.947381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:11.947399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:54:11.947637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:11.947650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:11.947653Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: ... CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:14.952271Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:14.952300Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:14.953930Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:14.955916Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:14.956023Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:14.956035Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:14.956486Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:14.956534Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:14.956543Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:54:14.956933Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:14.956942Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:14.957085Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:14.957245Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:14.957981Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211655006, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:14.957994Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:14.958051Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:14.958390Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:14.958441Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:14.958455Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:14.958466Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:14.958474Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:14.958492Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:14.958871Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:54:14.958895Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:14.958914Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:14.958934Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:15.113140Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.113276Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:15.113420Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:15.113431Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.113953Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:54:15.114009Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:15.114063Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:15.114091Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:15.114152Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:15.114290Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.114304Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.114308Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:15.114342Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.114350Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.114353Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:15.115685Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.115710Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:15.116063Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable 
TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:54:15.168188Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:54:15.168197Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:54:15.168218Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:54:15.168633Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:54:15.169292Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211655216, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:15.169308Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211655216 2024-11-21T17:54:15.169335Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:54:15.169641Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:15.169729Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:15.169741Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:54:15.169952Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.169964Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.169968Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:15.169997Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.170005Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.170006Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:15.170357Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211655216 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 276 } } 2024-11-21T17:54:15.170424Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:54:15.170435Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 
2024-11-21T17:54:15.170439Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:54:15.170761Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:15.170775Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:15.170783Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> TGRpcClientLowTest::GrpcRequestProxy |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Uint8 >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> test.py::test[union-union_column_extention-default.txt-Analyze] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Debug] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest >> test.py::test[pg-tpcds-q05-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> YdbLogStore::LogStore >> TOlapReboots::CreateTable >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] >> TGroupMapperTest::MonteCarlo [GOOD] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Debug] >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> test.py::test[union-union_column_extention-default.txt-Debug] [GOOD] >> test.py::test[union-union_column_extention-default.txt-ForceBlocks] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:08.910539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:08.910568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:08.910589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:08.910594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:08.910609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:08.910613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:08.910622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:08.910713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:08.917651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:08.917671Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:08.920032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:08.920523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:08.920549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:08.921626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:08.921777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:08.921847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.921913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:08.922787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.923001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:08.923007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.923034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:08.923038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:08.923041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:08.923050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.924015Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:08.935697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:08.935767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.935827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:08.935887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:08.935892Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.936477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.936500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:08.936536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.936544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:08.936548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:08.936551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:08.937080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.937093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:08.937096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:08.937381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.937387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.937391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.937395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.937813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:08.938199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:08.938249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:08.938444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:08.938464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:08.938468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:54:08.938508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:08.938512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:08.938533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:08.938543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:08.938883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:08.938890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:08.938930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:08.938934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:08.939004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:08.939008Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:08.939019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:08.939022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.939026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:08.939029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:08.939032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:08.939034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:08.939041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:08.939046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:08.939048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:08.939254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:08.939262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:08.939265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:08.939268Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:08.939271Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:08.939279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... Latency: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 187 } } 2024-11-21T17:54:17.499960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:54:17.499973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:17.499990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:54:17.499997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2024-11-21T17:54:17.500109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:17.500120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet72075186233409546 2024-11-21T17:54:17.500128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:54:17.500133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2024-11-21T17:54:17.500146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet72075186233409546 2024-11-21T17:54:17.500181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2024-11-21T17:54:17.500209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2024-11-21T17:54:17.500222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T17:54:17.508418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:17.509613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:17.509715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T17:54:17.509725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T17:54:17.509778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2024-11-21T17:54:17.509824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 
2024-11-21T17:54:17.509830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 1 2024-11-21T17:54:17.509834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 2 2024-11-21T17:54:17.509962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:17.509972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2024-11-21T17:54:17.509998Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:17.510005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2024-11-21T17:54:17.510012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T17:54:17.510304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:17.510318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:17.510323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T17:54:17.510329Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2024-11-21T17:54:17.510336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T17:54:17.510554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:17.510567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:17.510585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T17:54:17.510589Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:17.510595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T17:54:17.510609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:54:17.511872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:17.511885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 
2024-11-21T17:54:17.512011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T17:54:17.512047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:54:17.512051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:17.512057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T17:54:17.512072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2494] message: TxId: 104 2024-11-21T17:54:17.512077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:17.512083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:54:17.512088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:54:17.512109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T17:54:17.512243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T17:54:17.512253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T17:54:17.513186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T17:54:17.513261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T17:54:17.513619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T17:54:17.513629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 0, path id: 1 2024-11-21T17:54:17.513739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:54:17.513745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1429:3342] 2024-11-21T17:54:17.513838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2024-11-21T17:54:17.514402Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T17:54:17.514442Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 49us result status StatusSuccess 2024-11-21T17:54:17.514539Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> test.py::test[select-where_not_null--Plan] |85.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::BiStreamPing >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] Test command err: 2024-11-21T17:54:10.190181Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792389212220252:2250];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:10.190202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00112c/r3tmp/tmpTa7K1Q/pdisk_1.dat 2024-11-21T17:54:10.237218Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6538, node 1 2024-11-21T17:54:10.250006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:10.250018Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:10.250020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:10.250048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3536 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:10.271577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.272589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:10.272606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.273483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:10.273554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:10.273563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:54:10.273917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:10.273928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:54:10.273989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:54:10.274222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.275003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211650323, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:10.275013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:54:10.275069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:54:10.275424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:10.275470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:10.275484Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:54:10.275493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:54:10.275506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:54:10.275519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:54:10.275890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:54:10.275904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:54:10.275908Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:10.275921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:3536 2024-11-21T17:54:10.290091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:10.290123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:10.291787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:10.298585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.298709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:10.298719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.299244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T17:54:10.299291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:10.299346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:10.299366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T17:54:10.299374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 waiting... 2024-11-21T17:54:10.299627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:10.299639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:10.299643Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:10.299673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:10.299681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:10.299682Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:54:10.299716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:54:10.299851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:10.299863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T17:54:10.300209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.300895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211650344, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:10.300906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet 72057594046644480 2024-11-21T17:54:10.300960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T17:54:10.301297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:10.301339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:10.301352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T17:54:10.301364Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:54:10.301378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T17:54:10.301392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T17:54:10.301509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:10.301518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:10.301520Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:10.301536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:10.301542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:10.301543Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:10.301546Z node 1 :FLAT_TX_SCHE ... 54:16.743275Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:16.743284Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:16.743286Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:54:16.743757Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:16.743768Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:54:16.744216Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:16.744666Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:16.745309Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211656791, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:16.745328Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet 72057594046644480 2024-11-21T17:54:16.745389Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:54:16.745830Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:16.745901Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2024-11-21T17:54:16.745918Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:16.745952Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:16.745968Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:54:16.745988Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:54:16.746179Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:16.746201Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:16.746210Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:16.746259Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:16.746262Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:16.746264Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:16.746271Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T17:54:16.747381Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:16.747532Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:16.747544Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:16.747936Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T17:54:16.747979Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 
2024-11-21T17:54:16.748603Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:54:16.749637Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:16.749711Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:16.749722Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T17:54:16.750101Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:17.260544Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:17.260566Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:17.263323Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T17:54:17.263633Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3658 2024-11-21T17:54:17.293984Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T17:54:17.341837Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T17:54:17.341992Z 
node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] Test command err: 2024-11-21T17:54:13.456392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792402717179054:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:13.456502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010ff/r3tmp/tmpEKnwDG/pdisk_1.dat 2024-11-21T17:54:13.501724Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3196, node 1 2024-11-21T17:54:13.516575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:13.516589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:13.516590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:13.516618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:13.556621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:13.556642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:13.558088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:13.578747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:13.579588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:13.579601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:13.579962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:13.580000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:13.580009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:54:13.580276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:13.580285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:13.580485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:13.580600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:13.581244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211653627, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:13.581257Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:13.581308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:13.581638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:13.581695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:13.581708Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:13.581717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:13.581730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:13.581740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:13.582142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 
2024-11-21T17:54:13.582171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:13.582175Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:13.582198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:13.601530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:13.601575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:13.601660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:13.601670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:13.602284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T17:54:13.602341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:13.602396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:13.602419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T17:54:13.602470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:13.602608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:13.602624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:13.602632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:13.602680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:13.602694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:13.602695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:13.604466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:13.604497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:13.656810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 
2024-11-21T17:54:13.657256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:13.658013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211653704, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:13.658025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T17:54:13.658048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:54:13.658402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:13.658459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:13.658471Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:13.658483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:13.658492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:54:13.658517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2024-11-21T17:54:13.658680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:13.658692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:13.658695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:13.658715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:13.658717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:13.658718Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:13.658721Z node 1 :FLAT_ ... nished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:17.034694Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:17.034738Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:17.036258Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:17.038174Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.038285Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:17.038296Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.038755Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:17.038794Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:17.038802Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:54:17.039128Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:17.039135Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:17.039374Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:17.039424Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.040294Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211657085, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:17.040306Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:17.040368Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:17.040734Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:17.040774Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:17.040788Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:17.040803Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:17.040814Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:17.040830Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:17.040923Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:54:17.040933Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:17.040936Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:17.040948Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:17.053810Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TheTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.053950Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:17.054122Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:17.054133Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.054931Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TheTable 2024-11-21T17:54:17.054984Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:17.055027Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:17.055052Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:17.055172Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:17.055184Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:17.055188Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:17.055219Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:17.055223Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:17.055225Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:17.055349Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:17.056881Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:17.056921Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:17.057419Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable 
TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:54:17.110675Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:54:17.110688Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:54:17.110714Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:54:17.111209Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:54:17.112102Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211657155, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:17.112114Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211657155 2024-11-21T17:54:17.112151Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:54:17.112970Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:17.113063Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:17.113080Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:54:17.113443Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:17.113452Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:17.113457Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:17.113493Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:17.113496Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:17.113497Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:17.120041Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211657155 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 755 } } 2024-11-21T17:54:17.120131Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:54:17.120142Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 
2024-11-21T17:54:17.120153Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:54:17.129546Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:17.129576Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:17.129588Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestExternalBoot >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> THiveTest::TestExternalBoot [GOOD] >> YdbTableBulkUpsert::Uint8 [GOOD] >> THiveTest::TestExternalBootWhenLocked |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris >> YdbTableBulkUpsert::ZeroRows |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |85.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris >> test.py::test[pg-tpcds-q29-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Results] >> test.py::test[union-union_column_extention-default.txt-ForceBlocks] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Plan] [GOOD] >> test.py::test[union-union_column_extention-default.txt-Results] |85.5%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcClientLowTest::BiStreamPing [GOOD] >> TGRpcClientLowTest::BiStreamCancelled >> TPersQueueTest::InflightLimit [GOOD] >> YdbYqlClient::TestReadTableSnapshot [GOOD] |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test.py::test[select-where_not_null--Plan] [GOOD] >> test.py::test[select-where_not_null--Results] >> THiveTest::TestExternalBootWhenLocked [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2024-11-21T17:53:48.842347Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:48.842970Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:48.843029Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:53:48.843141Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:53:48.843303Z node 1 :BS_NODE DEBUG: 
{NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:53:48.843310Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:48.843409Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T17:53:48.843415Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:48.843442Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:48.843501Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:48.845319Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:48.845339Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:48.845616Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:48.845649Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:48.845674Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:48.845697Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:48.845720Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:48.845743Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:48.845772Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:48.845775Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:48.845786Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T17:53:48.845790Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T17:53:48.845796Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:48.845802Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:48.845920Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:48.848863Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:48.848880Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:48.848884Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:48.849118Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:48.849149Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:48.849176Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:48.849179Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 
2024-11-21T17:53:48.849184Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:48.849527Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:48.849576Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:53:48.849580Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:48.849910Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:48.849923Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:48.849926Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:48.849951Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T17:53:48.849960Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:48.849980Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T17:53:48.849982Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T17:53:48.850093Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:48.850127Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T17:53:48.850132Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T17:53:48.850134Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T17:53:48.850138Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:48.850159Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:48.850163Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:48.850177Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T17:53:48.850179Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T17:53:48.850182Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:48.850214Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:48.850232Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:48.850239Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true 
NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:53:48.850539Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T17:53:48.850643Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:48.850657Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:48.850661Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T17:53:48.851260Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T17:53:48.851266Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T17:53:48.851278Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:48.851420Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:48.851471Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:48.851482Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T17:53:48.851485Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T17:53:48.851487Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:48.851522Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T17:53:48.851529Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T17:53:48.851531Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T17:53:48.851539Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T17:53:48.851542Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:48.851547Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:48.851555Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:48.851561Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:48.851565Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T17:53:48.851569Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T17:53:48.851572Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T17:53:48.851578Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 
TabletID: 72057594037927937} 2024-11-21T17:53:48.851603Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 63] CurrentLeaderTablet: [54:1940:2266] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T17:54:17.431536Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037892 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037892 Cookie: 0 CurrentLeader: [54:1934:2263] CurrentLeaderTablet: [54:1940:2266] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2024-11-21T17:54:17.431540Z node 49 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037892 followers: 0 2024-11-21T17:54:17.431545Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1934:2263] 2024-11-21T17:54:17.431565Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] forward result remote node 54 [49:2063:2723] 2024-11-21T17:54:17.431593Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] remote node connected [49:2063:2723] 2024-11-21T17:54:17.431597Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892]::SendEvent [49:2063:2723] 2024-11-21T17:54:17.431630Z node 54 :PIPE_SERVER DEBUG: [72075186224037892] Accept Connect Originator# [49:2063:2723] 2024-11-21T17:54:17.431730Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] connected with status OK role: Leader [49:2063:2723] 2024-11-21T17:54:17.431735Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037892] send queued [49:2063:2723] 2024-11-21T17:54:17.431858Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [49:2067:2725] 2024-11-21T17:54:17.431862Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [49:2067:2725] 2024-11-21T17:54:17.431869Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:17.431874Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1280:2096] 2024-11-21T17:54:17.431896Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result remote node 54 [49:2067:2725] 2024-11-21T17:54:17.431912Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] remote node connected [49:2067:2725] 2024-11-21T17:54:17.431916Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [49:2067:2725] 2024-11-21T17:54:17.431968Z node 54 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [49:2067:2725] 2024-11-21T17:54:17.432058Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [49:2067:2725] 2024-11-21T17:54:17.432064Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [49:2067:2725] 2024-11-21T17:54:17.432363Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [49:2070:2727] 2024-11-21T17:54:17.432372Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [49:2070:2727] 2024-11-21T17:54:17.432381Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:17.432387Z node 49 
:TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [53:1284:2097] 2024-11-21T17:54:17.432405Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 53 [49:2070:2727] 2024-11-21T17:54:17.432428Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [49:2070:2727] 2024-11-21T17:54:17.432432Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [49:2070:2727] 2024-11-21T17:54:17.432508Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connect request undelivered [49:2070:2727] 2024-11-21T17:54:17.432513Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] immediate retry [49:2070:2727] 2024-11-21T17:54:17.432517Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [49:2070:2727] 2024-11-21T17:54:17.432522Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal 2024-11-21T17:54:17.432549Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:17.432564Z node 49 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:54:17.432590Z node 49 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2024-11-21T17:54:17.432597Z node 49 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2024-11-21T17:54:17.432603Z node 49 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2024-11-21T17:54:17.432611Z node 49 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [54:1936:2264] CurrentLeaderTablet: [54:1941:2267] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T17:54:17.432627Z node 49 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [54:1936:2264] CurrentLeaderTablet: [54:1941:2267] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T17:54:17.432637Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [54:1936:2264] CurrentLeaderTablet: [54:1941:2267] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2024-11-21T17:54:17.432641Z node 49 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2024-11-21T17:54:17.432646Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1936:2264] 2024-11-21T17:54:17.432669Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 54 [49:2070:2727] 2024-11-21T17:54:17.432687Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [49:2070:2727] 2024-11-21T17:54:17.432693Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [49:2070:2727] 2024-11-21T17:54:17.432740Z node 54 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [49:2070:2727] 2024-11-21T17:54:17.432826Z node 49 :PIPE_CLIENT 
DEBUG: TClient[72075186224037894] connected with status OK role: Leader [49:2070:2727] 2024-11-21T17:54:17.432832Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [49:2070:2727] 2024-11-21T17:54:17.432940Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [49:2074:2729] 2024-11-21T17:54:17.432944Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [49:2074:2729] 2024-11-21T17:54:17.432951Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:17.432956Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1782:2191] 2024-11-21T17:54:17.432977Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result remote node 54 [49:2074:2729] 2024-11-21T17:54:17.432997Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] remote node connected [49:2074:2729] 2024-11-21T17:54:17.433000Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [49:2074:2729] 2024-11-21T17:54:17.433046Z node 54 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [49:2074:2729] 2024-11-21T17:54:17.433116Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [49:2074:2729] 2024-11-21T17:54:17.433121Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [49:2074:2729] 2024-11-21T17:54:17.433213Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [49:2077:2731] 2024-11-21T17:54:17.433218Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [49:2077:2731] 2024-11-21T17:54:17.433226Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:17.433233Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [54:1785:2193] 2024-11-21T17:54:17.433254Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result remote node 54 [49:2077:2731] 2024-11-21T17:54:17.433274Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] remote node connected [49:2077:2731] 2024-11-21T17:54:17.433277Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [49:2077:2731] 2024-11-21T17:54:17.433332Z node 54 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [49:2077:2731] 2024-11-21T17:54:17.433425Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [49:2077:2731] 2024-11-21T17:54:17.433431Z node 49 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [49:2077:2731] 2024-11-21T17:54:17.433537Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [49:2079:2732] 2024-11-21T17:54:17.433541Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [49:2079:2732] 2024-11-21T17:54:17.433548Z node 49 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:17.433554Z node 49 :TABLET_RESOLVER DEBUG: SelectForward node 49 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 
countLeader 1 allowFollowers 0 winner: [49:582:2269] 2024-11-21T17:54:17.433567Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [49:2079:2732] 2024-11-21T17:54:17.433582Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [49:2079:2732] 2024-11-21T17:54:17.433598Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [49:2079:2732] 2024-11-21T17:54:17.433602Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [49:2079:2732] 2024-11-21T17:54:17.433624Z node 49 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [49:2079:2732] 2024-11-21T17:54:17.433662Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [49:2079:2732] 2024-11-21T17:54:17.433667Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [49:2079:2732] 2024-11-21T17:54:17.433670Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [49:2079:2732] 2024-11-21T17:54:17.433674Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [49:2079:2732] 2024-11-21T17:54:17.433678Z node 49 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [49:2079:2732] 2024-11-21T17:54:17.433684Z node 49 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [49:554:2264] EventType# 268697616 2024-11-21T17:54:17.433754Z node 49 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([49:2079:2732]) [49:2080:2733] |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub >> YdbTableBulkUpsert::ZeroRows [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug >> test.py::test[pg-tpcds-q29-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Debug] >> test.py::test[union-union_column_extention-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Analyze] |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |85.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2024-11-21T17:54:15.027749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792409787154697:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:15.027929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010ec/r3tmp/tmprwUsp7/pdisk_1.dat 2024-11-21T17:54:15.089391Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18786, node 1 2024-11-21T17:54:15.106612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:15.106624Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:15.106627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:15.106665Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16753 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:54:15.128188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:15.128216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:15.129714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:15.172716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.173591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:15.173608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.174040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:15.174091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:15.174099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:54:15.174453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:15.174460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:15.174496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:15.174772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.175480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211655223, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:15.175493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:15.175547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:15.175914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:15.175950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:15.175964Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:15.175974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:15.175987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:15.176003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:15.176454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:15.176473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:15.176477Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:15.176526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:15.316823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792409787155620:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:15.316844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:15.339975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.340075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:15.340194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:15.340201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.340707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:54:15.340762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:15.340811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:15.340835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:15.340892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:15.341018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.341028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.341032Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:15.341079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.341084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.341086Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:15.342900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.342931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:15.343355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:54:15.395338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:54:15.395348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:54:15.395369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T17:54:15.395785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:54:15.396399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211655440, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:15.396409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211655440 2024-11-21T17:54:15.396427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:54:15.396700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:15.396774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:15.396795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:54:15.397004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.397015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.397021Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: ... D NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/EmptyTable 2024-11-21T17:54:18.589735Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:18.589779Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:18.589793Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:18.590072Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T17:54:18.590104Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T17:54:18.590107Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:18.590113Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T17:54:18.590124Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T17:54:18.590163Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:18.590185Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:18.590190Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:18.590218Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 
PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:18.590226Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:18.590227Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:18.591988Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:18.592007Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:18.592532Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:54:18.652844Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:54:18.652865Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:54:18.652901Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:54:18.653615Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:54:18.654604Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211658702, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:18.654621Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211658702 2024-11-21T17:54:18.654657Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:54:18.655144Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:18.655243Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:18.655256Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:54:18.655514Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T17:54:18.655550Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T17:54:18.655552Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T17:54:18.655568Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T17:54:18.655716Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:18.655725Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:18.655730Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:18.655773Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715658 2024-11-21T17:54:18.655776Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:18.655778Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:18.656823Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211658702 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 387 } } 2024-11-21T17:54:18.657555Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:54:18.657571Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:18.657579Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:54:18.658004Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:18.658021Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:18.658037Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:54:18.664485Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jd7xrkx8246qad6nhwe12rz3, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:39336, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T17:54:18.669333Z node 10 :READ_TABLE_API DEBUG: [10:7439792422288011434:2305] Adding quota request to queue ShardId: 0, TxId: 281474976715659 2024-11-21T17:54:18.669352Z node 10 :READ_TABLE_API DEBUG: [10:7439792422288011434:2305] Assign stream quota to Shard 0, Quota 5, TxId 281474976715659 Reserved: 5 of 25, Queued: 0 2024-11-21T17:54:18.669538Z node 10 :READ_TABLE_API DEBUG: [10:7439792422288011434:2305] got stream part, size: 35, RU required: 128 rate limiter absent 2024-11-21T17:54:18.669643Z node 10 :READ_TABLE_API DEBUG: [10:7439792422288011434:2305] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T17:54:18.669657Z node 10 :READ_TABLE_API NOTICE: [10:7439792422288011434:2305] Finish grpc stream, status: 400000 2024-11-21T17:54:18.670373Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6d9f00] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670445Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6e7100] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670451Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6e1200] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670488Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6e0d00] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670501Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6d5900] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670523Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6cfa00] received request Name# PersQueueRequest ok# false data# peer# 
current inflight# 0 2024-11-21T17:54:18.670531Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6e5d00] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670560Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6d0900] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670597Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6dae00] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670624Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6e8f00] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670655Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6dea00] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670686Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6d2c00] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670715Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6d6d00] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670746Z node 10 :GRPC_SERVER DEBUG: [0x556b6b403000] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670773Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6e2100] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670795Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6c5a00] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670818Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6c5500] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670843Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6cf500] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670879Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6d0e00] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670905Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6c2800] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670927Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6d0400] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670949Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6ddb00] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670970Z node 10 :GRPC_SERVER DEBUG: [0x556b7ec86880] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T17:54:18.670993Z node 10 :GRPC_SERVER DEBUG: [0x556b7f6ea800] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.5%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> test.py::test[select-where_not_null--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2024-11-21T17:51:57.246356Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791819359842563:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.246495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.249216Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791817021378842:2061];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.274012Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00201f/r3tmp/tmpnQ8Pmf/pdisk_1.dat 2024-11-21T17:51:57.283827Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:57.284839Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.313663Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9580, node 1 2024-11-21T17:51:57.333016Z INFO: TTestServer started on Port 31895 GrpcPort 9580 2024-11-21T17:51:57.328980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00201f/r3tmp/yandexXkB56b.tmp 2024-11-21T17:51:57.328993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00201f/r3tmp/yandexXkB56b.tmp 2024-11-21T17:51:57.329057Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00201f/r3tmp/yandexXkB56b.tmp 2024-11-21T17:51:57.329092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31895 PQClient connected to localhost:9580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:57.356040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:51:57.377508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.377535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.378112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.378118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.378718Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:57.378832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:57.378922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:51:57.395563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T17:51:57.622071Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791817021379184:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.622104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439791817021379173:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.622128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.622416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791819359843533:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.622437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791819359843545:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.622444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:51:57.623260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T17:51:57.627715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791819359843547:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:51:57.627765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439791817021379188:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T17:51:57.646630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.701331Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791817021379231:2289], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.701472Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmI4YmJmNTUtZmFiNjJmMWQtOTUyMjNkYzUtNWEyZTgxNzk=, ActorId: [2:7439791817021379157:2280], ActorState: ExecuteState, TraceId: 01jd7xma5k03c52f08kd086s8t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.702024Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.705532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.714022Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791819359843856:2323], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:51:57.714111Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTk0MzUwNS1iMjhiYzU1OS1hOGVlNjliYS1kNGQ0NjdhMQ==, ActorId: [1:7439791819359843530:2300], ActorState: ExecuteState, TraceId: 01jd7xma5payp7wjqj8j75f1rc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:51:57.714308Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:51:57.723518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T17:51:57.754935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd7xma9946w5h80s643fbxa9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNlYTI3ZC0yYjQ1YjY0ZS0yOTFhMDNhNi1mYTJlZGE3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439791819359844079:3048] 2024-11-21T17:52:02.246596Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439791819359842563:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:02.246638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T17:52:02.249154Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439791817021378842:2061];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:02.249193Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initializ ... 
topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T17:54:10.738013Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2024-11-21T17:54:10.738042Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 got read request: guid# f9ee42f5-49de7dd5-f6f80704-e42e3947 2024-11-21T17:54:10.738055Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_547385227602411621_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T17:54:10.738060Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2024-11-21T17:54:10.738071Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1732211647441, sizeLag# 82536 2024-11-21T17:54:10.738081Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1TEvPartitionReady. Aval parts: 1 2024-11-21T17:54:10.738088Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 performing read request: guid# 71ce035d-f335e38c-fced37c-ebad5cb3, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T17:54:10.738096Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 71ce035d-f335e38c-fced37c-ebad5cb3 2024-11-21T17:54:10.738231Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T17:54:10.738240Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T17:54:10.738242Z node 28 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T17:54:11.377650Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:54:13.717988Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2024-11-21T17:54:14.381051Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T17:54:14.381072Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:54:14.381118Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2024-11-21T17:54:14.381127Z node 28 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:54:14.381231Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:54:14.381695Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_16263715938818262118_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1732211647441 CreateTimestampMS: 1732211647441 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1732211647444 CreateTimestampMS: 1732211647443 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1732211647446 CreateTimestampMS: 1732211647446 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1732211647449 CreateTimestampMS: 1732211647448 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3644 } Cookie: 0 } 2024-11-21T17:54:14.381795Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_16263715938818262118_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T17:54:14.381803Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_16263715938818262118_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid c876fe0d-4f075c21-bab77baf-1aa2aa6c has messages 1 2024-11-21T17:54:14.381851Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_16263715938818262118_v1 read done: guid# c876fe0d-4f075c21-bab77baf-1aa2aa6c, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2024-11-21T17:54:14.381861Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_16263715938818262118_v1 response to read: guid# c876fe0d-4f075c21-bab77baf-1aa2aa6c 2024-11-21T17:54:14.382088Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_16263715938818262118_v1 Process answer. 
Aval parts: 0 2024-11-21T17:54:14.383519Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_16263715938818262118_v1 grpc read done: success# 0, data# { } 2024-11-21T17:54:14.383523Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_16263715938818262118_v1 grpc read failed 2024-11-21T17:54:14.383528Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_16263715938818262118_v1 grpc closed 2024-11-21T17:54:14.383538Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_16263715938818262118_v1 is DEAD 2024-11-21T17:54:14.383904Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:54:14.383924Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_3_16263715938818262118_v1 2024-11-21T17:54:14.383938Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7439792386996630009:2564] destroyed 2024-11-21T17:54:14.383961Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_3_16263715938818262118_v1 2024-11-21T17:54:14.903709Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:54:14.903725Z node 27 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:16.377885Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T17:54:18.387730Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T17:54:18.387748Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:54:18.387794Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2024-11-21T17:54:18.387802Z node 28 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:54:18.387861Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T17:54:18.388404Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1732211647441 CreateTimestampMS: 1732211647441 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1732211647444 CreateTimestampMS: 1732211647443 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1732211647446 CreateTimestampMS: 1732211647446 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1732211647449 CreateTimestampMS: 1732211647448 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7649 } Cookie: 0 } 2024-11-21T17:54:18.388480Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T17:54:18.388487Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 71ce035d-f335e38c-fced37c-ebad5cb3 has messages 1 2024-11-21T17:54:18.388536Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 read done: guid# 71ce035d-f335e38c-fced37c-ebad5cb3, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2024-11-21T17:54:18.388545Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 response to read: guid# 71ce035d-f335e38c-fced37c-ebad5cb3 2024-11-21T17:54:18.388780Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 Process answer. Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2024-11-21T17:54:18.404570Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_547385227602411621_v1 grpc read done: success# 0, data# { } 2024-11-21T17:54:18.404585Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_547385227602411621_v1 grpc read failed 2024-11-21T17:54:18.404599Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_547385227602411621_v1 grpc closed 2024-11-21T17:54:18.404617Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_547385227602411621_v1 is DEAD 2024-11-21T17:54:18.405172Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:54:18.405188Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_2_547385227602411621_v1 2024-11-21T17:54:18.405203Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7439792386996630008:2565] destroyed 2024-11-21T17:54:18.405218Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_2_547385227602411621_v1 2024-11-21T17:54:18.718173Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2024-11-21T17:53:47.594760Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:47.595874Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } 
AvailabilityDomains: 0 } 2024-11-21T17:53:47.595921Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:47.596038Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [3:65:2071] ControllerId# 72057594037932033 2024-11-21T17:53:47.596041Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:47.596073Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:47.596144Z node 3 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:47.596255Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:47.596262Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:47.596544Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:71:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.596579Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:72:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.596603Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:73:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.596641Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:74:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.596664Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:75:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.596688Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:76:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.596713Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:77:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.596718Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:47.596729Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:65:2071] 2024-11-21T17:53:47.596733Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:65:2071] 2024-11-21T17:53:47.596740Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:47.596748Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:47.596814Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:47.596868Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:47.597352Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:47.597400Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:53:47.597536Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 
PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:53:47.597701Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:53:47.597707Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:47.597792Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:87:2075] ControllerId# 72057594037932033 2024-11-21T17:53:47.597795Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:47.597804Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:47.597833Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:47.598491Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:61:2065] 2024-11-21T17:53:47.598495Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:61:2065] 2024-11-21T17:53:47.598502Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:47.600860Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:47.600868Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:47.601039Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:94:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.601057Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:95:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.601072Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:96:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.601089Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:97:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.601108Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:98:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.601132Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:99:2085] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.601149Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:100:2086] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.601151Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:47.601157Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:87:2075] 2024-11-21T17:53:47.601160Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:87:2075] 2024-11-21T17:53:47.601163Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:47.601168Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:47.601272Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:47.601284Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:47.602218Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 
Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:47.602236Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:47.602324Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:108:2072] ControllerId# 72057594037932033 2024-11-21T17:53:47.602328Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:47.602335Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:47.602378Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:47.602461Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:47.602483Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:47.602485Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:47.602674Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:114:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.602689Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:115:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.602703Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:116:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.602721Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:117:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.602735Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:118:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.602756Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:119:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.602769Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:120:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.602772Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:47.602777Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:108:2072] 2024-11-21T17:53:47.602779Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:108:2072] 2024-11-21T17:53:47.602783Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:47.602786Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:47.602853Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:47.602878Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:65:2071] 2024-11-21T17:53:47.602885Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:47.602888Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:47.602892Z node 3 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:47.602916Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:47.602923Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:61:2065] 2024-11-21T17:53:47.606229Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send 
[1:87:2075] 2024-11-21T17:53:47.606236Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:47.606239Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:47.606476Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:47.606492Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:47.606514Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:108:2072] 2024-11-21T17:53:47.606518Z node 2 :BS_NO ... Kikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:18.897985Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Execute for tablet Dummy.72075186224037888.Leader.1 status 5 reason ReasonPill generation 1 follower 0 from local [42:94:2091] 2024-11-21T17:54:18.897991Z node 42 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 2024-11-21T17:54:18.897993Z node 42 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - sending 2024-11-21T17:54:18.898002Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:54:18.898008Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:18.898012Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10080004 [42:94:2091]} 2024-11-21T17:54:18.898021Z node 42 :LOCAL DEBUG: TLocalNodeRegistrar: Handle TEvDeadTabletAck TabletId:(72075186224037888,0) 2024-11-21T17:54:18.898030Z node 42 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - executing 2024-11-21T17:54:18.898033Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T17:54:18.898036Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:18.898038Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:54:18.898041Z node 42 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2024-11-21T17:54:18.898049Z node 42 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:54:18.898052Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:54:18.898054Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:18.898057Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Complete 2024-11-21T17:54:18.898111Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [42:446:2353] 2024-11-21T17:54:18.898114Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [42:446:2353] 2024-11-21T17:54:18.898127Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 
entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:18.898134Z node 42 :TABLET_RESOLVER DEBUG: SelectForward node 42 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [42:363:2294] 2024-11-21T17:54:18.898142Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [42:446:2353] 2024-11-21T17:54:18.898146Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [42:446:2353] 2024-11-21T17:54:18.898153Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [42:446:2353] 2024-11-21T17:54:18.898156Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [42:446:2353] 2024-11-21T17:54:18.898161Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T17:54:18.898180Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:54:18.898193Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:54:18.898199Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:54:18.898204Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:54:18.898210Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T17:54:18.898218Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T17:54:18.898227Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:54:18.898238Z node 42 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T17:54:18.898264Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [43:448:2091] 2024-11-21T17:54:18.898269Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [43:448:2091] 2024-11-21T17:54:18.898276Z node 43 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:18.898280Z node 43 :TABLET_RESOLVER DEBUG: SelectForward node 43 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [42:314:2258] 2024-11-21T17:54:18.898283Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [43:448:2091] 2024-11-21T17:54:18.898289Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [43:448:2091] 2024-11-21T17:54:18.898293Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 42 [43:448:2091] 2024-11-21T17:54:18.898301Z node 43 :PIPE_CLIENT DEBUG: 
TClient[72057594037927937] remote node connected [43:448:2091] 2024-11-21T17:54:18.898304Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [43:448:2091] 2024-11-21T17:54:18.898325Z node 42 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [43:448:2091] 2024-11-21T17:54:18.898348Z node 42 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([43:448:2091]) [42:449:2354] 2024-11-21T17:54:18.898358Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [43:448:2091] 2024-11-21T17:54:18.898361Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [43:448:2091] 2024-11-21T17:54:18.898363Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [43:448:2091] 2024-11-21T17:54:18.898369Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [43:448:2091] 2024-11-21T17:54:18.898372Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [43:448:2091] 2024-11-21T17:54:18.898374Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [43:448:2091] 2024-11-21T17:54:18.898398Z node 42 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [43:436:2086] EventType# 268697624 2024-11-21T17:54:18.898410Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2024-11-21T17:54:18.898413Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:18.898416Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2024-11-21T17:54:18.898445Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxStartTablet::Execute, Sending TEvBootTablet(Dummy.72075186224037888.Leader.2) to node 43 storage {Version# 1 TabletID# 72075186224037888 TabletType# Dummy Channels# {0:{Channel# 0 Type# none StoragePool# def1 History# {0:{FromGeneration# 0 GroupID# 2147483648 Timestamp# 1970-01-01T00:00:00.058024Z}}, 1:{Channel# 1 Type# none StoragePool# def2 History# {0:{FromGeneration# 0 GroupID# 2147483649 Timestamp# 1970-01-01T00:00:00.058024Z}}, 2:{Channel# 2 Type# none StoragePool# def3 History# {0:{FromGeneration# 0 GroupID# 2147483650 Timestamp# 1970-01-01T00:00:00.058024Z}}} Tenant: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:18.898456Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2024-11-21T17:54:18.898459Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:18.908829Z node 42 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] bootstrap ActorId# [42:451:2356] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:54:18.908876Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:54:18.908884Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:54:18.908894Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] partPlacement record 
partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2024-11-21T17:54:18.908899Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2024-11-21T17:54:18.908929Z node 42 :BS_PROXY DEBUG: Send to queueActorId# [42:53:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:54:18.909288Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:54:18.909311Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T17:54:18.909319Z node 42 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:54:18.909351Z node 42 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T17:54:18.909377Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2024-11-21T17:54:18.909392Z node 42 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [43:436:2086]} 2024-11-21T17:54:18.909440Z node 42 :HIVE TRACE: HIVE#72057594037927937 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 43 Cookie 0 >> TGRpcClientLowTest::ChangeAcl [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Results] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Analyze] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug] >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2024-11-21T17:54:16.239624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792413244215593:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:16.239949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010d7/r3tmp/tmpI9Y1wV/pdisk_1.dat 2024-11-21T17:54:16.293679Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13178, node 1 2024-11-21T17:54:16.310112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:16.310129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:16.310131Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:16.310194Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:16.336059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:16.337060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:16.337079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:16.337707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:16.337778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:16.337787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:54:16.338171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:54:16.338242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:16.338250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:54:16.338649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:16.339442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:16.339465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:16.339648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211656385, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:16.339664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:54:16.339721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:54:16.340117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:16.340163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:16.340179Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:54:16.340191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:54:16.340202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:54:16.340213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:54:16.340620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:54:16.340640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:54:16.340643Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:16.340656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:54:16.340843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2024-11-21T17:54:16.523074Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T17:54:16.525409Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T17:54:17.128133Z node 4 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792418343297539:2069];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:17.128299Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010d7/r3tmp/tmpVE3HrK/pdisk_1.dat 2024-11-21T17:54:17.150205Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16756, node 4 2024-11-21T17:54:17.172719Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:17.172731Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:17.172733Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:17.172785Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:17.226740Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:17.226770Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:17.229898Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:17.233808Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.233936Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:17.233948Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.234465Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:17.234524Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:17.234536Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:54:17.234934Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:17.234945Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:17.235214Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.236054Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211657281, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:17.236064Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:17.236134Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:17.236613Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:17.236671Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:17.236686Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:17.236700Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:17.236709Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 ... . (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:19.786831Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:19.786869Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:19.788401Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:19.789794Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:19.789902Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:19.789913Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:19.790298Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:19.790337Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:19.790347Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T17:54:19.790645Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:19.790654Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:19.790786Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:19.790983Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:19.791752Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211659836, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:19.791764Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:19.791824Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:19.792198Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:19.792259Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:19.792275Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:19.792286Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:19.792296Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:19.792312Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:19.792454Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, 
cookie: 281474976715657 2024-11-21T17:54:19.792465Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:19.792468Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:19.792479Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:19.810273Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/TheDirectory, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:19.810340Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:19.811532Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/TheDirectory 2024-11-21T17:54:19.811594Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:19.811647Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:19.811670Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:19.811910Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:19.811926Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:19.811932Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:19.811985Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:19.811993Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:19.811995Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:54:19.813661Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211659857, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:19.813674Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211659857, at schemeshard: 72057594046644480 2024-11-21T17:54:19.813702Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T17:54:19.814085Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:19.814127Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:19.814142Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] 
TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:19.814172Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:19.814187Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T17:54:19.814201Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 0 2024-11-21T17:54:19.814348Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:19.814359Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:19.814362Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:19.814386Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:19.814395Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:19.814397Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:19.814402Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 2024-11-21T17:54:19.815072Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:19.821259Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/TheDirectory, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:19.821337Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:19.821347Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:19.821363Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:54:19.821389Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:54:19.821408Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 2, subscribers: 0 2024-11-21T17:54:19.821954Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/TheDirectory, set owner:qqq, add access: +R:qqq, add access: -():qqq:- 2024-11-21T17:54:19.821997Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:19.822058Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:19.822270Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:19.822284Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:19.822288Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T17:54:19.822331Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:19.822334Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:19.822335Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:54:19.822340Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 0 >> TPersQueueTest::SetMeteringMode [GOOD] >> TPersQueueTest::TClusterTrackerTest >> YdbTableBulkUpsert::Overload [GOOD] >> YdbTableBulkUpsert::RetryOperationSync ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2024-11-21T17:54:15.305641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792408161123996:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:15.305690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010e3/r3tmp/tmpy1Ex4r/pdisk_1.dat 2024-11-21T17:54:15.350451Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23209, node 1 2024-11-21T17:54:15.366123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:15.366138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:15.366140Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:15.366184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:15.406083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:15.406119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:15.407503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:15.429278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.430426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:15.430443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.430952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:15.430993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:15.431003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:54:15.431346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:15.431359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:15.431533Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:15.431721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.432496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211655482, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:15.432506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:15.432556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:15.432933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:15.432977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:15.433009Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:15.433028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:15.433041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:15.433059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:15.433460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:15.433485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:15.433489Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:15.433505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:15.619928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.620169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:15.620671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:15.620690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:15.621368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2024-11-21T17:54:15.621417Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:15.621478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:15.621493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:15.621691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:15.621727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.621736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.621739Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:15.621771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:15.621778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:15.621780Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:15.624003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 
2024-11-21T17:54:15.624207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:15.624366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 2814749 ... e TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T17:54:19.826001Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T17:54:19.826002Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 3 2024-11-21T17:54:19.826014Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T17:54:19.826016Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T17:54:19.826018Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 1 2024-11-21T17:54:19.826520Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T17:54:19.829302Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211659878, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:19.829320Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211659878, at schemeshard: 72057594046644480 2024-11-21T17:54:19.829357Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T17:54:19.829386Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211659878, at schemeshard: 72057594046644480 2024-11-21T17:54:19.829399Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 128 -> 240 2024-11-21T17:54:19.829429Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211659878, at schemeshard: 72057594046644480 
2024-11-21T17:54:19.829441Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:2 128 -> 240 2024-11-21T17:54:19.829455Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715660:3, HandleReply TEvOperationPlan: step# 1732211659878 2024-11-21T17:54:19.829466Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:3 128 -> 240 2024-11-21T17:54:19.829901Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:19.830006Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:19.830018Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T17:54:19.830029Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/4 2024-11-21T17:54:19.830054Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:2 ProgressState 2024-11-21T17:54:19.830058Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:2 progress is 2/4 2024-11-21T17:54:19.830068Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:3 ProgressState 2024-11-21T17:54:19.830073Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:3 progress is 3/4 2024-11-21T17:54:19.830082Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:1 ProgressState 2024-11-21T17:54:19.830086Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 4/4 2024-11-21T17:54:19.830092Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T17:54:19.830100Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2024-11-21T17:54:19.830104Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2024-11-21T17:54:19.830106Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:3 2024-11-21T17:54:19.830112Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 5, subscribers: 1 2024-11-21T17:54:19.830370Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T17:54:19.830377Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T17:54:19.830381Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:54:19.830411Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T17:54:19.830413Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T17:54:19.830415Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], 
version: 5 2024-11-21T17:54:19.830429Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T17:54:19.830431Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T17:54:19.830432Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:54:19.830444Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T17:54:19.830447Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T17:54:19.830449Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T17:54:19.830461Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T17:54:19.830463Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T17:54:19.830465Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2024-11-21T17:54:19.830471Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2024-11-21T17:54:19.831130Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439792426117240727:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:54:19.925422Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:54:19.925467Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:54:19.926107Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:54:19.944465Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xrn1f19a9qg0dwcqk34wh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWRiMDZlODktZDA5MTMyYjQtNzc4OGUzMmUtNTdiZDVlZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:19.951220Z node 10 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715663 scanId: 1 version: {1732211659885:max} readable: {1732211660000:max} at tablet 72075186224037888 2024-11-21T17:54:19.951269Z node 10 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715663 scanId: 1 at tablet 72075186224037888 2024-11-21T17:54:19.951399Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[10:7439792426117240501:2298];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1732211659885:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T17:54:19.956732Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[10:7439792426117240501:2298];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715663;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1732211659885:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[date;dateTimeS;dateTimeU;id;stringToString;timestamp;utf8ToString;];};]; 2024-11-21T17:54:19.957642Z node 10 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T17:54:19.957997Z node 10 :TX_COLUMNSHARD DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T17:54:19.958664Z node 10 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2024-11-21T17:54:19.961821Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211659885, txId: 18446744073709551615] shutting down >> test.py::test[pg-tpcds-q58-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Debug] >> TOlapReboots::CreateMultipleTables >> test.py::test[simple_columns-simple_columns_base-default.txt-Debug] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Plan] |85.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] >> test.py::test[simple_columns-simple_columns_base-default.txt-Plan] [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] |85.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |85.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> test.py::test[simple_columns-simple_columns_base-default.txt-Results] [GOOD] >> test.py::test[solomon-BadDownsamplingFill--Debug] [SKIPPED] >> test.py::test[solomon-BadDownsamplingFill--Plan] [SKIPPED] >> test.py::test[solomon-BadDownsamplingFill--Results] [SKIPPED] >> test.py::test[type_v3-append_diff_layout1--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:24.379559Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.379583Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.382943Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:24.384750Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:24.384952Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:24.385403Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:53:24.385792Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:53:24.386198Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:24.387494Z node 1 :PERSQUEUE INFO: new Cookie owner|7da5f9ef-b7924666-de9a72f9-c7c6222c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner 2024-11-21T17:53:24.387573Z node 1 :PERSQUEUE INFO: new Cookie owner|2f060dfb-2bf5fea4-39cf00a2-862d8c79_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner 2024-11-21T17:53:24.387614Z node 1 :PERSQUEUE INFO: new Cookie owner|f53ed1ef-d2a9770c-634cad98-a5fdcbe6_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR 2024-11-21T17:53:25.670867Z node 1 :PERSQUEUE INFO: new Cookie default|58e6ba1f-7bbe7914-2533c675-4effa5a7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2024-11-21T17:53:25.671026Z node 1 :PERSQUEUE INFO: new Cookie owner2|5b2945b5-38da3e56-b0fc3065-649468a5_0 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner owner2 2024-11-21T17:53:25.671213Z node 1 :PERSQUEUE INFO: new Cookie owner|99eb01b9-60648f0-b56567-a98dfded_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents:: ... 
:TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:20.851915Z node 86 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T17:54:20.852860Z node 86 :PERSQUEUE INFO: new Cookie default|3c565a67-50ab5f2b-e3eb8d9e-d50092ba_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [87:101:2057] recipient: [87:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [87:101:2057] recipient: [87:99:2133] Leader for TabletID 72057594037927937 is [87:105:2137] sender: [87:106:2057] recipient: [87:99:2133] 2024-11-21T17:54:21.009493Z node 87 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:21.009522Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [87:147:2057] recipient: [87:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [87:147:2057] recipient: [87:145:2168] Leader for TabletID 72057594037927938 is [87:151:2172] sender: [87:152:2057] recipient: [87:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [87:105:2137] sender: [87:177:2057] recipient: [87:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.014351Z node 87 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:21.014624Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 87 actor [87:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 87 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 87 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 87 Important: true } 2024-11-21T17:54:21.014794Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [87:184:2197] 2024-11-21T17:54:21.015467Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [87:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:21.015890Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [87:185:2198] 2024-11-21T17:54:21.016385Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [87:185:2198] Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:21.016728Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [87:186:2199] 2024-11-21T17:54:21.017191Z node 87 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [87:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.022611Z node 87 :PERSQUEUE INFO: new Cookie default|42354034-419dfe6c-120a40fd-f1cbc085_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.036358Z node 87 :PERSQUEUE INFO: new Cookie default|12fbb6bf-b8d20cbe-e96b28ff-8f42c411_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.051970Z node 87 :PERSQUEUE INFO: new Cookie default|efce2692-1be181e7-aea7d054-1d8a8f4e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:54:21.065214Z node 87 :PERSQUEUE INFO: new Cookie default|4efbf742-65ee995-dfc91245-6a612ac7_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.073202Z node 87 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T17:54:21.074023Z node 87 :PERSQUEUE INFO: new Cookie default|b7fefa62-b7798df1-ba3046ff-5443e5a4_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [88:101:2057] recipient: [88:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [88:101:2057] recipient: [88:99:2133] Leader for TabletID 72057594037927937 is [88:105:2137] sender: [88:106:2057] recipient: [88:99:2133] 2024-11-21T17:54:21.248772Z node 88 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:21.248793Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [88:147:2057] recipient: [88:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [88:147:2057] recipient: [88:145:2168] Leader for TabletID 72057594037927938 is [88:151:2172] sender: [88:152:2057] recipient: [88:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [88:105:2137] sender: [88:175:2057] recipient: [88:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.252060Z node 88 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:21.252288Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 88 actor [88:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 88 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 88 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 88 Important: true } 2024-11-21T17:54:21.252409Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [88:182:2195] 2024-11-21T17:54:21.252876Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [88:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:21.253157Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [88:183:2196] 2024-11-21T17:54:21.253454Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [88:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:21.253662Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: 
StateInit] bootstrapping 2 [88:184:2197] 2024-11-21T17:54:21.253933Z node 88 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [88:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.257776Z node 88 :PERSQUEUE INFO: new Cookie default|e658b6cd-d967809-84170a47-f2a10fce_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.269270Z node 88 :PERSQUEUE INFO: new Cookie default|a33c1be0-cdafef6b-423ea2c3-52839e5e_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.281602Z node 88 :PERSQUEUE INFO: new Cookie default|d4d67022-7276bcb-355cbb1e-9890f58b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:54:21.291889Z node 88 :PERSQUEUE INFO: new Cookie default|da804962-4404f41f-fecd9970-35e3385a_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:21.299505Z node 88 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2024-11-21T17:54:21.300161Z node 88 :PERSQUEUE INFO: new Cookie default|51f0ee2d-bd472e22-4c961d3a-3d620ba_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> test.py::test[pg-tpcds-q67-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-Results] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> YdbTableBulkUpsert::RetryOperation |85.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> test.py::test[union_all-union_all_with_parenthesis-default.txt-Results] [GOOD] >> test.py::test[view-view_with_lambda--Analyze] >> test.py::test[type_v3-append_diff_layout1--Debug] [GOOD] >> test.py::test[type_v3-append_diff_layout1--Plan] [GOOD] >> test.py::test[type_v3-append_diff_layout1--Results] >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Debug] >> test.py::test[type_v3-append_diff_layout1--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Debug] >> test.py::test[view-view_with_lambda--Analyze] [GOOD] >> test.py::test[view-view_with_lambda--Debug] >> TDatabaseQuotas::DisableWritesToDatabase [GOOD] >> GrpcConnectionStringParserTest::NoDatabaseFlag >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> YdbTableBulkUpsert::RetryOperation [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] >> test.py::test[join-right_trivial-off-Analyze] >> test.py::test[pg-tpcds-q73-default.txt-Debug] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2024-11-21T17:54:10.616625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792389899766401:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:10.616830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001128/r3tmp/tmpHCspIQ/pdisk_1.dat 2024-11-21T17:54:10.675184Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25329, node 1 2024-11-21T17:54:10.693606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:10.693619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:10.693622Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:10.693659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63865 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:10.717163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:10.717197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:10.718714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:10.750353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.751520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:10.751538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.752049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:10.752113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:10.752122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T17:54:10.752514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:10.752525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:10.752616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:10.752870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.753640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211650799, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:10.753651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:10.753706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:10.754028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:10.754083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:10.754099Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:10.754110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:10.754123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:10.754138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:10.754510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:10.754537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:10.754546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:10.754557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:10.906078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestNulls_0x0006, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.906220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:10.906396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:10.906411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:10.907205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestNulls_0x0006 
2024-11-21T17:54:10.907272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:10.907331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:10.907357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:10.907424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:10.907530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:10.907546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:10.907549Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:10.907593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:10.907598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:10.907599Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:10.909330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:10.909362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:10.909749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:54:10.961882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:54:10.961907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:54:10.961939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:54:10.962441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:54:10.963306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211651009, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:10.963320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211651009 2024-11-21T17:54:10.963353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:54:10.963761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:10.963835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2024-11-21T17:54:10.963852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:54:10.964031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:10.964062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:10.964075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:10.964136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:10.964146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:10.964149Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 7205759404 ... 657:0 2 -> 3 waiting... 2024-11-21T17:54:22.214284Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:22.214297Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:22.214475Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:22.214637Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:22.215426Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211662265, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:22.215438Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:54:22.215495Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:54:22.215812Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:22.215852Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:22.215864Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T17:54:22.215874Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:54:22.215884Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:54:22.215893Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:54:22.215978Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:22.215990Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 
281474976715657 2024-11-21T17:54:22.215994Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:22.216004Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T17:54:22.372135Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:22.372339Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T17:54:22.372555Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:22.372572Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:22.373391Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T17:54:22.373460Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:22.373522Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:22.373550Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T17:54:22.373624Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T17:54:22.373731Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:22.373748Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:22.373752Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:22.373785Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:22.373793Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:22.373794Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T17:54:22.375523Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T17:54:22.375547Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T17:54:22.375879Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T17:54:22.428135Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable 
TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T17:54:22.428148Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T17:54:22.428171Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T17:54:22.428733Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T17:54:22.429920Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211662475, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:22.429935Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732211662475 2024-11-21T17:54:22.429967Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T17:54:22.430396Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:22.430495Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:22.430512Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T17:54:22.430763Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:22.430777Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:22.430781Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:22.430868Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T17:54:22.430882Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T17:54:22.430885Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:22.431319Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732211662475 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 378 } } 2024-11-21T17:54:22.431360Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T17:54:22.431375Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:22.431380Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T17:54:22.431604Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T17:54:22.431620Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T17:54:22.431630Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS >> test.py::test[view-view_with_lambda--Debug] [GOOD] >> test.py::test[view-view_with_lambda--ForceBlocks] >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> GrpcConnectionStringParserTest::IncorrectConnectionString >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> test.py::test[type_v3-ignore_v3_hint-protofield-Debug] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Plan] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] |85.6%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[pg-tpcds-q73-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Debug] >> test.py::test[join-right_trivial-off-Analyze] [GOOD] >> test.py::test[join-right_trivial-off-Debug] |85.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas |85.6%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> LocalityOperation::LocksFromAnotherTenants >> test.py::test[type_v3-ignore_v3_hint-protofield-Results] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Debug] [GOOD] >> test.py::test[view-view_with_lambda--ForceBlocks] [GOOD] >> test.py::test[union-union_multiin--Debug] >> test.py::test[pg-tpcds-q95-default.txt-Plan] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Results] >> test.py::test[view-view_with_lambda--Plan] [GOOD] >> test.py::test[view-view_with_lambda--Results] >> YdbLogStore::LogStore [GOOD] >> test.py::test[join-right_trivial-off-Debug] [GOOD] >> test.py::test[pg-tpcds-q95-default.txt-Results] [GOOD] >> test.py::test[view-view_with_lambda--Results] [GOOD] >> YdbLogStore::LogStoreNegative >> test.py::test[join-right_trivial-off-ForceBlocks] [SKIPPED] >> test.py::test[pg-tpch-q06-default.txt-Debug] >> test.py::test[weak_field-weak_field_aggregation--Analyze] >> test.py::test[join-right_trivial-off-Plan] [GOOD] >> test.py::test[join-right_trivial-off-Results] [GOOD] >> test.py::test[join-strict_keys--Analyze] [SKIPPED] >> test.py::test[join-strict_keys--Debug] [SKIPPED] >> test.py::test[join-strict_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-strict_keys--Plan] [SKIPPED] >> test.py::test[join-strict_keys--Results] >> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId >> test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Debug] >> test.py::test[join-strict_keys--Results] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Analyze] [GOOD] >> TPQTest::TestPartitionWriteQuota [GOOD] >> test.py::test[join-yql-4275--Analyze] >> test.py::test[union-union_multiin--Debug] [GOOD] >> YdbLogStore::LogStoreNegative [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Debug] >> TPQTest::TestPQSmallRead >> YdbLogStore::Dirs >> test.py::test[union-union_multiin--Plan] >> test.py::test[join-yql-4275--Analyze] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Debug] [GOOD] >> LocalityOperation::LocksFromAnotherTenants [GOOD] >> test.py::test[join-yql-4275--Debug] >> test.py::test[union-union_multiin--Plan] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Results] >> test.py::test[union-union_multiin--Results] >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> test.py::test[weak_field-weak_field_aggregation--Debug] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] >> TOlapReboots::AlterTtlSettings [GOOD] |85.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants [GOOD] Test command err: 2024-11-21T17:54:13.906806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:13.906840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:54:13.906848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010fd/r3tmp/tmpGr9UYA/pdisk_1.dat 2024-11-21T17:54:14.068519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:54:14.132905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:14.132946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:14.144630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:14.468359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:942:2768], Recipient [1:501:2435]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:14.468381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:14.468386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T17:54:14.468403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:939:2766], Recipient [1:501:2435]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T17:54:14.468408Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T17:54:14.476308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:54:14.476382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:14.476404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:54:14.476459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T17:54:14.476501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T17:54:14.476525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:14.476531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2024-11-21T17:54:14.476541Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:54:14.476550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:54:14.476558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T17:54:14.477434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T17:54:14.477466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2024-11-21T17:54:14.477473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T17:54:14.477479Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2024-11-21T17:54:14.477548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:501:2435], Recipient [1:501:2435]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:54:14.477555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:54:14.477590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:14.477596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:54:14.477631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:54:14.477649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:14.477657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:714:2584], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T17:54:14.477661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:714:2584], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2024-11-21T17:54:14.477680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:14.477689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T17:54:14.477692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T17:54:14.477697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:54:14.477713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:54:14.477818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:942:2768], Recipient [1:501:2435]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:54:14.477824Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:54:14.477828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T17:54:14.478187Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [1:714:2584], Recipient [1:501:2435]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 4 } 2024-11-21T17:54:14.478200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:54:14.478214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:14.478229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:14.478233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:14.478238Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:14.478244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:54:14.478261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:54:14.478460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [1:714:2584], Recipient [1:501:2435]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 2 } 2024-11-21T17:54:14.478468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T17:54:14.478480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:14.478493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:14.478498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:54:14.478503Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:54:14.478508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T17:54:14.478521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T17:54:14.478525Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:54:14.479099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
2024-11-21T17:54:14.479112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2024-11-21T17:54:14.479369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:501:2435], Recipient [1:501:2435]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T17:54:14.479384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T17:54:14.479396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:14.479402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:14.479407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:54:14.479437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T17:54:14.479659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715657 2024-11-21T17:54:14.479667Z node 1 :FLAT_TX_SCHEMESHA ... ion: 3 2024-11-21T17:54:26.631154Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:54:26.631160Z node 9 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:54:26.631161Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 1 2024-11-21T17:54:26.632809Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211666682, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:26.632821Z node 9 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715664:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211666682, at schemeshard: 72057594046644480 2024-11-21T17:54:26.632847Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:0 128 -> 240 2024-11-21T17:54:26.632865Z node 9 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715664:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211666682, at schemeshard: 72057594046644480 2024-11-21T17:54:26.632876Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:1 128 -> 240 2024-11-21T17:54:26.632887Z node 9 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715664:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211666682, at schemeshard: 72057594046644480 2024-11-21T17:54:26.632892Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:2 128 -> 240 2024-11-21T17:54:26.632904Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715664:3, HandleReply TEvOperationPlan: step# 1732211666682 2024-11-21T17:54:26.632912Z node 9 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715664:3 128 -> 240 2024-11-21T17:54:26.633206Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:26.633268Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:26.633279Z node 9 
:FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:3 ProgressState 2024-11-21T17:54:26.633287Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:3 progress is 1/4 2024-11-21T17:54:26.633312Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:1 ProgressState 2024-11-21T17:54:26.633319Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:1 progress is 2/4 2024-11-21T17:54:26.633324Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:0 ProgressState 2024-11-21T17:54:26.633327Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 3/4 2024-11-21T17:54:26.633333Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715664:2 ProgressState 2024-11-21T17:54:26.633339Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:2 progress is 4/4 2024-11-21T17:54:26.633344Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2024-11-21T17:54:26.633353Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:1 2024-11-21T17:54:26.633355Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:2 2024-11-21T17:54:26.633356Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:3 2024-11-21T17:54:26.633359Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 5, subscribers: 1 2024-11-21T17:54:26.633559Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:54:26.633573Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:54:26.633576Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T17:54:26.633610Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:54:26.633616Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:54:26.633617Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T17:54:26.633628Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:54:26.633634Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:54:26.633635Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 5 2024-11-21T17:54:26.633646Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 8 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:54:26.633652Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:54:26.633653Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 5 2024-11-21T17:54:26.633663Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2024-11-21T17:54:26.633665Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2024-11-21T17:54:26.633666Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 2 2024-11-21T17:54:26.633670Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 2024-11-21T17:54:26.634035Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7439792457808448105:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2024-11-21T17:54:26.689288Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715665:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:54:26.689322Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:54:26.690056Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:54:26.698952Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd7xrvp56jzpj16nrj2zq6w8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGYyNjJmN2UtY2Y2NmM3NTctMjg1YTA2ZGItNGNjMDExYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:26.775109Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd7xrvtfd0cx6b9xrwerag52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGYyNjJmN2UtY2Y2NmM3NTctMjg1YTA2ZGItNGNjMDExYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:26.789794Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd7xrvtt53td73v99gm8erhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGYyNjJmN2UtY2Y2NmM3NTctMjg1YTA2ZGItNGNjMDExYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:26.791305Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd7xrvtt53td73v99gm8erhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGYyNjJmN2UtY2Y2NmM3NTctMjg1YTA2ZGItNGNjMDExYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:26.791755Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7439792457808448277:2310] TxId: 281474976715669. Ctx: { TraceId: 01jd7xrvtt53td73v99gm8erhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGYyNjJmN2UtY2Y2NmM3NTctMjg1YTA2ZGItNGNjMDExYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. 
Tx canceled, actorId: [9:7439792457808448277:2310], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2024-11-21T17:54:26.791843Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=NGYyNjJmN2UtY2Y2NmM3NTctMjg1YTA2ZGItNGNjMDExYzI=, ActorId: [9:7439792457808447917:2310], ActorState: ExecuteState, TraceId: 01jd7xrvtt53td73v99gm8erhb, Create QueryResponse for error on request, msg: 2024-11-21T17:54:26.791930Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd7xrvtt53td73v99gm8erhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGYyNjJmN2UtY2Y2NmM3NTctMjg1YTA2ZGItNGNjMDExYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:26.793360Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T17:54:26.793458Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:54:26.793511Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T17:54:26.793544Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:54:27.214984Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> test.py::test[union-union_multiin--Results] [GOOD] >> test.py::test[union-union_trivial-default.txt-Debug] >> test.py::test[produce-reduce_multi_in_presort--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> TOlapReboots::CreateDropTable [GOOD] >> TOlapReboots::CreateDropStore >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::AlterTtlSettings [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:53:29.805280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:53:29.805306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:53:29.805311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:53:29.805315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:53:29.805329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:53:29.805333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:53:29.805342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:53:29.805417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:53:29.813799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:53:29.813818Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:53:29.816491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:53:29.816609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:53:29.816637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:53:29.819208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:53:29.819285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:53:29.819377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:29.819534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:53:29.820070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:29.820371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:29.820384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:29.820399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:53:29.820406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:53:29.820412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:53:29.820459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:53:29.821643Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:53:29.837915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:53:29.838030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:29.838106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:53:29.838180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:53:29.838188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:29.842477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:29.842523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:53:29.842596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:29.842628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:53:29.842633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:53:29.842638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:53:29.843446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:29.843463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:53:29.843470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:53:29.843948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:29.843962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:29.843967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:29.843974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:53:29.844675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:53:29.846546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:53:29.846620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for 
TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:53:29.846816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:53:29.846865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:53:29.846882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:29.846947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:53:29.846954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:53:29.846985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:53:29.846996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:53:29.848885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:53:29.848902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:53:29.848944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:53:29.848949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:53:29.849036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:53:29.849045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:53:29.849058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:53:29.849062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:53:29.849068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:53:29.849073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:53:29.849078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:53:29.849082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:53:29.849094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:53:29.849100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:53:29.849104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
Send Plan to tablet 72075186233409546 for txId: 1005 at step: 5000006 2024-11-21T17:54:27.526030Z node 190 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:27.526044Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 816043788393 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:27.526048Z node 190 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#1005:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000006 2024-11-21T17:54:27.526132Z node 190 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 129 2024-11-21T17:54:27.526147Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:27.526156Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2024-11-21T17:54:27.526661Z node 190 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:27.526666Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:27.526691Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:54:27.526707Z node 190 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:27.526710Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [190:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T17:54:27.526714Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [190:201:2204], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2024-11-21T17:54:27.526768Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:27.526773Z node 190 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId#1005:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:54:27.526777Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId#1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:54:27.526843Z node 190 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:27.526851Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:27.526854Z node 190 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:54:27.526857Z node 190 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 
72057594046678944, LocalPathId: 3], version: 7 2024-11-21T17:54:27.526863Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:27.526952Z node 190 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:27.526958Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:27.526961Z node 190 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:54:27.526963Z node 190 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T17:54:27.526965Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:54:27.526972Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:54:27.527279Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:54:27.527533Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:54:27.527694Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:54:27.538171Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000006 2024-11-21T17:54:27.538182Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:27.538194Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000006 2024-11-21T17:54:27.538201Z node 190 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000006 2024-11-21T17:54:27.538239Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T17:54:27.538242Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:27.538248Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T17:54:27.538610Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:27.538663Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:27.538674Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:27.538679Z node 190 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T17:54:27.538689Z node 190 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:54:27.538691Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:54:27.538694Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T17:54:27.538704Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [190:356:2336] message: TxId: 1005 2024-11-21T17:54:27.538708Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:54:27.538711Z node 190 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:54:27.538713Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:54:27.538735Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:54:27.538972Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:54:27.538978Z node 190 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [190:499:2477] TestWaitNotification: OK eventTxId 1005 2024-11-21T17:54:27.539059Z node 190 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:27.539094Z node 190 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 42us result status StatusSuccess 2024-11-21T17:54:27.539199Z node 190 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } 
DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[join-yql-4275--Debug] [GOOD] >> test.py::test[join-yql-4275--ForceBlocks] >> test.py::test[weak_field-weak_field_aggregation--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Plan] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Debug] >> TSchemeShardSubDomainTest::Create ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:28.731755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:28.731774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:28.731778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:28.731781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:28.731790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:28.731793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:28.731799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:28.731866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:28.738717Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2024-11-21T17:54:28.738734Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:28.740726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:28.741230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:28.741254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:28.742392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:28.742586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:28.742663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:28.742742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:28.743696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:28.743922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:28.743931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:28.743958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:28.743963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:28.743968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:28.743978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.744914Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:28.755737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:28.755804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.755855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:28.755910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:28.755915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.756608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:28.756629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2024-11-21T17:54:28.756664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.756672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:28.756675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:28.756679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:28.756952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.756959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:28.756962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:28.757190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.757196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.757199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:28.757204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:28.757602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:28.757870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:28.757907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:28.758046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:28.758062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:28.758067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:28.758105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:28.758109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:28.758129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:28.758138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:28.758393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:28.758398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:28.758426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:28.758429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:28.758491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.758496Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:28.758504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:28.758507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:28.758512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:28.758517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:28.758521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:28.758524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:28.758533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:28.758538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:28.758542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:28.758767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:28.758776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:28.758779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:28.758782Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:28.758785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:28.758793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
xecute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:28.821725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:28.821728Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2024-11-21T17:54:28.821732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:28.821791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:28.821796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:28.821798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:28.821801Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:28.821803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:28.821808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T17:54:28.821922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 243 } } 2024-11-21T17:54:28.821929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:28.821941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 243 } } 2024-11-21T17:54:28.821949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 243 } } 2024-11-21T17:54:28.822205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 497 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:54:28.822212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:28.822222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, 
operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 497 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:54:28.822226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:54:28.822230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 497 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T17:54:28.822236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:28.822238Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.822241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:54:28.822244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T17:54:28.822680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:28.822700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:28.822708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.822718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.822747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:28.822752Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:54:28.822760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:54:28.822762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:28.822765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:54:28.822774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:305:2297] message: TxId: 102 2024-11-21T17:54:28.822777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:28.822781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:54:28.822783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:54:28.822796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:28.823090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:28.823096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:533:2475] 
TestWaitNotification: OK eventTxId 102 2024-11-21T17:54:28.823168Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:28.823195Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusSuccess 2024-11-21T17:54:28.823257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:28.823330Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:28.823343Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 14us result status StatusSuccess 2024-11-21T17:54:28.823396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> test.py::test[join-yql-4275--ForceBlocks] [GOOD] >> test.py::test[join-yql-4275--Plan] [GOOD] >> test.py::test[join-yql-4275--Results] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Analyze] >> test.py::test[union-union_trivial-default.txt-Debug] [GOOD] >> test.py::test[union-union_trivial-default.txt-Plan] [GOOD] >> test.py::test[union-union_trivial-default.txt-Results] >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:29.199704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:29.199726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:29.199731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:29.199735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:29.199750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:29.199754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:29.199762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:29.199827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:29.206684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:29.206698Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:29.208543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:29.209041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:29.209065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:29.209969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:29.210223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:29.210324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:29.210449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:29.211318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:29.211540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:29.211546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:29.211572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:29.211577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:29.211582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:29.211590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.212643Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:29.223640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:29.223716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.223785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:29.223854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:29.223860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.224499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:29.224515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:29.224545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.224552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:29.224555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:29.224559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:29.224812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.224819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:29.224821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:29.225028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.225032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.225036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:29.225041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:29.225423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:29.225688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:29.225723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:29.225849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:29.225863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:29.225868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:29.225904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:29.225908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:29.225928Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:29.225938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:29.226211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:29.226215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:29.226244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:29.226248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:29.226312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:29.226316Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:29.226325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:29.226328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:29.226332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:29.226335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:29.226338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:29.226340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:29.226346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:29.226350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:29.226353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:29.226541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:29.226549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:29.226553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:29.226555Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:29.226558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:29.226566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
d: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:54:29.644981Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:54:29.644983Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:54:29.644985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:29.645049Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:54:29.645055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T17:54:29.645057Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T17:54:29.645059Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:29.645061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:29.645065Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2024-11-21T17:54:29.645237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:29.645242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:29.645244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:29.645247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:29.645277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:54:29.645365Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:29.645522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:29.645562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2024-11-21T17:54:29.645667Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:29.645836Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 
TabletID: 72075186233409547 2024-11-21T17:54:29.645849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:29.646133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:29.646255Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:54:29.646283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:29.646300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2024-11-21T17:54:29.646368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:29.646380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T17:54:29.646543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:29.646548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:29.646555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:29.646755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:29.646760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:29.646777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:29.646831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:54:29.646875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T17:54:29.646885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:29.646889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:29.647069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:29.647073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:29.647081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:29.647083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 
tabletId 72075186233409547 2024-11-21T17:54:29.647303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:29.647308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:29.647327Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:29.647335Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:29.647340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:29.647344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:29.647350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:29.647530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T17:54:29.647570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T17:54:29.647574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T17:54:29.647619Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T17:54:29.647631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T17:54:29.647634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:654:2608] TestWaitNotification: OK eventTxId 105 2024-11-21T17:54:29.647682Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:29.647700Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 26us result status StatusPathDoesNotExist 2024-11-21T17:54:29.647730Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at 
schemeshard: 72057594046678944 2024-11-21T17:54:29.647772Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:29.647782Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 11us result status StatusPathDoesNotExist 2024-11-21T17:54:29.647793Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[weak_field-weak_field_long_name--Analyze] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Debug] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> test.py::test[join-yql-4275--Results] [GOOD] >> test.py::test[json-json_query/on_error-default.txt-Analyze] >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:30.170839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:30.170859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:30.170862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:30.170866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:30.170878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:30.170881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:30.170887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:30.170941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:30.178323Z node 
1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:30.178338Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:30.180128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:30.180636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:30.180660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:30.181749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:30.181988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:30.182082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:30.182173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:30.183016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:30.183247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:30.183255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:30.183283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:30.183287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:30.183291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:30.183300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.184179Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:30.194299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:30.194358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.194408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:30.194485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:30.194491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.195044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:30.195061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:30.195091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.195097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:30.195100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:30.195104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:30.195350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.195356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:30.195358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:30.195598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.195604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.195607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:30.195612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:30.196025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:30.196338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:30.196375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:30.196498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:30.196517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:30.196521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:30.196556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:30.196560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:30.196580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:30.196588Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:30.196906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:30.196911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:30.196941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:30.196944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:30.197004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.197008Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:30.197016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:30.197019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:30.197023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:30.197026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:30.197029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:30.197031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:30.197038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:30.197042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:30.197045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:30.197230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:30.197240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:30.197244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:30.197249Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:30.197254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:30.197264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6678944 2024-11-21T17:54:30.201037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#100:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:30.201040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2024-11-21T17:54:30.201159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:30.201179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:30.201357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.201363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.201367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:30.201371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T17:54:30.201388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:30.201594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:54:30.201609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:54:30.201647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:30.201658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:30.201661Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:30.201705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:54:30.201709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:30.201725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:30.201730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:30.201735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:54:30.201996Z node 
1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:30.202001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:30.202021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:30.202032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:30.202035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:54:30.202038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T17:54:30.202043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.202047Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T17:54:30.202053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:54:30.202056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:30.202059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:54:30.202062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:30.202065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:54:30.202067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:54:30.202074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:30.202077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T17:54:30.202079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:30.202081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:30.202169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:30.202175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:30.202177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:30.202180Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:30.202184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:30.202238Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:30.202243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:30.202245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:30.202247Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:30.202249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:30.202254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T17:54:30.202613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:30.202768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T17:54:30.202804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:30.202819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T17:54:30.202828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:30.202830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:30.202866Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:30.202881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:30.202884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:310:2302] 2024-11-21T17:54:30.202895Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:30.202905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:30.202907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:310:2302] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:30.202943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:30.202960Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 22us result status StatusSuccess 
2024-11-21T17:54:30.203014Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[union-union_trivial-default.txt-Results] [GOOD] >> test.py::test[union_all-path_and_record-default.txt-Debug] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Plan] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] >> TOlapReboots::CreateStore [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> test.py::test[weak_field-weak_field_long_name--Debug] [GOOD] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] >> test.py::test[pg-tpch-q11-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q11-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:54:13.889994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:13.890018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:13.890024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:13.890029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:13.890042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:13.890047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:13.890056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:13.890138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:13.900554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:13.900574Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:54:13.902201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:13.902262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:13.902285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:13.904553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:13.904613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:13.904687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:13.904942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:13.905500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:13.905683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:13.905690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:13.905697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:13.905702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:13.905707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:13.905733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:54:13.906812Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:54:13.919448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:13.919544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.919604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:13.919654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:13.919659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.920350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:13.920370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:13.920414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.920422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:13.920425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:13.920428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:13.920713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.920720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:13.920722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:13.921012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.921021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.921026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:13.921033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:13.921420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:13.921763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:13.921799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:13.921948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:13.921965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:13.921976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:13.922019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:13.922023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:13.922044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:13.922053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:13.922376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:13.922382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:13.922415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:13.922420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:13.922478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:13.922482Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:13.922489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:13.922492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:13.922496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:13.922499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:13.922502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:13.922505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:13.922511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:13.922515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:13.922518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
terRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1002 at step: 5000003 2024-11-21T17:54:30.573832Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:30.573845Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 292057778277 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:30.573849Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId#1002:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2024-11-21T17:54:30.573899Z node 68 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 129 2024-11-21T17:54:30.573920Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:30.573929Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T17:54:30.574384Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:30.574393Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:30.574433Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:30.574453Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:30.574456Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T17:54:30.574461Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:203:2206], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T17:54:30.574494Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.574499Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId#1002:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:54:30.574506Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId#1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:54:30.574634Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:54:30.574644Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 
72057594046678944, cookie: 1002 2024-11-21T17:54:30.574651Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:54:30.574654Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:30.574657Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:54:30.574766Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:54:30.574774Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:54:30.574776Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:54:30.574779Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:30.574781Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:30.574787Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T17:54:30.575020Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:54:30.575322Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:54:30.575448Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:54:30.586148Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T17:54:30.586168Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:30.586189Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 2024-11-21T17:54:30.586199Z node 68 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1002 MinStep: 0 Step: 5000003 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:54:30.586277Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2024-11-21T17:54:30.586281Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:30.586290Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: 
Origin: 72075186233409546 TxId: 1002 2024-11-21T17:54:30.586782Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.586852Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.586869Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:54:30.586875Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:54:30.586887Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:54:30.586891Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:54:30.586896Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T17:54:30.586907Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:360:2340] message: TxId: 1002 2024-11-21T17:54:30.586913Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:54:30.586918Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:54:30.586921Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:54:30.586946Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:30.587241Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:54:30.587247Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:361:2341] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:54:30.587329Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:30.587376Z node 68 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 52us result status StatusSuccess 2024-11-21T17:54:30.587464Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[json-json_query/on_error-default.txt-Analyze] [GOOD] >> test.py::test[json-json_query/on_error-default.txt-Debug] >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:31.110841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:31.110861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:31.110865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:31.110868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:31.110878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:31.110881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:31.110887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:31.110942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:31.118503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:31.118525Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:31.121239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:31.121989Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:31.122024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:31.123357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:31.123507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:31.123593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:31.123686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:31.124616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:31.124823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:31.124830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:31.124856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:31.124860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:31.124865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:31.124874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.125748Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:31.135277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:31.135332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.135376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:31.135429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:31.135434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.136128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:31.136155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:31.136199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.136209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:31.136213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:31.136218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:31.136623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.136632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:31.136635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:31.136875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.136880Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.136883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:31.136887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:31.137260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:31.137572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:31.137617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:31.137744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:31.137762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:31.137767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:31.137818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:31.137822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:31.137843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:31.137852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:31.138159Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:31.138164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:31.138192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:31.138195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:31.138253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.138257Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:31.138264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:31.138266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:31.138270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:31.138273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:31.138276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:31.138278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:31.138284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:31.138288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:31.138290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:31.138496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:31.138505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:31.138508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:31.138511Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:31.138513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:31.138521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
9551615 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:54:31.430450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106 2024-11-21T17:54:31.430452Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:54:31.430454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:31.430664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:54:31.430674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T17:54:31.430677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2024-11-21T17:54:31.430681Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:31.430687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:31.430695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2024-11-21T17:54:31.431003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:31.431009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:31.431012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:31.431014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:31.431016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:31.431085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:54:31.431288Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2024-11-21T17:54:31.431322Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:31.431354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:54:31.431385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409550 2024-11-21T17:54:31.431680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 106 Forgetting tablet 72075186233409546 2024-11-21T17:54:31.431731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:31.431756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:31.431809Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:31.431907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T17:54:31.431987Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:31.432052Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 2024-11-21T17:54:31.432128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:31.432149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2024-11-21T17:54:31.432218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:31.432248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2024-11-21T17:54:31.432292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:31.432303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:31.432413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:31.432418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:31.432424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:31.432602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:31.432606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:31.432618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:31.432750Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:54:31.432757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:54:31.432913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:31.432916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:31.432975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:31.432978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:31.432983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:31.432986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:31.432992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:31.432995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:31.433218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:31.433229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:31.433231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:31.433238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:31.433258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:31.433459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T17:54:31.433521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T17:54:31.433527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T17:54:31.433575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T17:54:31.433587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:54:31.433590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:762:2678] TestWaitNotification: OK eventTxId 106 2024-11-21T17:54:31.433643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:31.433662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 
2024-11-21T17:54:31.433705Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> test.py::test[union_all-path_and_record-default.txt-Debug] [GOOD] >> test.py::test[union_all-path_and_record-default.txt-Plan] [GOOD] >> test.py::test[union_all-path_and_record-default.txt-Results] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Debug] >> test.py::test[weak_field-weak_field_long_name--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Plan] [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> test.py::test[json-json_query/on_error-default.txt-Debug] [GOOD] >> test.py::test[json-json_query/on_error-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:31.613161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:31.613181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:31.613184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:31.613187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:31.613197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:31.613200Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:31.613206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:31.613259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:31.620441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:31.620460Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:31.622444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:31.622916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:31.622937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:31.623829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:31.623959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:31.624020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:31.624081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:31.624764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:31.624964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:31.624970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:31.624997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:31.625002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:31.625007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:31.625016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.625987Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:31.636145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:31.636208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.636283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:31.636340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:31.636345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.636994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:31.637009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:31.637037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.637045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:31.637047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:31.637050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:31.637327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.637334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:31.637338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:31.637606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.637612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.637615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:31.637620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:31.638024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:31.638330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:31.638365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:31.638497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:31.638513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:31.638518Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:31.638550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 128 -> 240 2024-11-21T17:54:31.638554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:31.638588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:31.638596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:31.638912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:31.638919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:31.638959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:31.638964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:31.639044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.639050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:31.639059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:31.639062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:31.639068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:31.639072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:31.639076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:31.639079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:31.639089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:31.639094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:31.639098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:31.639376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:31.639391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:31.639395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:31.639400Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:31.639405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:31.639418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:54:31.640027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:54:31.640105Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2024-11-21T17:54:31.640224Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:54:31.641042Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:54:31.641392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:31.641420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.641428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2024-11-21T17:54:31.641531Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:54:31.641940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:31.641957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2024-11-21T17:54:31.642017Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T17:54:31.642043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:31.642056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T17:54:31.642087Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:31.642096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:31.642098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:281:2273] TestWaitNotification: OK eventTxId 100 2024-11-21T17:54:31.642136Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2024-11-21T17:54:31.642150Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 18us result status StatusPathDoesNotExist 2024-11-21T17:54:31.642170Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> test.py::test[weak_field-weak_field_long_name--Results] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:32.135966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:32.135992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:32.135997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:32.136002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:32.136019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:32.136024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:32.136033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:32.136109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:32.146614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:32.146632Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:32.148659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:32.149121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:32.149144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T17:54:32.150026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:32.150148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:32.150215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.150280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:32.150969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.151177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:32.151183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.151211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:32.151216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:32.151220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:32.151229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.152098Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:32.162478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:32.162540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.162603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:32.162657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:32.162662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.163264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.163280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:32.163312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.163319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:32.163322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:32.163326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:32.163603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.163611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:32.163614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:32.163832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.163837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.163841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:32.163845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:32.164259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:32.164592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:32.164630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:32.164759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.164774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:32.164779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:32.164816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:32.164821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:32.164844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:32.164852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:32.165276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:32.165286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:32.165322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.165326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:32.165407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.165413Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:32.165423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:32.165427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:32.165431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:32.165434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:32.165437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:32.165440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:32.165451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:32.165456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:32.165459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:32.165704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:32.165715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:32.165719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:32.165722Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:32.165725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:32.165735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
1T17:54:32.169915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:32.170181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:54:32.170207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:54:32.170256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.170268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:32.170272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:32.170347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:54:32.170354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:32.170374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:32.170379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:32.170385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:54:32.170674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:32.170678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:32.170697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:32.170706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.170708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:54:32.170711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T17:54:32.170749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 
2024-11-21T17:54:32.170754Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T17:54:32.170764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:54:32.170767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:32.170772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:54:32.170777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:32.170781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:54:32.170785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:54:32.170793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:32.170798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T17:54:32.170801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:32.170803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:32.170865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:32.170871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:32.170874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:32.170876Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:32.170880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:32.170932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:32.170937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:32.170939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:32.170942Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:32.170944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:32.170948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T17:54:32.171420Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:32.171450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T17:54:32.171484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:32.171501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T17:54:32.171543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:32.171554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:32.171557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:306:2298] TestWaitNotification: OK eventTxId 100 2024-11-21T17:54:32.171602Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:32.171620Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 25us result status StatusSuccess 2024-11-21T17:54:32.171679Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:32.171723Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:32.171731Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 9us result status StatusSuccess 2024-11-21T17:54:32.171757Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAndRestart >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::SrcIdCompatibility >> test.py::test[union_all-path_and_record-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_multiin--Debug] >> test.py::test[json-json_query/on_error-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_query/on_error-default.txt-Plan] [GOOD] >> test.py::test[json-json_query/on_error-default.txt-Results] >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:32.744425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:32.744446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:32.744449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:32.744452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:32.744466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:32.744469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:32.744475Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:32.744538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:32.751773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:32.751792Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:32.754176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:32.754740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:32.754766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:32.755731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:32.755840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:32.755906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.755976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:32.756881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.757091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:32.757098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.757125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:32.757129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:32.757133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:32.757142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.758029Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:32.767800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:32.767853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.767902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:32.767953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:32.767958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.768482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.768499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:32.768530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.768537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:32.768541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:32.768546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:32.768808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.768816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:32.768819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:32.769029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.769034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.769038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:32.769042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:32.769444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:32.769733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:32.769769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:32.769900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.769915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:32.769922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:32.769957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:32.769962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:32.769982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:32.769990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:32.770276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:32.770280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:32.770311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.770315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:32.770376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.770380Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:32.770388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:32.770391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:32.770395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:32.770398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:32.770402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:32.770404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:32.770411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:32.770415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:32.770418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:32.770635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:32.770644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:32.770648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:32.770651Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:32.770654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:32.770663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
r ... NFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2024-11-21T17:54:32.847092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:32.847113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:601:2058] recipient: [1:15:2062] 2024-11-21T17:54:32.847267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:32.847298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:32.847301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:599:2522] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:32.847366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:32.847401Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 51us result status StatusPathDoesNotExist 2024-11-21T17:54:32.847453Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:32.847580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:605:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:608:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:543:2478] sender: [1:609:2058] recipient: [1:607:2527] Leader for TabletID 72057594046678944 is [1:610:2528] sender: [1:611:2058] recipient: [1:607:2527] 2024-11-21T17:54:32.851583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:32.851602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:32.851605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:32.851609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T17:54:32.851613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:32.851615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:32.851622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:32.851662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:32.852398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:32.852586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:32.852617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:32.852661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:32.852665Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:32.852726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:32.852770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.852996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.853010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.853023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.853032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.853035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.853041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:32.854227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:32.854240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:32.854248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:32.854252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:32.854256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:32.854652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:610:2528] sender: [1:668:2058] recipient: [1:15:2062] 2024-11-21T17:54:32.885320Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:32.885372Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 69us result status StatusPathDoesNotExist 2024-11-21T17:54:32.885398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:32.885474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:32.885495Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 22us result status 
StatusSuccess 2024-11-21T17:54:32.885550Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> test.py::test[json-json_query/on_error-default.txt-Results] [GOOD] >> test.py::test[json-json_query/wrapper-default.txt-Analyze] >> test.py::test[pg-tpch-q11-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Debug] >> TPersQueueTest::TestReadPartitionByGroupId [GOOD] >> TPersQueueTest::TestReadPartitionStatus >> test.py::test[window-leading/aggregations_leadlag--Analyze] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Debug] >> TSchemeShardSubDomainTest::SimultaneousDefine ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:33.075946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:33.075964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:33.075968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:33.075971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:33.075981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:33.075984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:33.075989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:33.076040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:33.083033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:33.083048Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:33.085035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:33.085502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:33.085525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:33.086438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:33.086623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:33.086685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.086748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:33.087391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.087598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:33.087605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.087630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:33.087634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:33.087638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:33.087647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.088489Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:33.098123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:33.098177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.098221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:33.098276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:33.098281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.098794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.098810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:33.098838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.098844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:33.098847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:33.098851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:33.099116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.099122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:33.099125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:33.099328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.099333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.099336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:33.099340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:33.099719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:33.100118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:33.100191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:33.100452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.100476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:33.100485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:33.100530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:33.100536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:54:33.100563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:33.100573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:33.100984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:33.100992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:33.101032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.101037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:33.101117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.101123Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:33.101134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:33.101139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:33.101146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:33.101152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:33.101158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:33.101163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:33.101176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:33.101183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:33.101188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:33.101492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:33.101510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:33.101515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:33.101519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:33.101523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:33.101537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:54:33.104614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:54:33.104647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T17:54:33.104701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.104713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:33.104722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:33.104756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:54:33.104761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:33.104779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:33.104784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:33.104789Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:33.104860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:33.105093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:33.105097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:33.105113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:33.105122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.105125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:54:33.105127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:33.105171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.105177Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:33.105187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:33.105190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:33.105193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:33.105196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:33.105198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:33.105201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:33.105207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:33.105210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:33.105212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:33.105216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:33.105282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:33.105288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:33.105291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:33.105293Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:33.105296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:33.105347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:33.105352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:33.105354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:33.105356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:33.105358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:33.105362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:33.105869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:33.105897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T17:54:33.106327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:33.106347Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2024-11-21T17:54:33.106351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2024-11-21T17:54:33.106370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2024-11-21T17:54:33.106376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2024-11-21T17:54:33.106655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:33.106673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2024-11-21T17:54:33.106716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:33.106728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T17:54:33.106737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:33.106739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:54:33.106778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:33.106789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:33.106792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:304:2296] 2024-11-21T17:54:33.106802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:33.106811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:33.106813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:304:2296] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> test.py::test[union_all-union_all_multiin--Debug] [GOOD] >> test.py::test[union_all-union_all_multiin--Plan] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Debug] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Plan] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] >> test.py::test[union_all-union_all_multiin--Results] >> TGRpcStreamingTest::SimpleEcho >> TSchemeShardSubDomainTest::Restart >> TGRpcStreamingTest::ReadFinish ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:33.596777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:33.596796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:33.596800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:33.596804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default 
configuration 2024-11-21T17:54:33.596815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:33.596818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:33.596823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:33.596878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:33.603631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:33.603646Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:33.605548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:33.606047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:33.606067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:33.606894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:33.607034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:33.607103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.607193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:33.607793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.607989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:33.607995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.608022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:33.608027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:33.608032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:33.608054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.608814Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:33.620161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:33.620224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.620287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:33.620339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2024-11-21T17:54:33.620344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.620951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.620967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:33.620997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.621003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:33.621006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:33.621009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:33.621227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.621233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:33.621235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:33.621418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.621422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.621426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:33.621430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:33.621794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:33.622054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:33.622091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:33.622213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.622227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:33.622233Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:33.622266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:33.622270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:33.622289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:33.622297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:33.622566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:33.622581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:33.622613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.622616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:33.622676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.622681Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:33.622689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:33.622691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:33.622695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:33.622698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:33.622701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:33.622703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:33.622709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:33.622713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:33.622716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:33.622939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:33.622949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:33.622952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:33.622955Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:33.622958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:33.622968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 46 2024-11-21T17:54:33.633411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 0, tablet: 72075186233409547 2024-11-21T17:54:33.633413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 0, tablet: 72075186233409548 2024-11-21T17:54:33.639859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:33.639915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2024-11-21T17:54:33.639924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T17:54:33.639932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2024-11-21T17:54:33.640450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.640744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T17:54:33.640759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2024-11-21T17:54:33.640763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T17:54:33.640768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2024-11-21T17:54:33.640827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2024-11-21T17:54:33.640833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2024-11-21T17:54:33.640838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T17:54:33.640841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2024-11-21T17:54:33.640845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T17:54:33.641201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641230Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:33.641241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T17:54:33.641263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:33.641467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:54:33.641483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T17:54:33.641529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:33.641581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:54:33.641585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:33.641607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:33.641615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:33.641951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:33.641994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:33.641999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:33.642070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:33.642075Z node 
1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:33.642084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:33.642086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:33.642090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:33.642093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:33.642096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:33.642098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:33.642124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:33.642128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2024-11-21T17:54:33.642130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2024-11-21T17:54:33.642200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:33.642207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:33.642210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:33.642214Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T17:54:33.642216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:33.642222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2024-11-21T17:54:33.642225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:305:2297] 2024-11-21T17:54:33.642691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:33.642705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:33.642709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:312:2304] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:33.642783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:33.642806Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 29us result status StatusSuccess 2024-11-21T17:54:33.642867Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects >> TGRpcStreamingTest::ClientNeverWrites >> test.py::test[json-json_query/wrapper-default.txt-Analyze] [GOOD] >> test.py::test[json-json_query/wrapper-default.txt-Debug] >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest |85.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:24.914565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:24.914597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:24.914600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:24.914604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:24.914614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:24.914617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:24.914623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:24.914679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:24.921397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:24.921412Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:24.923192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:24.923669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:24.923690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:24.924608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:24.924729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:24.924788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:24.924853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:24.925507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:24.925704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:24.925710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:24.925735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:24.925740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:24.925744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:24.925752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.926738Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:24.936054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:24.936107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.936148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:24.936205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:24.936211Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.936736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:24.936753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:24.936779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.936786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:24.936788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:24.936791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:24.937069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.937079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:24.937082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:24.937481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.937488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.937491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:24.937496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:24.937873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:24.938131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:24.938170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:24.938295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:24.938311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:24.938315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:54:24.938347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:24.938351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:24.938371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:24.938378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:24.938653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:24.938657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:24.938689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:24.938692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:24.938756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:24.938761Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:24.938772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:24.938775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:24.938778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:24.938781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:24.938783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:24.938786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:24.938792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:24.938795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:24.938798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:24.938981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:24.938989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:24.938992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:24.938995Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:24.938997Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:24.939004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 158 } } 2024-11-21T17:54:34.012794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 762 RawX2: 4294969995 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2024-11-21T17:54:34.012807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:34.012822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 762 RawX2: 4294969995 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2024-11-21T17:54:34.012826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:54:34.012928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.012936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:54:34.012942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:54:34.012945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2024-11-21T17:54:34.012952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:54:34.012974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2024-11-21T17:54:34.012990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:34.012998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:54:34.013413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.013735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.013791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:34.013797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:34.013841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:54:34.013862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T17:54:34.013866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2024-11-21T17:54:34.013869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 4 2024-11-21T17:54:34.013940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.013945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:54:34.013958Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.013961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:54:34.013964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T17:54:34.014091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:54:34.014101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:54:34.014104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T17:54:34.014107Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2024-11-21T17:54:34.014113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:34.014230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:54:34.014237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:54:34.014239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T17:54:34.014242Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:54:34.014244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:54:34.014250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T17:54:34.014607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.014615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 
2024-11-21T17:54:34.014676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:54:34.014697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T17:54:34.014700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:54:34.014704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T17:54:34.014706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T17:54:34.014709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T17:54:34.014712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T17:54:34.014723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:54:34.014957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:34.014961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:34.015052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:54:34.015274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:54:34.015504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.015509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T17:54:34.015594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2024-11-21T17:54:34.015702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T17:54:34.015711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T17:54:34.015773Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T17:54:34.015786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:54:34.015789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:991:2919] TestWaitNotification: OK eventTxId 107 2024-11-21T17:54:34.015860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:34.015887Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusSuccess 2024-11-21T17:54:34.015963Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:34.127189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:34.127209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:34.127213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:34.127216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:34.127230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:34.127233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:34.127239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:34.127308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:34.134526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:34.134543Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:34.136725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:34.137339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2024-11-21T17:54:34.137369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:34.138418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:34.138549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:34.138635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:34.138708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:34.139398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.139610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:34.139616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.139644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:34.139648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:34.139652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:34.139662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.140581Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:34.151075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:34.151141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.151195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:34.151252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:34.151256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.151893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:34.151912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:34.151946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.151953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:34.151956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:34.151960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:34.152196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.152202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:34.152205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:34.152463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.152469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.152473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:34.152478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:34.152849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:34.153151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:34.153187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:34.153313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:34.153327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:34.153334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:34.153369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:34.153373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:34.153394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:34.153402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:34.153671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T17:54:34.153675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:34.153703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.153706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:34.153767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.153772Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:34.153780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:34.153783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:34.153787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:34.153790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:34.153793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:34.153796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:34.153803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:34.153807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:34.153809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:34.153993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:34.154001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:34.154004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:34.154008Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:34.154011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:34.154021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:34.169572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:458:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:461:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:462:2058] recipient: [1:460:2413] Leader for TabletID 72057594046678944 is [1:463:2414] sender: [1:464:2058] recipient: [1:460:2413] 2024-11-21T17:54:34.173159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:34.173177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:34.173182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:34.173187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:34.173192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:34.173196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:34.173206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:34.173239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:34.174087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:34.174321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:34.174351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:34.174362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:34.174365Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:34.174424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:34.174468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:34.174486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:54:34.174553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:34.174614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:34.174616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:34.174623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.174869Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.175889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:34.175901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.175949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:34.175955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:34.175958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:34.176316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:463:2414] sender: [1:522:2058] recipient: [1:15:2062] 2024-11-21T17:54:34.217309Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:34.217382Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 94us result status StatusSuccess 2024-11-21T17:54:34.217473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:34.217553Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:34.217573Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 22us result status StatusSuccess 2024-11-21T17:54:34.217624Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[union_all-union_all_multiin--Results] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Debug] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [GOOD] >> test.py::test[json-json_query/wrapper-default.txt-Debug] [GOOD] >> test.py::test[json-json_query/wrapper-default.txt-ForceBlocks] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Debug] >> test.py::test[window-leading/aggregations_leadlag--Debug] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TGRpcStreamingTest::ClientNeverWrites [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:34.523116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:34.523140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:34.523144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:34.523148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2024-11-21T17:54:34.523161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:34.523164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:34.523170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:34.523232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:34.530395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:34.530416Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:34.532456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:34.532939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:34.532967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:34.533833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:34.533954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:34.534040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:34.534135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:34.534779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.535011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:34.535018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.535045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:34.535050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:34.535053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:34.535062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.535940Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:34.546568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:34.546661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.546721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:34.546781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:34.546786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.547517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:34.547547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:34.547590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.547598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:34.547601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:34.547605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:34.548057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.548074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:34.548079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:34.548489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.548502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.548507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:34.548511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:34.548937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:34.549334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:34.549381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:34.549529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:34.549551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:34.549556Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:34.549598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:34.549602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:34.549625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:34.549633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:34.549995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:34.550000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:34.550036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.550042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:34.550123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.550129Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:34.550137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:34.550140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:34.550144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:34.550148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:34.550153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:34.550155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:34.550163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:34.550168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:34.550170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:34.550427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:34.550443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:34.550449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:34.550453Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:34.550458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:34.550470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:34.560805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:54:34.560813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:34.560816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:34.560838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2024-11-21T17:54:34.560865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:34.560872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:34.561063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:34.561207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:34.561337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:34.561342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:34.561362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:34.561378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:34.561382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:54:34.561387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:34.561424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:34.561429Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2024-11-21T17:54:34.561435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:34.561438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:34.561441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 
2024-11-21T17:54:34.561444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:34.561447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:34.561450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:34.561459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:34.561463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:34.561465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:54:34.561467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:54:34.561534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:34.561540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:34.561543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:34.561546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:34.561548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:34.561619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:34.561625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:34.561628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:34.561630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:34.561632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:34.561639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:34.561795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:34.561803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:34.561820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:34.561882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:34.561886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:34.561893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:34.562196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:34.562502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:34.562517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:34.562529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:54:34.562590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:34.562596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:34.562663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:34.562680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:34.562685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2328] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:34.562751Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:34.562779Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusPathDoesNotExist 2024-11-21T17:54:34.562821Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:34.562890Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:34.562907Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 18us result status StatusSuccess 2024-11-21T17:54:34.562963Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TGRpcStreamingTest::SimpleEcho [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TSchemeShardSubDomainTest::RmDir >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> BasicStatistics::TwoNodes [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2024-11-21T17:54:34.486447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792491773672131:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:34.486474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00185d/r3tmp/tmp36IBeb/pdisk_1.dat 2024-11-21T17:54:34.749456Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:34.835411Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:60306 2024-11-21T17:54:34.835513Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] facade attach Name# Session actor# [1:7439792491773672469:2256] peer# ipv6:[::1]:60306 2024-11-21T17:54:34.835529Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] facade read Name# Session peer# ipv6:[::1]:60306 2024-11-21T17:54:34.835551Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] facade finish Name# Session peer# ipv6:[::1]:60306 grpc status# (0) message# 2024-11-21T17:54:34.835677Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] read 
finished Name# Session ok# false data# peer# ipv6:[::1]:60306 2024-11-21T17:54:34.835693Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] stream finished Name# Session ok# true peer# ipv6:[::1]:60306 grpc status# (0) message# 2024-11-21T17:54:34.835699Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2024-11-21T17:54:34.835709Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] deregistering request Name# Session peer# ipv6:[::1]:60306 (finish done) 2024-11-21T17:54:34.835725Z node 1 :GRPC_SERVER DEBUG: [0x5794be945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:60306 2024-11-21T17:54:34.839375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:34.839397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:34.840468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2024-11-21T17:54:34.486457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792491712101706:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:34.486479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017ee/r3tmp/tmpU88lFh/pdisk_1.dat 2024-11-21T17:54:34.750157Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:34.827101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:34.827125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:34.828282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:34.834846Z node 1 :GRPC_SERVER DEBUG: [0x4183e945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:35630 2024-11-21T17:54:34.834946Z node 1 :GRPC_SERVER DEBUG: [0x4183e945400] facade attach Name# Session actor# [1:7439792491712102043:2258] peer# ipv6:[::1]:35630 2024-11-21T17:54:34.835001Z node 1 :GRPC_SERVER DEBUG: [0x4183e945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:35630 2024-11-21T17:54:34.835020Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2024-11-21T17:54:34.835098Z node 1 :GRPC_SERVER DEBUG: [0x4183e945400] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2024-11-21T17:54:34.835116Z node 1 :GRPC_SERVER DEBUG: [0x4183e945400] deregistering request Name# Session peer# unknown (finish done) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2024-11-21T17:54:34.486456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792493536020095:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:34.486479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0017b8/r3tmp/tmpGEjZMw/pdisk_1.dat 2024-11-21T17:54:34.752729Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:34.811237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:34.811361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792493536020400:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:34.826179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:34.826207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:34.827179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:34.834869Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:42300 2024-11-21T17:54:34.834979Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] facade attach Name# Session actor# [1:7439792493536020413:2255] peer# ipv6:[::1]:42300 2024-11-21T17:54:34.834993Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] facade read Name# Session peer# ipv6:[::1]:42300 2024-11-21T17:54:34.835010Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] facade write Name# Session data# peer# ipv6:[::1]:42300 2024-11-21T17:54:34.835106Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] facade finish Name# Session peer# ipv6:[::1]:42300 grpc status# (0) message# 2024-11-21T17:54:34.835124Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] write finished Name# Session ok# true peer# ipv6:[::1]:42300 2024-11-21T17:54:34.835136Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2024-11-21T17:54:34.835211Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] read finished Name# Session ok# false data# peer# ipv6:[::1]:42300 2024-11-21T17:54:34.835233Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2024-11-21T17:54:34.835254Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] stream finished Name# Session ok# true peer# ipv6:[::1]:42300 grpc status# (0) message# 2024-11-21T17:54:34.835254Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:42300 2024-11-21T17:54:34.835261Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2024-11-21T17:54:34.835270Z node 1 :GRPC_SERVER DEBUG: [0x16dc3e945400] deregistering request Name# Session peer# ipv6:[::1]:42300 (finish done) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2024-11-21T17:54:34.486474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792490455615403:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:34.486490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00182d/r3tmp/tmpOHHZkd/pdisk_1.dat 2024-11-21T17:54:34.752644Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:34.811213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:34.811354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792490455615706:2277], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:34.834873Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] stream accepted Name# Session ok# true peer# ipv6:[::1]:56058 2024-11-21T17:54:34.835007Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] facade attach Name# Session actor# [1:7439792490455615716:2252] peer# ipv6:[::1]:56058 2024-11-21T17:54:34.835024Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] facade read Name# Session peer# ipv6:[::1]:56058 2024-11-21T17:54:34.835132Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] read finished Name# Session ok# true data# peer# ipv6:[::1]:56058 2024-11-21T17:54:34.835155Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2024-11-21T17:54:34.835164Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] facade write Name# Session data# peer# ipv6:[::1]:56058 2024-11-21T17:54:34.835250Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] facade finish Name# Session peer# ipv6:[::1]:56058 grpc status# (0) message# 2024-11-21T17:54:34.835290Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] write finished Name# Session ok# true peer# ipv6:[::1]:56058 2024-11-21T17:54:34.835361Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] stream done notification Name# Session ok# true peer# ipv6:[::1]:56058 2024-11-21T17:54:34.835372Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] stream finished Name# Session ok# true peer# ipv6:[::1]:56058 grpc status# (0) message# 2024-11-21T17:54:34.835387Z node 1 :GRPC_SERVER DEBUG: [0x156c3e945400] deregistering request Name# Session peer# ipv6:[::1]:56058 (finish done) 2024-11-21T17:54:34.837222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:34.837250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:34.838317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:35.146163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:35.146189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.146194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:35.146198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:35.146214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:35.146218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:35.146226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.146298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:35.156194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:35.156216Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:35.158766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:35.159393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:35.159428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:35.160761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:35.160953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:35.161042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.161135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:35.162056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.162319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.162329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.162364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:35.162371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.162376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:35.162388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.163569Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:35.178493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.178552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.178617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:35.178673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:35.178678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:35.179322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:35.179332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:35.179335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:35.179605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:35.179837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.179850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.180241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.180621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:35.180655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:35.180781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.180795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.180799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.180835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:35.180839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T17:54:35.180864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.180873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:35.181294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.181341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:35.181425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:35.181446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:35.181451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.181457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:35.181463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.181468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:35.181475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:35.181489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.181495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:35.181499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:35.181835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.181850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.181855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:35.181858Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:35.181862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.181875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
04, tablet: 72075186233409552, partId: 0 2024-11-21T17:54:35.290718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 72075186233409552 Status: OK 2024-11-21T17:54:35.290724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2024-11-21T17:54:35.290731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409552 Status: OK at tablet72057594046678944 2024-11-21T17:54:35.290925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.292615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2024-11-21T17:54:35.292644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 72075186233409550 Status: OK 2024-11-21T17:54:35.292649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2024-11-21T17:54:35.292654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409550 Status: OK at tablet72057594046678944 2024-11-21T17:54:35.292935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.292950Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 40us result status StatusSuccess 2024-11-21T17:54:35.293016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/PQGroup_2" PathDescription { Self { Name: "PQGroup_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: false CreateTxId: 104 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 1 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 1 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 } BalancerTabletID: 72075186233409552 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 5 ShardsInside: 7 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 50 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.293841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.295017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2024-11-21T17:54:35.295038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxId: 104 Origin: 
72075186233409551 Status: OK 2024-11-21T17:54:35.295044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse at tablet72057594046678944 2024-11-21T17:54:35.295048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TConfigureParts operationId#104:0 HandleReply TEvUpdateConfigResponse message: TxId: 104 Origin: 72075186233409551 Status: OK at tablet72057594046678944 2024-11-21T17:54:35.295055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2024-11-21T17:54:35.296699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.296739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.296745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.296752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2024-11-21T17:54:35.296779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.297778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T17:54:35.297814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000003 2024-11-21T17:54:35.297913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.297934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.297942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2024-11-21T17:54:35.297988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T17:54:35.298017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.298026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T17:54:35.299577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.299584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.299621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:35.299651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.299655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 104, path id: 1 2024-11-21T17:54:35.299658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T17:54:35.299752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.299758Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T17:54:35.299767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:54:35.299770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:35.299775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T17:54:35.299778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:35.299782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:54:35.299785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:54:35.299809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T17:54:35.299813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T17:54:35.299816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:54:35.299818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T17:54:35.299952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:35.299959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:35.299962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:54:35.299965Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:35.299968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:54:35.300114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:35.300122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 
PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:35.300124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:54:35.300126Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T17:54:35.300129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:54:35.300135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T17:54:35.300947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:54:35.301327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:35.152271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:35.152295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.152300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:35.152305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:35.152319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:35.152323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:35.152332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.152404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:35.161988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:35.162005Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:35.164885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:35.165418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:35.165449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:35.166838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T17:54:35.167032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:35.167105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.167176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:35.167942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.168156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.168163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.168189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:35.168194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.168198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:35.168207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.169315Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:35.179239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.179290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:35.179384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:35.179388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:35.179953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.179959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:35.179961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:35.179964Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:35.180241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.180250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:35.180254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:35.180507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.180512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.180515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.180519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.180906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.181198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:35.181231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:35.181345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181364Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.181400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:35.181404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.181423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.181432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:35.181845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.181897Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:35.181989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.181996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:35.182008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:35.182012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.182017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:35.182022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.182027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:35.182030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:35.182043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.182048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:35.182052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:35.182411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.182429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.182434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:35.182438Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:35.182443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.182458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T17:54:35.183134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T17:54:35.183221Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2024-11-21T17:54:35.183353Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T17:54:35.184676Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T17:54:35.185237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: 
"USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.185288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.185302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.185469Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:54:35.186081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.186110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2024-11-21T17:54:35.186221Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T17:54:35.186264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:35.186281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T17:54:35.186332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:35.186348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:35.186352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:281:2273] TestWaitNotification: OK eventTxId 100 2024-11-21T17:54:35.186410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.186434Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 32us result status StatusPathDoesNotExist 2024-11-21T17:54:35.186466Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } 
PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::RmDir [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:51:57.884875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791817767028727:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.885067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.917684Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002019/r3tmp/tmp3xJc3j/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27210, node 1 2024-11-21T17:51:57.950646Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:57.955271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/002019/r3tmp/yandex7j4fKg.tmp 2024-11-21T17:51:57.955287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/002019/r3tmp/yandex7j4fKg.tmp 2024-11-21T17:51:57.955359Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/002019/r3tmp/yandex7j4fKg.tmp 2024-11-21T17:51:57.955397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:57.959589Z INFO: TTestServer started on Port 14379 GrpcPort 27210 TClient is connected to server localhost:14379 PQClient connected to localhost:27210 === TenantModeEnabled() = 0 === Init PQ - start server on port 27210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:57.986280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.986307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.987367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:58.036535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:58.036584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.036641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:51:58.036700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:58.036710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.036902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:58.036928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:58.037000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.037013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:58.037016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T17:51:58.037019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T17:51:58.037118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.037126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:58.037128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T17:51:58.037199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.037209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.037212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:58.037218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 
ready parts: 1/1 waiting... 2024-11-21T17:51:58.037771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:58.037827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:58.037834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T17:51:58.037838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:58.037861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T17:51:58.037893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:51:58.038100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211518086, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:58.038122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439791817767029178 RawX2: 4294969508 } } Step: 1732211518086 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:51:58.038130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:58.038185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T17:51:58.038194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T17:51:58.038215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:51:58.038231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:51:58.038319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:58.038328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:51:58.038363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:58.038370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439791817767029213:2235], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T17:51:58.038382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:58.038390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 
2024-11-21T17:51:58.038400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T17:51:58.038407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T17:51:58.038411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T17:51:58.038414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T17:51:58.038421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T17:51:58.038423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T17:51:58.038433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:51:58.038441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T17:51:58.038442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:51:58.038807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:58.038829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T17:51:58.038831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T17:51:58.038835Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:51:58.038838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPat ... ERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2024-11-21T17:53:52.421507Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:53:52.421523Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:52.421545Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T17:53:52.421546Z node 25 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T17:53:52.421552Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 2 2024-11-21T17:53:52.421562Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 2 user debug offset 0 count 3 size 530 endOffset 3 max time lag 0ms effective offset 0 2024-11-21T17:53:52.421563Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:53:52.421567Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2024-11-21T17:53:52.421569Z node 25 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:53:52.421580Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 2 messageNo: 0 requestId: cookie: 0 2024-11-21T17:53:52.421600Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732211632313 CreateTimestampMS: 1732211632312 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732211632313 CreateTimestampMS: 1732211632312 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1732211632314 CreateTimestampMS: 1732211632312 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T17:53:52.421602Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732211632315 CreateTimestampMS: 1732211632314 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732211632315 CreateTimestampMS: 1732211632314 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1732211632315 CreateTimestampMS: 1732211632314 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T17:53:52.421628Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) wait data in partition inited, cookie 1 from offset3 2024-11-21T17:53:52.421634Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 3 ReadOffset 3 ReadGuid 461294d2-eac22c86-9b8088f8-1b0dcc09 has messages 1 2024-11-21T17:53:52.421649Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) wait data in partition inited, cookie 1 from offset3 2024-11-21T17:53:52.421656Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) EndOffset 3 ReadOffset 3 ReadGuid a1f10f0f-9599a292-3176c2e2-525141a6 has messages 1 2024-11-21T17:53:52.421660Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1732211632317 CreateTimestampMS: 1732211632316 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732211632318 CreateTimestampMS: 1732211632316 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1732211632318 CreateTimestampMS: 1732211632316 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T17:53:52.421671Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T17:53:52.421673Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid abd35599-25502238-3ce2e5af-b404a107 has messages 1 2024-11-21T17:53:52.421685Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 read done: guid# 461294d2-eac22c86-9b8088f8-1b0dcc09, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 276 2024-11-21T17:53:52.421691Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 response to read: guid# 461294d2-eac22c86-9b8088f8-1b0dcc09 2024-11-21T17:53:52.421750Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 Process answer. Aval parts: 0 2024-11-21T17:53:52.421775Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 read done: guid# a1f10f0f-9599a292-3176c2e2-525141a6, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3), size# 386 2024-11-21T17:53:52.421784Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 response to read: guid# a1f10f0f-9599a292-3176c2e2-525141a6 2024-11-21T17:53:52.421810Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 Process answer. Aval parts: 0 2024-11-21T17:53:52.421821Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 read done: guid# abd35599-25502238-3ce2e5af-b404a107, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1), size# 386 2024-11-21T17:53:52.421827Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 response to read: guid# abd35599-25502238-3ce2e5af-b404a107 2024-11-21T17:53:52.421849Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 Process answer. 
Aval parts: 0 Got data event with total 3 messages, current total messages: 3 Got data event with total 3 messages, current total messages: 6 2024-11-21T17:53:52.422106Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 grpc read done: success# 1, data# { read_request { bytes_size: 276 } } 2024-11-21T17:53:52.422170Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 got read request: guid# 7044f6f3-597f73cc-24052c1e-2279fd2d Got data event with total 3 messages, current total messages: 9 2024-11-21T17:53:52.422666Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 grpc read done: success# 1, data# { read_request { bytes_size: 386 } } 2024-11-21T17:53:52.422729Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 got read request: guid# 876c3365-ccc36bb7-86a31aa8-d8ffa97d 2024-11-21T17:53:52.422884Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 grpc read done: success# 0, data# { } 2024-11-21T17:53:52.422895Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 grpc read failed 2024-11-21T17:53:52.422899Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 grpc closed 2024-11-21T17:53:52.422916Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_24_1_14924976173356710581_v1 is DEAD 2024-11-21T17:53:52.423032Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:52.423039Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_24_1_14924976173356710581_v1 2024-11-21T17:53:52.423049Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792312500716824:2500] destroyed 2024-11-21T17:53:52.423052Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:52.423057Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_24_1_14924976173356710581_v1 2024-11-21T17:53:52.423059Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792312500716822:2499] destroyed 2024-11-21T17:53:52.423060Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:53:52.423061Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_24_1_14924976173356710581_v1 2024-11-21T17:53:52.423063Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7439792312500716821:2498] destroyed 2024-11-21T17:53:52.423064Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_24_1_14924976173356710581_v1 2024-11-21T17:53:52.423067Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_24_1_14924976173356710581_v1 2024-11-21T17:53:52.423069Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_24_1_14924976173356710581_v1 2024-11-21T17:53:52.423141Z node 24 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [24:7439792312500716817:2495] disconnected; 
active server actors: 1 2024-11-21T17:53:52.423149Z node 24 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [24:7439792312500716817:2495] client debug disconnected session shared/debug_24_1_14924976173356710581_v1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:35.188137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:35.188157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.188161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:35.188165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:35.188179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:35.188182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:35.188188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.188263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:35.195421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:35.195439Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:35.197973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:35.198724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:35.198762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:35.200306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:35.200518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:35.200618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.200716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:35.201693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.201970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.201980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.202019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:35.202026Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.202032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:35.202046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.203382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:35.220265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.220344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.220417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:35.220493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:35.220501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.221204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.221234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:35.221282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.221292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:35.221297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:35.221302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:35.221686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.221699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:35.221704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:35.222058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.222069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.222075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.222081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.222637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.223097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:35.223152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:35.223326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.223353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.223360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.223416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:35.223423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.223456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.223469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:35.224091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.224101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.224143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.224148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:35.224273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.224282Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:35.224295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:35.224299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.224305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:35.224311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.224316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:35.224320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:35.224331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.224337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:35.224341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:35.224631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.224648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.224653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:35.224658Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:35.224663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.224677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2024-11-21T17:54:35.363432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409550 2024-11-21T17:54:35.363863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:35.363868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:54:35.363876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:35.363879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:35.363882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 Forgetting tablet 72075186233409552 2024-11-21T17:54:35.363974Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:35.364071Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:35.364213Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409546 2024-11-21T17:54:35.364305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.364333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:35.364645Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:35.364688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:35.364714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:35.364755Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:54:35.364828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:54:35.364848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409551 2024-11-21T17:54:35.365023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 103 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T17:54:35.365151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:35.365183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:35.365201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:35.365264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:35.365283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:35.365353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:35.365444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:35.365449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:35.365469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:35.365859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:54:35.365870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:54:35.365887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T17:54:35.365890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2024-11-21T17:54:35.365902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:35.365913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:35.365916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:35.365923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.366372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:35.366380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:35.366391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:35.366394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:35.366404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:54:35.366407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:54:35.366416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:35.366419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:35.366427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:35.366432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:35.366723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:35.366756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:54:35.366811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:54:35.366817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:54:35.366871Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:54:35.366887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:35.366891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:777:2667] TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:35.366955Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.366986Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 41us result status StatusPathDoesNotExist 2024-11-21T17:54:35.367027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:35.367079Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.367099Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 21us result status StatusSuccess 
2024-11-21T17:54:35.367156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[json-json_query/wrapper-default.txt-ForceBlocks] [GOOD] >> test.py::test[json-json_query/wrapper-default.txt-Plan] [GOOD] >> test.py::test[json-json_query/wrapper-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2024-11-21T17:52:16.182513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:16.182545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:16.182553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001667/r3tmp/tmpNyghhg/pdisk_1.dat 2024-11-21T17:52:16.287954Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31683, node 1 2024-11-21T17:52:16.383025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:16.383042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:16.383044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:16.383106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:16.387331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:16.463199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:16.463228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:16.474834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29077 2024-11-21T17:52:16.886275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.901555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.901592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.901686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.901696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.935038Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:17.935159Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:52:17.936061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.936190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.975748Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:17.975770Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:17.985667Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:17.988584Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:17.988910Z node 3 :STATISTICS 
DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:17.988932Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:17.988938Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:17.988944Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:17.988950Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2024-11-21T17:52:17.988953Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:17.988958Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:17.989132Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:18.176247Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:2149:2417] 2024-11-21T17:52:18.177555Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:52:18.180353Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:18.180388Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:2206:2425], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:18.182354Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:18.182369Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:18.182397Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:52:18.185347Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:2247:2434] 2024-11-21T17:52:18.185459Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:2247:2434], schemeshard id = 72075186224037889 2024-11-21T17:52:18.189750Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:18.189781Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:18.191042Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:18.191064Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:18.191948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:18.198361Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:18.198407Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Subscribe on create table tx: 281474976720657 2024-11-21T17:52:18.215592Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:18.229285Z node 2 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:52:18.229597Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:18.230155Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:18.264319Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:18.419614Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:18.557747Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:52:19.362268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2574:3039], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:19.362300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:19.365455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:52:19.460903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2735:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:19.460948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:19.461386Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2740:3081]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:52:19.461428Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:52:19.461437Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2742:3083] 2024-11-21T17:52:19.461447Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2742:3083] 2024-11-21T17:52:19.461610Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:2743:2569] 2024-11-21T17:52:19.461678Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2742:3083], server id = [3:2743:2569], tablet id = 72075186224037897, status = OK 2024-11-21T17:52:19.461745Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [3:2743:2569], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:52:19.461764Z node 3 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T17:52:19.461807Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:52:19.461821Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2740:3081], StatRequests.size() = 1 2024-11-21T17:52:19.464089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2747:3087], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:19.464112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:19.464192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2752:3092], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:19.465406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:52:19.638843Z node 3 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:52:19.638871Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:52:19.764519Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2742:3083], schemeshard count = 1 2024-11-21T17:52:19.979198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2754:3094], DatabaseId: /Root, PoolId: d ... 4-11-21T17:54:28.355262Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:54:28.355266Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:54:28.355269Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T17:54:28.900918Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7350:3925]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:28.900997Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T17:54:28.901002Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7350:3925], StatRequests.size() = 1 2024-11-21T17:54:29.254095Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T17:54:29.254224Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:54:29.254281Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:29.410466Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T17:54:29.410488Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 196.000000s, at schemeshard: 72075186224037889 2024-11-21T17:54:29.410563Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2024-11-21T17:54:29.421654Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:54:29.935693Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7387:3935]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:29.935763Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T17:54:29.935768Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7387:3935], StatRequests.size() = 1 2024-11-21T17:54:30.298513Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:30.298538Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:30.298546Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T17:54:30.298549Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:54:30.298648Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T17:54:30.315708Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:30.316591Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7412:3559], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:30.316612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7422:3564], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:30.316694Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:30.318738Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T17:54:30.327419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7426:3567], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T17:54:30.552279Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7545:3626]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:30.552344Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:54:30.552351Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7547:3628] 2024-11-21T17:54:30.552359Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7547:3628] 2024-11-21T17:54:30.552451Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:7548:3629] 2024-11-21T17:54:30.552466Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [3:7548:3629], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:54:30.552475Z node 3 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2024-11-21T17:54:30.552500Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7547:3628], server id = [3:7548:3629], tablet id = 72075186224037897, status = OK 2024-11-21T17:54:30.552514Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T17:54:30.552521Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7545:3626], StatRequests.size() = 1 2024-11-21T17:54:30.564550Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=MjA5ODAwNTktNTc4ZDM1MWItNmJmODcxZDMtZTY0ZjczNQ==, TxId: 2024-11-21T17:54:30.564570Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=MjA5ODAwNTktNTc4ZDM1MWItNmJmODcxZDMtZTY0ZjczNQ==, TxId: 2024-11-21T17:54:30.564707Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:30.575824Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:54:30.575839Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T17:54:30.648035Z node 3 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:54:30.648056Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:54:30.699608Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:7547:3628], schemeshard count = 1 2024-11-21T17:54:31.001846Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7589:3950]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:31.001948Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T17:54:31.001955Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7589:3950], StatRequests.size() = 1 2024-11-21T17:54:31.959377Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7634:3960]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:31.959458Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T17:54:31.959463Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7634:3960], StatRequests.size() = 1 2024-11-21T17:54:32.270814Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:54:32.301806Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:32.301829Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:32.301837Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T17:54:32.301841Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:32.301936Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T17:54:32.302462Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:32.305715Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YTU4MjhmZDctZmNhM2I5YmEtYjdlMjRkMDMtY2VmZDMzYjg=, TxId: 2024-11-21T17:54:32.305729Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YTU4MjhmZDctZmNhM2I5YmEtYjdlMjRkMDMtY2VmZDMzYjg=, TxId: 2024-11-21T17:54:32.305810Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:32.337764Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:32.337784Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
2024-11-21T17:54:32.973723Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7708:3970]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:32.973821Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T17:54:32.973828Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7708:3970], StatRequests.size() = 1 2024-11-21T17:54:33.993105Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7753:3980]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:33.993183Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T17:54:33.993187Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7753:3980], StatRequests.size() = 1 2024-11-21T17:54:34.306082Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 3, schemeshard count = 1 2024-11-21T17:54:34.306206Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:34.306276Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:54:34.306306Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T17:54:34.337288Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:34.337314Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:34.942831Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7790:3992]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:34.942922Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2024-11-21T17:54:34.942927Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7790:3992], StatRequests.size() = 1 2024-11-21T17:54:34.943038Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [3:7792:3722]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:34.943717Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:54:34.943725Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [3:7792:3722], StatRequests.size() = 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:35.507621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:35.507639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.507642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:35.507646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T17:54:35.507658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:35.507660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:35.507667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.507724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:35.515423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:35.515438Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:35.517601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:35.518082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:35.518104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:35.519223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:35.519358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:35.519419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.519479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:35.520367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.520626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.520647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.520682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:35.520690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.520696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:35.520709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.521761Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:35.532215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.532308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.532359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:35.532412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2024-11-21T17:54:35.532417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.533136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.533165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:35.533209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.533218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:35.533221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:35.533226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:35.533653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.533666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:35.533671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:35.533967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.533976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.533981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.533987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.534525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.534955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:35.535006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:35.535181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.535204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.535210Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.535257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:35.535263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.535289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.535301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:35.535688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.535696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.535739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.535743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:35.535824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.535831Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:35.535843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:35.535847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.535853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:35.535858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.535862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:35.535865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:35.535875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.535880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:35.535884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:35.536160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.536174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.536178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:35.536183Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:35.536187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.536199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... de 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:35.600268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T17:54:35.600301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.600749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:54:35.600786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:54:35.600865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.600890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.600897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:35.600988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:54:35.600997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:35.601029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.601039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T17:54:35.601049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:54:35.601615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.601627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.601673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:35.601690Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.601696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:54:35.601701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T17:54:35.601776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.601783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T17:54:35.601794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:54:35.601798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:35.601803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:54:35.601808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:35.601813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:54:35.601816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:54:35.601856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T17:54:35.601862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2024-11-21T17:54:35.601865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:35.601868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:35.601983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:35.601998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:35.602002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:35.602007Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:35.602011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.602120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:35.602129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:35.602133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 
72057594046678944, txId: 100 2024-11-21T17:54:35.602136Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:35.602140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T17:54:35.602147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2024-11-21T17:54:35.602151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:562:2472] 2024-11-21T17:54:35.603111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:35.603167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:35.603182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:35.603187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:563:2473] TestWaitNotification: OK eventTxId 100 2024-11-21T17:54:35.603292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.603331Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 48us result status StatusSuccess 2024-11-21T17:54:35.603433Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T17:54:35.604007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: 
"USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.604041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.604064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2024-11-21T17:54:35.604618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.604654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardSubDomainTest::CreateAndWait >> test.py::test[union_all-union_all_subexpr-default.txt-Debug] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Plan] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:35.239716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:35.239738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.239743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:35.239748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:35.239763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:35.239767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:35.239775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.239839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:35.249445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:35.249465Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:35.252110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:35.252902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:35.252935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:35.254794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:35.255034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:35.255134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.255238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:35.256200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.256499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.256510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.256550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:35.256556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.256562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:35.256576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.257826Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:35.272439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.272530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.272590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:35.272666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:35.272673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.273355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.273378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:35.273418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.273427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:35.273431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:35.273436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:35.273813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.273823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:35.273828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:35.274118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.274125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.274131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.274138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.274737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.275114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:35.275156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:35.275312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.275334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.275344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.275391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:35.275396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T17:54:35.275423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.275434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:35.275799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.275806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.275840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.275845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:35.275922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.275927Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:35.275938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:35.275944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.275949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:35.275954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.275958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:35.275964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:35.275973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.275978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:35.275983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:35.276270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.276286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.276291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:35.276295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:35.276301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.276315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
de 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:54:35.425302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T17:54:35.425705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:35.425728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:35.426128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.426156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.426215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.426222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T17:54:35.426235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:54:35.426240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:35.426247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:54:35.426260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:488:2439] message: TxId: 103 2024-11-21T17:54:35.426268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:35.426274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:54:35.426278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:54:35.426298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:54:35.426834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:35.426843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:489:2440] TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:35.426952Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.426999Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 56us result status StatusSuccess 2024-11-21T17:54:35.427129Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 
SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.427203Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.427231Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 29us result status StatusSuccess 2024-11-21T17:54:35.427312Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.427367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.427379Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 13us result status StatusSuccess 2024-11-21T17:54:35.427420Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.427458Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.427473Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 16us result status StatusSuccess 2024-11-21T17:54:35.427523Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: 
false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:35.635840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:35.635863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.635867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:35.635871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:35.635883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:35.635885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:35.635892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.635955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:35.643286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:35.643312Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:35.645932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:35.646740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:35.646781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:35.648206Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:35.648407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:35.648508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.648609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:35.649467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.649752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.649761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.649800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:35.649807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.649813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:35.649824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.650952Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:35.665108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.665198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.665272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:35.665345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:35.665351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.666108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.666133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:35.666184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.666195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:35.666200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T17:54:35.666205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:35.666606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.666615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:35.666619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:35.667039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.667056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.667063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.667070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.667661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.668075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:35.668123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:35.668292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.668312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.668317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.668376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:35.668383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.668421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.668430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:35.668744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.668750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.668782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.668785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:35.668851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.668856Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:35.668867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:35.668871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.668876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:35.668881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.668886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:35.668889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:35.668899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.668905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:35.668908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:35.669136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.669146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.669149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:35.669152Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:35.669156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.669164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:54:35.764141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T17:54:35.764395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:35.764405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:35.764408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:35.764410Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:35.764413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:35.764420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T17:54:35.764453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 334 } } 2024-11-21T17:54:35.764460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2024-11-21T17:54:35.764471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 334 } } 2024-11-21T17:54:35.764479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 334 } } 2024-11-21T17:54:35.764774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 621 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:54:35.764781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2024-11-21T17:54:35.764790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 621 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 
2024-11-21T17:54:35.764794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:54:35.764798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 621 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T17:54:35.764805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.764808Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.764810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2024-11-21T17:54:35.764814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T17:54:35.765238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:35.765250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:35.765354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.765387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.765419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.765423Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:35.765431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:35.765434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:35.765438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T17:54:35.765446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:272:2264] message: TxId: 101 2024-11-21T17:54:35.765450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:35.765453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:35.765455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:35.765470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:35.765708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:35.765717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:273:2265] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:35.765792Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.765819Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 34us result status StatusSuccess 2024-11-21T17:54:35.765886Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.765953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:35.765969Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 17us result status StatusSuccess 2024-11-21T17:54:35.766021Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> YdbLogStore::LogTable [GOOD] >> YdbLogStore::AlterLogTable >> test.py::test[pg-tpch-q16-default.txt-Debug] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Plan] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Results] >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TPQTest::TestPQSmallRead [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TPQTest::TestPQReadAhead >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> TSchemeShardSubDomainTest::RestartAtInFly >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Debug] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Plan] [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> test.py::test[json-json_query/wrapper-default.txt-Results] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Analyze] >> TSchemeShardSubDomainTest::SimultaneousDeclare >> TSchemeShardSubDomainTest::ForceDropTwice ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.136294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.136313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.136316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.136319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.136331Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.136334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.136340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.136407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.143201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.143217Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.145015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.145484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.145509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.146358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.146495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.146561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.146642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.147305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.147507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.147514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.147540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.147545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.147548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.147559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.148453Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.158949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.159016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.159067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.159125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.159130Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.159689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.159708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.159740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.159747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.159750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.159753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.159987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.159994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.159996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.160190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.160195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.160199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.160203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.160620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.160921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.160960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.161097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.161115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.161119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.161156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.161161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.161183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.161191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.161525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.161533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.161569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.161572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.161639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.161645Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.161659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.161663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.161669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.161674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.161678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.161681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.161691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.161696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.161700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.161959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.161975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.161979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.161982Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 
2024-11-21T17:54:36.161985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.161995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... for txId: 101 at step: 5000003 2024-11-21T17:54:36.168540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.168551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.168555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:36.168606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:54:36.168611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:36.168628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:36.168635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:36.168643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:36.168911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.168917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:36.168934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:36.168945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.168948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:36.168951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:36.168983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.168986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:36.168993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:36.168996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:36.168999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:36.169002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2024-11-21T17:54:36.169005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:36.169008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:36.169014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:36.169018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:36.169022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:54:36.169024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:54:36.169084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.169090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.169093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:36.169096Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:54:36.169098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:36.169174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.169183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.169186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:36.169190Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:36.169194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:36.169202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:36.169564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:36.169704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T17:54:36.169745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send 
EvNotifyTxCompletion 2024-11-21T17:54:36.169762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T17:54:36.169772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:36.169774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:36.169817Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:36.169828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.169831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:327:2319] 2024-11-21T17:54:36.169852Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:36.169858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.169861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:327:2319] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:36.169901Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.169920Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 25us result status StatusSuccess 2024-11-21T17:54:36.169985Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.170033Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.170041Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 9us result status StatusSuccess 2024-11-21T17:54:36.170075Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> test.py::test[window-leading/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Plan] [GOOD] >> test.py::test[window-leading/aggregations_leadlag--Results] >> TSchemeShardSubDomainTest::CopyRejects >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] [GOOD] >> test.py::test[view-secure_eval--Debug] >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> test.py::test[view-secure_eval--Debug] [SKIPPED] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> test.py::test[view-secure_eval--Plan] [SKIPPED] >> test.py::test[view-secure_eval--Results] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] |85.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 
2024-11-21T17:54:36.534416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.534437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.534441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.534444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.534454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.534456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.534463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.534525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.541776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.541793Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.544139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.545200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.545236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.546545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.546709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.546795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.546876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.547710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.547972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.547980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.548017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.548024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.548030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.548044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.549248Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.566741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.566814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.566876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.566944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.566952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.567614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.567633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.567671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.567679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.567683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.567688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.568027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.568036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.568041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.568337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.568345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.568349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.568357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.568949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.569341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.569387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.569553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.569575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.569582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.569633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.569640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.569669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.569696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.570044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.570051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.570092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.570098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.570179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.570185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.570196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.570201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.570206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.570210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.570215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.570218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.570228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.570234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.570238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T17:54:36.570535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.570549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.570554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.570559Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.570564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.570606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... emeshard# 72057594046678944 2024-11-21T17:54:36.632224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2024-11-21T17:54:36.632249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2024-11-21T17:54:36.632254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T17:54:36.632257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2024-11-21T17:54:36.632261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T17:54:36.632651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:36.632890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T17:54:36.632913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.633197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 
2024-11-21T17:54:36.633236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T17:54:36.633290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.633305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.633309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:36.633384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T17:54:36.633390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T17:54:36.633411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.633417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T17:54:36.633422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:36.633805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.633813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.633845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:36.633860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.633864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:54:36.633868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:36.633927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.633933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:36.633943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:36.633946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:36.633951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:36.633956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:36.633960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:36.633964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:36.634009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T17:54:36.634015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2024-11-21T17:54:36.634019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:36.634022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:36.634113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.634119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.634122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:36.634125Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:36.634128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.634188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.634194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:36.634196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:36.634199Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:36.634201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T17:54:36.634205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2024-11-21T17:54:36.634208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:559:2469] 2024-11-21T17:54:36.634737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:36.634754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:36.634762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.634765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:560:2470] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:36.634834Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.634858Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 29us result status StatusSuccess 2024-11-21T17:54:36.634930Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.667542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.667564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.667569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.667574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.667589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.667592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.667601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.667666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.678646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.678662Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.681294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.682008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.682040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.683224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.683387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.683474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.683553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.685018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.685272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.685283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.685314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.685322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.685327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.685339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.686369Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.703030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.703088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.703139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2024-11-21T17:54:36.703208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.703215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.703714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.703736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.703766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.703774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.703778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.703784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.704111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.704121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.704125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.704531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.704541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.704548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.704554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.705155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.705538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.705580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.705737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.705758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } 
} Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.705764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.705813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.705820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.705850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.705862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.706255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.706263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.706299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.706304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.706387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.706394Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.706406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.706410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.706415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.706421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.706426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.706430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.706441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.706447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.706451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.706756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.706770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.706775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, 
txId: 1 2024-11-21T17:54:36.706780Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.706785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.706797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... AT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:36.711651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:36.711655Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T17:54:36.711658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:36.711665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2024-11-21T17:54:36.711866Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:54:36.711952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.711959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#100:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.711964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2024-11-21T17:54:36.712367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:36.712484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:36.712570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.712576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.712583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:36.712588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T17:54:36.712614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.712875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:54:36.712901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:54:36.712966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.712983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.712989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:36.713050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:54:36.713056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:36.713083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.713090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:36.713099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:36.713435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.713444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.713469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:36.713482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.713487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:54:36.713492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:54:36.713564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.713570Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T17:54:36.713581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:54:36.713585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:36.713591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:54:36.713596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:36.713601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:54:36.713605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 
2024-11-21T17:54:36.713616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:36.713621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2024-11-21T17:54:36.713625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:36.713629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:36.713706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:36.713716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:36.713720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:36.713724Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:36.713728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.713810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:36.713819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:36.713823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:36.713827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:36.713830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:36.713839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2024-11-21T17:54:36.713844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:272:2264] 2024-11-21T17:54:36.714448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:36.714512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:36.714525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.714530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:273:2265] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK 
eventTxId 100 2024-11-21T17:54:36.714626Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.714651Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 32us result status StatusSuccess 2024-11-21T17:54:36.714733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.584797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.584816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.584819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.584823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.584837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.584841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.584849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.584922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2024-11-21T17:54:36.592735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.592756Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.594522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.595011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.595033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.595885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.596008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.596072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.596136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.596742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.596935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.596942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.596966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.596970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.596976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.596988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.597782Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.608479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.608542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.608597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.608662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.608669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.609285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.609306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.609338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.609344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.609348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.609351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.609662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.609669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.609672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.609924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.609930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.609936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.609942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.610376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.610745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.610785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.610935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.610958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.610965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.611013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.611020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.611046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2024-11-21T17:54:36.611058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.611481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.611488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.611527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.611532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.611612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.611619Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.611631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.611635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.611641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.611646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.611651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.611655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.611665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.611670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.611674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.611905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.611914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.611918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.611921Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.611924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.611932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
D 72057594046678944 is [1:121:2147] sender: [1:455:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:456:2058] recipient: [1:454:2407] Leader for TabletID 72057594046678944 is [1:457:2408] sender: [1:458:2058] recipient: [1:454:2407] 2024-11-21T17:54:36.629952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.629967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.629971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.629974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.629977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.629979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.629984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.630018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.630722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.630902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.630924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.630935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.630938Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.630964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.631003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:36.631018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T17:54:36.631080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 
2024-11-21T17:54:36.631120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:36.631122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:36.631124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:36.631131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.631341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.632409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.632414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.632417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T17:54:36.632434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2024-11-21T17:54:36.673093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:36.673120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:457:2408] sender: [1:516:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.673253Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:36.673276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.673281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:514:2453] TestWaitNotification: OK eventTxId 100 2024-11-21T17:54:36.673347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.673388Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 53us result status StatusSuccess 2024-11-21T17:54:36.673484Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.673556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.673571Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 18us result status StatusSuccess 2024-11-21T17:54:36.673615Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.549034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.549057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.549061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.549065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.549077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.549080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.549086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.549146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.556735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.556755Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.559162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.559695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.559725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.560821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.560976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.561049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.561122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.562361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.562655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.562664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.562690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.562695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.562699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.562714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.563856Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.575168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.575222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.575273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.575330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.575335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.575838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.575855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.575880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.575886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.575889Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.575892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.576209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.576217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.576220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.576621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.576633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.576637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.576642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.577103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.577462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.577496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.577619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.577646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.577652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.577705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.577711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.577737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.577747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.578166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.578174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.578210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.578216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.578291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.578298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.578308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.578312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.578317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.578323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.578327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.578331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.578341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.578347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.578350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.578604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.578615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.578619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.578621Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.578624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.578634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:36.663968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:36.664207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.664242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.664287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.664293Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:54:36.664304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:54:36.664308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:36.664313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T17:54:36.664325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:632:2561] message: TxId: 102 2024-11-21T17:54:36.664330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:36.664335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:54:36.664339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:54:36.664359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:36.664690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.664699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:633:2562] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2024-11-21T17:54:36.665187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.665221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 
2024-11-21T17:54:36.665592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:154, operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2024-11-21T17:54:36.665662Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665688Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusSuccess 2024-11-21T17:54:36.665743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665780Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665794Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 15us result status StatusSuccess 2024-11-21T17:54:36.665845Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } 
Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665884Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665891Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 8us result status StatusSuccess 2024-11-21T17:54:36.665908Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665934Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665942Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 9us result status StatusSuccess 2024-11-21T17:54:36.665961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.681558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.681579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.681585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.681590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.681606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.681610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.681619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.681684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.692794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.692817Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.695382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.696113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.696147Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.697369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.697532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.697629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.697720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.698596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.698873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.698883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.698921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.698929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.698935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.698948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.700140Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.716823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.716878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.716919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.716977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.716983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.717473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.717489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.717509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.717516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T17:54:36.717518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.717521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.718302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.718315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.718319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.718642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.718649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.718653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.718656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.719067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.719369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.719402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.719528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.719543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.719548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.719583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.719587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.719608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.719616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.719906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.719911Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.719933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.719936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.719992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.719997Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.720004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.720007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.720011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.720014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.720017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.720019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.720026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.720030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.720032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.720251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.720263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.720267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.720271Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.720275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.720287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
46678944 2024-11-21T17:54:36.785193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:36.785196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:36.785198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:36.785353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:36.785500Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T17:54:36.785560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:54:36.785607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T17:54:36.785665Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:36.785768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.785787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:36.785828Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:36.785850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:36.785861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:36.785933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2024-11-21T17:54:36.786042Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2024-11-21T17:54:36.786172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:54:36.786200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2024-11-21T17:54:36.786270Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:36.786358Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 
2024-11-21T17:54:36.786376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:36.786391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2024-11-21T17:54:36.786652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:36.786675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2024-11-21T17:54:36.786906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:36.786911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:36.786926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:36.786953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:36.786957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:36.786962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.787057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:54:36.787064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:54:36.787076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:36.787079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:36.787434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:36.787441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:36.787451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:54:36.787453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:54:36.787459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:36.787461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:36.787474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:36.787478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:36.787698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 
candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:36.787712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2024-11-21T17:54:36.787745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:36.787750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T17:54:36.787760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:54:36.787762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:54:36.787803Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:36.787815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.787819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:655:2559] 2024-11-21T17:54:36.787831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:54:36.787839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.787841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:655:2559] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:36.787886Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.787906Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusPathDoesNotExist 2024-11-21T17:54:36.787937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:36.787968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:36.787978Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 11us result status StatusSuccess 
2024-11-21T17:54:36.788017Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[key_filter-dict_contains_optional--Analyze] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Debug] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--Debug] >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.641673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.641691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.641694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.641697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.641709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.641712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.641717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.641764Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.648384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.648397Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.650237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.650713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.650737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.651814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.651947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.652025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.652093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.652833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.653092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.653101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.653128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.653133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.653137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.653145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.653863Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.665842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.665907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.665961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.666016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.666021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.666549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.666565Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.666608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.666614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.666617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.666621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.666914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.666920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.666923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.667165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.667171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.667175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.667180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.667566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.667868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.667901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.668048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.668069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.668075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.668117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.668121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.668146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.668154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.668466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.668471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.668499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.668502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.668562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.668567Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.668575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.668578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.668581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.668585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.668588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.668591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.668598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.668601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.668604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.668808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.668817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.668820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.668823Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.668826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.668834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:54:36.914054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T17:54:36.914057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T17:54:36.914061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:54:36.914065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:54:36.914073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/3, is published: true 2024-11-21T17:54:36.914120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 236 } } 2024-11-21T17:54:36.914125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2024-11-21T17:54:36.914138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 236 } } 2024-11-21T17:54:36.914149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 OrderId: 107 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 236 } } 2024-11-21T17:54:36.914209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 4294969831 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2024-11-21T17:54:36.914212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2024-11-21T17:54:36.914221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 593 RawX2: 4294969831 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2024-11-21T17:54:36.914225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:54:36.914231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 593 RawX2: 4294969831 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2024-11-21T17:54:36.914237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 
72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.914240Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.914244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:54:36.914248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T17:54:36.915039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2024-11-21T17:54:36.915221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:54:36.915239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:54:36.915249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2024-11-21T17:54:36.915305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:54:36.915335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2024-11-21T17:54:36.915340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2024-11-21T17:54:36.915351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 2/3 2024-11-21T17:54:36.915355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2024-11-21T17:54:36.915359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2024-11-21T17:54:36.915484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T17:54:36.915498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.915511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.915522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.915526Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2024-11-21T17:54:36.915532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 3/3 2024-11-21T17:54:36.915535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2024-11-21T17:54:36.915539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2024-11-21T17:54:36.915550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:475:2426] message: TxId: 107 2024-11-21T17:54:36.915555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2024-11-21T17:54:36.915560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T17:54:36.915563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T17:54:36.915584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:54:36.915589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2024-11-21T17:54:36.915592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2024-11-21T17:54:36.915596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:54:36.915599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2024-11-21T17:54:36.915602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2024-11-21T17:54:36.915608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:54:36.916058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.916069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:526:2477] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T17:54:36.916909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.917003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2024-11-21T17:54:36.917017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2024-11-21T17:54:36.917023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2024-11-21T17:54:36.917441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.917469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T17:54:36.917533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T17:54:36.917539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T17:54:36.917604Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T17:54:36.917622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T17:54:36.917626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:715:2636] TestWaitNotification: OK eventTxId 108 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.897225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.897245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.897248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.897251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.897261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.897264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.897270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.897322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.904114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.904131Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.906065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.906532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.906559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.907560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.907685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.907767Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.907845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.908563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.908761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.908768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.908792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.908797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.908802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.908810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.909844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.919972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.920024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.920068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.920118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.920123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.920631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.920647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.920672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.920677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.920680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.920683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.920941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.920947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.920950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.921166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.921170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.921173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.921177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.921550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.921881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.921913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.922024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.922039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.922044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.922076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.922080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.922099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.922106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.922382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.922386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.922415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.922418Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.922474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.922478Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.922484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.922487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.922490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.922493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.922496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.922498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.922504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.922507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.922509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.922713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.922722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.922724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.922727Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.922732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.922739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
4 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:37.022022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T17:54:37.022040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004 2024-11-21T17:54:37.022092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.022107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.022111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet 72057594046678944 2024-11-21T17:54:37.022168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T17:54:37.022174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet 72057594046678944 2024-11-21T17:54:37.022198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.022206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:54:37.022211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:54:37.022477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.022482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.022517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:37.022528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.022533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:54:37.022536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T17:54:37.022614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.022619Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T17:54:37.022626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#102:0 progress is 1/1 2024-11-21T17:54:37.022629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:37.022633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:54:37.022636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:37.022640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:54:37.022642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:54:37.022670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2024-11-21T17:54:37.022673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:54:37.022676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T17:54:37.022678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:54:37.022749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:37.022756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:37.022759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:37.022761Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:54:37.022764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:37.022814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:37.022819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:37.022821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:37.022823Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:37.022825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2024-11-21T17:54:37.022830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:54:37.023213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:37.023361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:54:37.023400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:37.023404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:54:37.023447Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:37.023460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:37.023464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:978:2802] TestWaitNotification: OK eventTxId 102 2024-11-21T17:54:37.023536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.023565Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 39us result status StatusSuccess 2024-11-21T17:54:37.023643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.023709Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.023724Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2024-11-21T17:54:37.023766Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:37.027998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:37.028017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.028020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:37.028023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:37.028034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:37.028037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:37.028042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.028098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:37.034991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:37.035009Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:37.037010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:37.037510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:37.037531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:37.038401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:37.038514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:37.038569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.038638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:37.039252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.039418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.039427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.039455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:37.039459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.039464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:37.039473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.040363Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:37.050919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:37.050974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.051014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:37.051064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:37.051069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.051504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.051522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:37.051544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.051550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:37.051553Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:37.051556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:37.051783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.051789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:37.051791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:37.052027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.052031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.052035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.052039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.052511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:37.052812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:37.052845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:37.052961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.052977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.052983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.053017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:37.053021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.053039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.053046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:37.053306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.053311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.053340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.053343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:37.053402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.053409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:37.053419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:37.053423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.053429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:37.053432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.053435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:37.053437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:37.053444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:37.053447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:37.053450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:37.053663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.053673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.053676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:37.053679Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:37.053682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.053691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:37.057964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:54:37.057980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:54:37.058017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.058028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.058031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:37.058074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:54:37.058078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:37.058093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.058099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:37.058104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:54:37.058362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.058366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.058382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:37.058390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.058393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:54:37.058395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T17:54:37.058400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.058403Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T17:54:37.058409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 
2024-11-21T17:54:37.058412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:37.058415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:54:37.058418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:37.058420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:54:37.058423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:54:37.058427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:37.058430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T17:54:37.058433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:37.058434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:37.058516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:37.058521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:37.058524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:37.058526Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:37.058528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:37.058598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:37.058603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:37.058606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:37.058608Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:37.058610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:37.058614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T17:54:37.059036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T17:54:37.059169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2024-11-21T17:54:37.059203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:37.059216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2024-11-21T17:54:37.059225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:37.059227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:37.059262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:37.059277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:37.059280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:310:2302] 2024-11-21T17:54:37.059289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:37.059298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:37.059300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:310:2302] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:37.059336Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.059351Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 21us result status StatusSuccess 2024-11-21T17:54:37.059405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T17:54:37.059452Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.059463Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 12us result status StatusPathDoesNotExist 2024-11-21T17:54:37.059478Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsRejects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:37.070308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:37.070328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.070331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:37.070335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:37.070348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:37.070350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:37.070356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.070414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:37.077343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:37.077359Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:37.079335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:37.079824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T17:54:37.079849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:37.080955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:37.081102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:37.081169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.081235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:37.081941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.082145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.082151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.082178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:37.082182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.082186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:37.082194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.083024Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:37.093126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:37.093186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.093235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:37.093287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:37.093292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.093805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.093822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:37.093852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.093859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:37.093862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:37.093865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:37.094100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.094107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:37.094110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:37.094312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.094316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.094320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.094324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.094728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:37.095021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:37.095054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:37.095171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.095185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.095189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.095223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:37.095227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.095249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.095257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:37.095537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T17:54:37.095541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.095567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.095570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:37.095630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.095635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:37.095644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:37.095648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.095652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:37.095655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.095658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:37.095661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:37.095667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:37.095671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:37.095673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:37.095858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.095866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.095869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:37.095872Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:37.095875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.095882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
4-11-21T17:54:37.103318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:37.103432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:37.103454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:37.103667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.103671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:37.103685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:37.103693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.103696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:37.103698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T17:54:37.103704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.103707Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T17:54:37.103713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:37.103715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:37.103718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:37.103723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:37.103726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:37.103728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:37.103734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:37.103738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:37.103740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:54:37.103742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:54:37.103813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:37.103819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T17:54:37.103822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:37.103824Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:54:37.103826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:37.103875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:37.103880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:37.103882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:37.103883Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:54:37.103885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:37.103890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:37.104393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:37.104418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:54:37.104448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:37.104452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:37.104496Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:37.104506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:37.104509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:332:2324] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:37.104549Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.104566Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 2024-11-21T17:54:37.104654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.104710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.104720Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 12us result status StatusSuccess 2024-11-21T17:54:37.104745Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.104769Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T17:54:37.104776Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 7us result status StatusSuccess 2024-11-21T17:54:37.104793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2024-11-21T17:53:43.924833Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:43.925562Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:43.925625Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:53:43.925742Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:53:43.925929Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:53:43.925936Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:43.926039Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:45:2073] ControllerId# 72057594037932033 2024-11-21T17:53:43.926042Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:43.926072Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:43.926127Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:43.927987Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 
IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:43.927997Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:43.928221Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:53:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.928272Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:54:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.928288Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:55:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.928305Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:56:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.928320Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:57:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.928337Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:58:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.928356Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:59:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.928360Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:43.928371Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:45:2073] 2024-11-21T17:53:43.928373Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:45:2073] 2024-11-21T17:53:43.928380Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:43.928386Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:43.928543Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:43.928553Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:43.929112Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:43.929145Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:43.929269Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:67:2071] ControllerId# 72057594037932033 2024-11-21T17:53:43.929271Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:43.929282Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:43.929308Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:43.929406Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:43.929431Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:43.929433Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:43.929640Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:73:2075] targetNodeId# 1 Marker# DSP01 
2024-11-21T17:53:43.929660Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:74:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.929675Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:75:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.929692Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:76:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.929705Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:77:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.929726Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:78:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.929745Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:79:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:43.929748Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:43.929753Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:67:2071] 2024-11-21T17:53:43.929756Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:67:2071] 2024-11-21T17:53:43.929774Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:43.929780Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:43.929823Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:43.929842Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:43.932868Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:45:2073] 2024-11-21T17:53:43.932887Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:43.932892Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:43.932951Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:67:2071] 2024-11-21T17:53:43.932962Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:43.932965Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:43.932991Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:43.933008Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:45:2073] 2024-11-21T17:53:43.933013Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:43.933017Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:43.933545Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:43.933826Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:43.933835Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:53:43.933860Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:43.933877Z node 1 :STATESTORAGE DEBUG: 
ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:43.934343Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:43.934357Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:43.934361Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:43.934390Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:89:2084] 2024-11-21T17:53:43.934398Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:49:2064] 2024-11-21T17:53:43.934401Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:49:2064] 2024-11-21T17:53:43.934431Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:43.934496Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:43.934534Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T17:53:43.934540Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:43.934544Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:43.934555Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:43.934570Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T17:53:43.934573Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T17:53:43.934641Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:43.934647Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:43.934659Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:43.934662Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:43.934668Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:95:2091] 2024-11-21T17:53:43.934709Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID ... 
4037888] lookup [24:548:2090] 2024-11-21T17:54:37.114730Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:37.114734Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [23:316:2259] 2024-11-21T17:54:37.114738Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [24:547:2089] 2024-11-21T17:54:37.114741Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [24:548:2090] 2024-11-21T17:54:37.114749Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:37.114770Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 23 [24:547:2089] 2024-11-21T17:54:37.114786Z node 24 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:54:37.114793Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [24:547:2089] 2024-11-21T17:54:37.114796Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [24:547:2089] 2024-11-21T17:54:37.114851Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:54:37.114870Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:54:37.114878Z node 23 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:54:37.114890Z node 23 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [24:547:2089] 2024-11-21T17:54:37.114934Z node 24 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [23:451:2360] CurrentLeaderTablet: [23:468:2372] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T17:54:37.114949Z node 23 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([24:547:2089]) [23:556:2424] 2024-11-21T17:54:37.114954Z node 24 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [23:451:2360] CurrentLeaderTablet: [23:468:2372] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T17:54:37.114961Z node 24 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [23:451:2360] CurrentLeaderTablet: [23:468:2372] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T17:54:37.114964Z node 24 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2024-11-21T17:54:37.114967Z node 24 :TABLET_RESOLVER DEBUG: SelectForward node 24 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [23:451:2360] 2024-11-21T17:54:37.114972Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 23 [24:548:2090] 2024-11-21T17:54:37.114996Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [24:548:2090] 2024-11-21T17:54:37.114998Z node 24 :PIPE_CLIENT 
DEBUG: TClient[72075186224037888]::SendEvent [24:548:2090] 2024-11-21T17:54:37.115041Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [24:547:2089] 2024-11-21T17:54:37.115044Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [24:547:2089] 2024-11-21T17:54:37.115046Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [24:547:2089] 2024-11-21T17:54:37.115055Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [24:547:2089] 2024-11-21T17:54:37.115063Z node 24 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594037927937 Status=OK ClientId=[24:547:2089] 2024-11-21T17:54:37.115081Z node 23 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [24:548:2090] 2024-11-21T17:54:37.115097Z node 23 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [24:544:2089] EventType# 268959744 2024-11-21T17:54:37.115127Z node 23 :HIVE DEBUG: HIVE#72057594037927937 Handle TEvLocal::TEvRegisterNode from [24:544:2089] HiveId: 72057594037927937 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2024-11-21T17:54:37.115138Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2024-11-21T17:54:37.115143Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:37.115150Z node 23 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxRegisterNode(24)::Execute 2024-11-21T17:54:37.115168Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessWaitQueue (0) 2024-11-21T17:54:37.115171Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 2024-11-21T17:54:37.115173Z node 23 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - sending 2024-11-21T17:54:37.115176Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessWaitQueue (0) 2024-11-21T17:54:37.115178Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue (0) 2024-11-21T17:54:37.115184Z node 23 :HIVE WARN: HIVE#72057594037927937 Node(24, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:37.115191Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2024-11-21T17:54:37.115196Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:37.115229Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [24:548:2090] 2024-11-21T17:54:37.115232Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [24:548:2090] 2024-11-21T17:54:37.115233Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [24:548:2090] 2024-11-21T17:54:37.115238Z node 24 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [24:548:2090] 2024-11-21T17:54:37.115242Z node 23 :HIVE TRACE: HIVE#72075186224037888 Handle TEvTabletPipe::TEvServerConnected([24:548:2090]) [23:557:2425] 2024-11-21T17:54:37.115247Z node 23 :HIVE TRACE: HIVE#72057594037927937 ProcessBootQueue - executing 2024-11-21T17:54:37.115251Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} queued, type 
NKikimr::NHive::TTxProcessBootQueue 2024-11-21T17:54:37.115253Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:37.115256Z node 23 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:54:37.115259Z node 23 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessBootQueue (size: 0) 2024-11-21T17:54:37.115261Z node 23 :HIVE DEBUG: HIVE#72057594037927937 Handle ProcessWaitQueue (size: 0) 2024-11-21T17:54:37.115265Z node 23 :HIVE DEBUG: HIVE#72057594037927937 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:54:37.115268Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:54:37.115271Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:37.115277Z node 24 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72075186224037888 Status=OK ClientId=[24:548:2090] 2024-11-21T17:54:37.115291Z node 23 :HIVE DEBUG: HIVE#72057594037927937 TEvInterconnect::TEvNodeInfo NodeId 24 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" 2024-11-21T17:54:37.115303Z node 23 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [24:545:2090] EventType# 268959744 2024-11-21T17:54:37.115332Z node 23 :HIVE DEBUG: HIVE#72075186224037888 Handle TEvLocal::TEvRegisterNode from [24:545:2090] HiveId: 72075186224037888 ServicedDomains { SchemeShard: 72057594046678944 PathId: 2 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } 2024-11-21T17:54:37.115337Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2024-11-21T17:54:37.115341Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:37.115345Z node 23 :HIVE DEBUG: HIVE#72075186224037888 THive::TTxRegisterNode(24)::Execute 2024-11-21T17:54:37.115356Z node 23 :HIVE WARN: HIVE#72075186224037888 Node(24, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:37.115359Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessWaitQueue (0) 2024-11-21T17:54:37.115360Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessBootQueue (0) 2024-11-21T17:54:37.115362Z node 23 :HIVE TRACE: HIVE#72075186224037888 ProcessBootQueue - sending 2024-11-21T17:54:37.115364Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessWaitQueue (0) 2024-11-21T17:54:37.115366Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessBootQueue (0) 2024-11-21T17:54:37.115370Z node 23 :HIVE WARN: HIVE#72075186224037888 Node(24, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:37.115374Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2024-11-21T17:54:37.115378Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:37.115396Z node 23 :HIVE TRACE: HIVE#72075186224037888 ProcessBootQueue - executing 2024-11-21T17:54:37.115398Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} 
Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T17:54:37.115401Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:54:37.115403Z node 23 :HIVE DEBUG: HIVE#72075186224037888 THive::TTxProcessBootQueue()::Execute 2024-11-21T17:54:37.115405Z node 23 :HIVE DEBUG: HIVE#72075186224037888 0 nodes connected out of 0 2024-11-21T17:54:37.115407Z node 23 :HIVE DEBUG: HIVE#72075186224037888 Handle ProcessBootQueue (size: 0) 2024-11-21T17:54:37.115408Z node 23 :HIVE DEBUG: HIVE#72075186224037888 Handle ProcessWaitQueue (size: 0) 2024-11-21T17:54:37.115411Z node 23 :HIVE DEBUG: HIVE#72075186224037888 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T17:54:37.115413Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:54:37.115416Z node 23 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:54:37.115425Z node 23 :HIVE DEBUG: HIVE#72075186224037888 TEvInterconnect::TEvNodeInfo NodeId 24 Location DataCenter: "2" Module: "2" Rack: "2" Unit: "2" >> test.py::test[view-secure_eval--Results] [GOOD] >> test.py::test[view-trivial_view--Debug] >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] >> SystemView::CollectPreparedQueries >> TSchemeShardSubDomainTest::DeclareDefineAndDelete >> TSchemeShardSubDomainTest::DeclareAndDelete >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop >> TSchemeShardSubDomainTest::SetSchemeLimits ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.921646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.921663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.921667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.921670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.921680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.921682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.921687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.921742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.928742Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.928759Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.930697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.931191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.931228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.932280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.932424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.932513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.932588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.933688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.933966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.933976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.934011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.934017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.934021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.934033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.934970Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.946502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.946588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.946659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.946724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.946733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.947228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.947246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.947281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.947291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.947295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.947299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.947576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.947584Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.947587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.947814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.947820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.947823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.947828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.948186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.948491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.948531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.948674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.948696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.948701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.948745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.948750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.948774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.948785Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:36.949142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.949149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.949181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.949186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.949257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.949264Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.949274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.949280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.949285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.949290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.949294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.949297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.949307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.949312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.949316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.949586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.949598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.949602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.949607Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.949611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.949622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
0 2024-11-21T17:54:37.559749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Source { RawX1: 429 RawX2: 8589936986 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T17:54:37.559757Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:54:37.559765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 429 RawX2: 8589936986 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T17:54:37.559776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.559780Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.559805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2024-11-21T17:54:37.559810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:54:37.559815Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2024-11-21T17:54:37.560321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.560363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.560369Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2024-11-21T17:54:37.560377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:54:37.560381Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2024-11-21T17:54:37.560391Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T17:54:37.560395Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 2024-11-21T17:54:37.560773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.560781Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2024-11-21T17:54:37.560791Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T17:54:37.560794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:54:37.560797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2024-11-21T17:54:37.560809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:652:2566] message: TxId: 106 
2024-11-21T17:54:37.560814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T17:54:37.560821Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T17:54:37.560823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T17:54:37.560845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:54:37.560848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:37.561112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T17:54:37.561119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:809:2712] TestWaitNotification: OK eventTxId 106 2024-11-21T17:54:37.561220Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.561265Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 52us result status StatusSuccess 2024-11-21T17:54:37.561346Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2024-11-21T17:54:37.561402Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.561412Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 11us result status StatusSuccess 2024-11-21T17:54:37.561443Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.561492Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:37.561504Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 14us result status StatusSuccess 2024-11-21T17:54:37.561543Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 
SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> YdbLogStore::AlterLogTable [FAIL]
>> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop
>> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
>> TSchemeShardSubDomainTest::CreateForceDropSolomon
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
>> test.py::test[sampling-direct_read--Debug] [GOOD]
>> test.py::test[sampling-direct_read--Plan] [GOOD]
>> test.py::test[sampling-direct_read--Results]
>> test.py::test[window-leading/aggregations_leadlag--Results] [GOOD]
>> test.py::test[window-presort_window_order_by_table-default.txt-Analyze]
>> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
>> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD]
>> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe
>> SystemView::CollectPreparedQueries [GOOD]
>> SystemView::CollectScanQueries
>> test.py::test[key_filter-dict_contains_optional--Debug] [GOOD]
>> test.py::test[key_filter-dict_contains_optional--ForceBlocks]
>> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD]
>> TSchemeShardSubDomainTest::Delete
>> TSchemeShardSubDomainTest::LS
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.060816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.060838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.060842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.060846Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.060860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.060863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.060870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.060933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.067946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.067962Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.069780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.070219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.070245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.071156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.071318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.071387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.071450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.072092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.072376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.072389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.072425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.072433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.072439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.072450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.073251Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.083489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.083564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.083611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.083665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.083670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.084120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.084138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.084163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.084170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.084172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.084175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.084514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.084521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.084525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.084800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.084804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.084808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.084812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.085172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.085441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.085473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.085592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.085606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T17:54:38.085613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.085650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.085653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.085674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.085681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.085982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.085987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.086017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.086020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.086083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.086087Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.086095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.086098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.086102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.086105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.086108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.086110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.086117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.086120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.086123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.086307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.086315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.086319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.086321Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.086324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.086334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... xId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.094654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId#101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:54:38.094661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.094664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:38.094677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2024-11-21T17:54:38.094690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.094695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.094827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.095009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:38.095228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.095234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.095262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:38.095285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.095290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:54:38.095294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:38.095344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.095351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2024-11-21T17:54:38.095358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:38.095362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:38.095367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, 
ready parts: 1/1, is published: false 2024-11-21T17:54:38.095370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:38.095374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:38.095378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:38.095393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:38.095397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:38.095401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:54:38.095404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:54:38.095520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.095528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.095532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:38.095536Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:38.095540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.095630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.095639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.095642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:38.095644Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:38.095646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.095653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:38.095811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.095815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.095827Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:38.095879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.095883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.095889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.096180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.096594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.096617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:54:38.096661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:38.096666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:38.096710Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.096725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2328] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:38.096782Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096802Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 40us result status StatusPathDoesNotExist 2024-11-21T17:54:38.096833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096883Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096895Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 12us result status StatusSuccess 2024-11-21T17:54:38.096932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.095909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.095929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.095933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.095937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.095949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.095951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.095958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.096017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.103525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot
subscribe to console configs 2024-11-21T17:54:38.103542Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.105577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.106080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.106105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.107175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.107321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.107386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.107444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.108157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.108380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.108388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.108412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.108418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.108422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.108430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.109324Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.121332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.121392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.121442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.121500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.121505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.122049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.122067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2024-11-21T17:54:38.122094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.122100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.122103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.122106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.122362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.122368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.122370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.122624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.122632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.122639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.122645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.123067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.123364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.123401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.123534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.123549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.123553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.123590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.123595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.123619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.123627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.123894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.123899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.123932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.123937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.124023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.124030Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.124042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.124045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.124051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.124056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.124061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.124066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.124076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.124082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.124086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.124373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.124388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.124391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.124394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.124398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.124407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
hOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.131406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.131408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:38.131410Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:38.131412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:38.131419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T17:54:38.131513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T17:54:38.131532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T17:54:38.131701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.131716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.131721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId#101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T17:54:38.131729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.131731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:38.131745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2024-11-21T17:54:38.131759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.131764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.131936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.131986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T17:54:38.132160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.132163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.132176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:38.132189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.132192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T17:54:38.132194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T17:54:38.132250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.132258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2024-11-21T17:54:38.132265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T17:54:38.132268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:38.132271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T17:54:38.132274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T17:54:38.132277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T17:54:38.132279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T17:54:38.132287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T17:54:38.132290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T17:54:38.132294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:54:38.132296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:54:38.132348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.132354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.132356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:38.132358Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:38.132361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.132416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.132422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.132424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:38.132426Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:38.132428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.132434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:38.132454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.132457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.132466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:38.132498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.132501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.132505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.132854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.133017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.133028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.133037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:54:38.133064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:38.133067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:38.133104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:38.133113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.133116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2329] TestWaitNotification: 
OK eventTxId 101 2024-11-21T17:54:38.133156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.133170Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 21us result status StatusPathDoesNotExist 2024-11-21T17:54:38.133193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> test.py::test[pg-tpch-q16-default.txt-Results] [GOOD]
>> test.py::test[pg-unknown-default.txt-Debug]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.039587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.039604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.039608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.039612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.039625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.039628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.039634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.039690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.046538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.046552Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.048458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.048919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TTxUpgradeSchema.Execute 2024-11-21T17:54:38.048941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.049815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.049941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.050004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.050080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.050823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.051030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.051037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.051067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.051074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.051079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.051095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.051942Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.064400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.064468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.064521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.064576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.064581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.065339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.065355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.065390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.065396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.065399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.065402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.065672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.065678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.065680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.065908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.065913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.065917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.065922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.066305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.066606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.066644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.066770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.066788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.066793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.066829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.066833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.066853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.066861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.067232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T17:54:38.067241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.067287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.067291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.067355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.067360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.067368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.067371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.067375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.067378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.067381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.067385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.067395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.067399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.067402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.067640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.067649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.067652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.067655Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.067660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.067668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
1T17:54:38.094316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:38.094403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T17:54:38.094757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.094766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.094785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:38.094801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.094804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T17:54:38.094806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T17:54:38.094813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.094818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2024-11-21T17:54:38.094823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T17:54:38.094825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:38.094828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T17:54:38.094831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T17:54:38.094835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T17:54:38.094838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T17:54:38.094865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:38.094871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T17:54:38.094874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:54:38.094879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:54:38.095022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:38.095030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:38.095033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 
2024-11-21T17:54:38.095036Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:38.095038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.095240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:38.095247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T17:54:38.095250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T17:54:38.095252Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:38.095254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:38.095260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T17:54:38.095539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.095546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.095549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.095744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:38.095784Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:38.095820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.095881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2024-11-21T17:54:38.096118Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:38.096141Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:38.096159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 
72075186233409548 2024-11-21T17:54:38.096308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.096367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.096382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:54:38.096440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.096449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.096505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:38.096705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:38.096711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:38.096957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:38.096961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:38.096968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:38.096971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:38.096989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.096996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T17:54:38.097026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:38.097029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:54:38.097066Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:38.097076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.097079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:520:2475] 
TestWaitNotification: OK eventTxId 102 2024-11-21T17:54:38.097120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.097139Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 26us result status StatusPathDoesNotExist 2024-11-21T17:54:38.097168Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.078949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.078965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.078969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.078973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.078985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.078987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.078993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.079044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.085832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.085845Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.087626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.088142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.088165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.089230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.089374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.089446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.089516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.090253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.090434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.090440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.090463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.090467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.090471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.090479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.091535Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.102906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.102972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.103025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.103082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.103087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.103611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.103631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.103663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.103671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.103674Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.103677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.103974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.103983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.103999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.104224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.104251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.104258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.104262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.104679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.104986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.105024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.105147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.105165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.105172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.105210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.105214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.105234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.105242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.105517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.105521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.105552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.105555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.105623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.105627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.105636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.105639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.105642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.105646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.105649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.105651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.105659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.105663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.105666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.105874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.105883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.105887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.105890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.105893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.105918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
opose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.166467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:54:38.166484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:54:38.166559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.166583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.166588Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:38.166635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:54:38.166639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:38.166658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.166664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:38.166669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:54:38.167002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.167010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.167037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:38.167049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.167052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:54:38.167055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:332:2310], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T17:54:38.167115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.167120Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 
ProgressState 2024-11-21T17:54:38.167128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:54:38.167130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:38.167135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:54:38.167139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:38.167144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:54:38.167147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:54:38.167172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:38.167176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T17:54:38.167178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:38.167180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:38.167272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.167279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.167282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:38.167285Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:38.167288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.167357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.167363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.167365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:38.167367Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:38.167369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:38.167374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T17:54:38.167856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 100 2024-11-21T17:54:38.167904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T17:54:38.167939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:38.167955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T17:54:38.167995Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:38.168006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.168009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:476:2427] TestWaitNotification: OK eventTxId 100 2024-11-21T17:54:38.168058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.168080Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 30us result status StatusSuccess 2024-11-21T17:54:38.168141Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.168181Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.168189Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 9us result status StatusSuccess 2024-11-21T17:54:38.168215Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.214381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.214398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.214401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.214404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.214413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.214416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.214421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.214472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.221424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.221441Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.223302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.223793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.223816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.224830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.224954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.225015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.225074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.225750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.225945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.225950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.225975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.225979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.225983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.225991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.226858Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.237470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.237542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.237589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.237643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.237650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.238255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.238273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.238307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.238313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.238316Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.238319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.238589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.238597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.238601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.238893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.238899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.238903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.238907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.239287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.239593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.239629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.239763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.239778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.239783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.239816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.239821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.239841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.239850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.240143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.240148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.240180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.240184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.240273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.240280Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.240290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.240293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.240297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.240300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.240302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.240305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.240312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.240316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.240318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.240540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.240549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.240552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.240556Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.240559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.240567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
HARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409546 2024-11-21T17:54:38.354244Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2024-11-21T17:54:38.354296Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:38.354472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:38.354504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2024-11-21T17:54:38.354757Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T17:54:38.354900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:54:38.354951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2024-11-21T17:54:38.355253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:38.355294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2024-11-21T17:54:38.355493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T17:54:38.355890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:38.355946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.356131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.356142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.356170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:38.356411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:54:38.356431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:54:38.356476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T17:54:38.356482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:7 tabletId 72075186233409552 2024-11-21T17:54:38.357198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:38.357217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:38.357247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.357269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:38.357275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:38.357367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.357377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.357400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.357532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:54:38.357540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:54:38.357555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:38.357559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:38.357874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:38.357889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:38.357941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2024-11-21T17:54:38.358414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:38.358424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T17:54:38.358443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T17:54:38.358448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T17:54:38.358543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.358595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:727:2613] 2024-11-21T17:54:38.358618Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.358638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:727:2613] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2024-11-21T17:54:38.358702Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 43us result status StatusPathDoesNotExist 2024-11-21T17:54:38.358773Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358827Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358839Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 15us result status StatusPathDoesNotExist 2024-11-21T17:54:38.358853Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358888Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.358908Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 21us result status StatusSuccess 
2024-11-21T17:54:38.358963Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::Redefine >> TSchemeShardSubDomainTest::LS [GOOD] >> test.py::test[view-trivial_view--Debug] [GOOD] >> test.py::test[view-trivial_view--Plan] [GOOD] >> test.py::test[view-trivial_view--Results] >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> SystemView::CollectScanQueries [GOOD] >> test.py::test[sampling-direct_read--Results] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.508034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.508052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.508055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.508058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.508068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.508070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.508076Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.508128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.515027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.515040Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.516866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.517343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.517367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.518293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.518419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.518480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.518536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.519201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.519401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.519407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.519432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.519437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.519440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.519449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.520262Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.530615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.530671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.530718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.530766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.530771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.531339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.531360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.531391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.531398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.531401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.531404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.531701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.531710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.531713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.531933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.531938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.531942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.531946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.532348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.532609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.532643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.532753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.532768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.532772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.532804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.532808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.532828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.532836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.533098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.533102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.533131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.533134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.533194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.533199Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.533206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.533210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.533214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.533217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.533219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.533221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.533227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.533231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.533233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.533419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.533427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.533430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.533433Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.533436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.533443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
r ... 2024-11-21T17:54:38.553415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T17:54:38.553419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T17:54:38.553422Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:38.553425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:38.553431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T17:54:38.553736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.553743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.553745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.553808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.553969Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:54:38.563246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.563310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:38.563369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T17:54:38.563399Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:38.563472Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2024-11-21T17:54:38.563653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:38.563680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2024-11-21T17:54:38.563750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:38.563768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
2024-11-21T17:54:38.563942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.563950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.563969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:38.564000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.564002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.564010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.564437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:38.564445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:38.564493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:38.564495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:38.564500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:38.564503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:38.564771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.564788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T17:54:38.564833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T17:54:38.564839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T17:54:38.564891Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T17:54:38.564904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.564907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:492:2447] TestWaitNotification: OK eventTxId 101 2024-11-21T17:54:38.564967Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.564998Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 43us result status StatusPathDoesNotExist 2024-11-21T17:54:38.565032Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:38.565073Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.565085Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2024-11-21T17:54:38.565127Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T17:54:38.565172Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:54:38.565189Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:54:38.565194Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T17:54:38.565228Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.565236Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 9us result status 
StatusSuccess 2024-11-21T17:54:38.565256Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.549553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.549570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.549573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.549576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.549585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.549588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.549593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.549644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.556181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.556193Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.557863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.558364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.558383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T17:54:38.559312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.559435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.559496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.559550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.560148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.560335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.560342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.560362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.560367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.560371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.560380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.561123Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.572552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.572613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.572660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.572711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.572715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.573309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.573325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.573353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.573360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.573363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.573366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.573623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.573628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.573631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.573833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.573837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.573841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.573845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.574233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.574564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.574619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.574804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.574824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.574832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.574878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.574885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.574910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.574921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.575249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.575255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.575290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.575294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.575370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.575376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.575386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.575390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.575395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.575400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.575405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.575409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.575418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.575423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.575427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.575710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.575721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.575725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.575729Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.575733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.575746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.589146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T17:54:38.589162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T17:54:38.589205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.589218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.589222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:38.589282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T17:54:38.589289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet 72057594046678944 2024-11-21T17:54:38.589309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.589315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:38.589320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T17:54:38.589600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.589605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.589627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:38.589638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.589641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T17:54:38.589644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T17:54:38.589650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.589654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 
ProgressState 2024-11-21T17:54:38.589661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T17:54:38.589664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:38.589668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T17:54:38.589671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T17:54:38.589674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T17:54:38.589676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T17:54:38.589696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:38.589699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T17:54:38.589702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T17:54:38.589704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T17:54:38.589843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.589851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.589853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:38.589856Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T17:54:38.589859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.589897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.589902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T17:54:38.589904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T17:54:38.589908Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T17:54:38.589910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:38.589915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T17:54:38.590442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 100 2024-11-21T17:54:38.590538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T17:54:38.590602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T17:54:38.590618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T17:54:38.590657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T17:54:38.590670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.590673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:452:2407] TestWaitNotification: OK eventTxId 100 2024-11-21T17:54:38.590716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.590750Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 30us result status StatusSuccess 2024-11-21T17:54:38.590835Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.590901Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.590913Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2024-11-21T17:54:38.590944Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.275209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.275231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.275235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.275239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.275252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.275255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.275261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.275319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.282106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.282122Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.284052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.284543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.284569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.285643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.285769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.285835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.285899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.287081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.287405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.287418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.287457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.287464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.287470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.287484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.288318Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.298052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.298107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.298154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.298207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.298212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.298716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.298733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.298760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.298766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.298769Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.298772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.299075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.299081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.299084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.299297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.299302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.299306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.299310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.299739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.300038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.300074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.300193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.300208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.300213Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.300262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.300267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.300288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.300296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.300570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.300574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.300602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.300605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.300665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.300669Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.300677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.300680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.300684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.300687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.300690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.300692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.300698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.300703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.300706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.300891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.300898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.300902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.300904Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.300907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.300915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:39 tabletId 72075186233409584 2024-11-21T17:54:38.469069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2024-11-21T17:54:38.469072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2024-11-21T17:54:38.469087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2024-11-21T17:54:38.469090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2024-11-21T17:54:38.469100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2024-11-21T17:54:38.469104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2024-11-21T17:54:38.469116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2024-11-21T17:54:38.469119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2024-11-21T17:54:38.469126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2024-11-21T17:54:38.469129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2024-11-21T17:54:38.469137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2024-11-21T17:54:38.469141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2024-11-21T17:54:38.469855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2024-11-21T17:54:38.469867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2024-11-21T17:54:38.469880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:38.469884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:38.469896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2024-11-21T17:54:38.469900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2024-11-21T17:54:38.469913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2024-11-21T17:54:38.469916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2024-11-21T17:54:38.469925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2024-11-21T17:54:38.469929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2024-11-21T17:54:38.470064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T17:54:38.470068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2024-11-21T17:54:38.470160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2024-11-21T17:54:38.470164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2024-11-21T17:54:38.470420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2024-11-21T17:54:38.470427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:25 tabletId 72075186233409570 2024-11-21T17:54:38.471033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046678944:20 2024-11-21T17:54:38.471045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2024-11-21T17:54:38.471064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2024-11-21T17:54:38.471067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2024-11-21T17:54:38.471077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2024-11-21T17:54:38.471083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2024-11-21T17:54:38.471100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:38.471103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:38.471115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2024-11-21T17:54:38.471119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2024-11-21T17:54:38.471135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2024-11-21T17:54:38.471139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2024-11-21T17:54:38.471151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:54:38.471155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:54:38.471167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2024-11-21T17:54:38.471170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2024-11-21T17:54:38.471294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2024-11-21T17:54:38.471302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2024-11-21T17:54:38.471312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2024-11-21T17:54:38.471315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2024-11-21T17:54:38.471327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T17:54:38.471330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2024-11-21T17:54:38.471339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2024-11-21T17:54:38.471343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2024-11-21T17:54:38.471350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2024-11-21T17:54:38.471353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2024-11-21T17:54:38.471365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2024-11-21T17:54:38.471369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2024-11-21T17:54:38.471951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:38.471961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 
tabletId 72075186233409546 2024-11-21T17:54:38.471977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2024-11-21T17:54:38.471983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2024-11-21T17:54:38.471996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.472022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.472027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.472042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.472103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.472489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:54:38.472547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:54:38.472554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:54:38.472619Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:54:38.472640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.472645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2056:3659] TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:38.472718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.472750Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 43us result status StatusPathDoesNotExist 2024-11-21T17:54:38.472795Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at 
schemeshard: 72057594046678944 2024-11-21T17:54:38.472868Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.472885Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 19us result status StatusPathDoesNotExist 2024-11-21T17:54:38.472901Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> TSchemeShardSubDomainTest::Redefine [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Analyze] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Debug] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:37.646822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:37.646841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.646844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:37.646847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:37.646857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:37.646859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:37.646865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.646929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:37.653526Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2024-11-21T17:54:37.653541Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:37.655357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:37.655814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:37.655835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:37.656835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:37.656975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:37.657038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.657101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:37.658033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.658234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.658242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.658267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:37.658271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.658276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:37.658285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.659104Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:37.668889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:37.668964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.669023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:37.669077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:37.669081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.670035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.670057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T17:54:37.670089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.670096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:37.670098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:37.670101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:37.670541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.670555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:37.670559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:37.670894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.670901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.670904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.670909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.671328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:37.671693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:37.671735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:37.671864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.671882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.671887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.671925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:37.671929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.671953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.671963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:37.672320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.672326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.672363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.672367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:37.672433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.672438Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:37.672446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:37.672449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.672452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:37.672456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.672458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:37.672461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:37.672469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:37.672473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:37.672475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:37.672709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.672721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.672725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:37.672729Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:37.672733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.672744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
nerId: 72057594046678944, cookie: 139 2024-11-21T17:54:38.735630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2024-11-21T17:54:38.735633Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2024-11-21T17:54:38.735635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2024-11-21T17:54:38.735838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2024-11-21T17:54:38.735846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2024-11-21T17:54:38.735848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2024-11-21T17:54:38.735850Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:38.735855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:38.735861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2024-11-21T17:54:38.736073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.736079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.736081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.736083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.736085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.736171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2024-11-21T17:54:38.736258Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:54:38.740549Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 2024-11-21T17:54:38.740706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.740752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:38.740857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 
72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2024-11-21T17:54:38.740875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409556 2024-11-21T17:54:38.741027Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 2024-11-21T17:54:38.751001Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:38.751066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2024-11-21T17:54:38.751127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2024-11-21T17:54:38.751203Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 2024-11-21T17:54:38.751288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:38.751305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409555 Forgetting tablet 72075186233409547 2024-11-21T17:54:38.751510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2024-11-21T17:54:38.751533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 Forgetting tablet 72075186233409557 2024-11-21T17:54:38.751649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2024-11-21T17:54:38.751908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.751920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2024-11-21T17:54:38.751932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.751983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2024-11-21T17:54:38.752010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.752014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.752028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:38.752135Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:38.752142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:38.752154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2024-11-21T17:54:38.752157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2024-11-21T17:54:38.752580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2024-11-21T17:54:38.752588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2024-11-21T17:54:38.752603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:38.752606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:38.752611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2024-11-21T17:54:38.752615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2024-11-21T17:54:38.752630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.752653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.752661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.752664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.752673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.752935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2024-11-21T17:54:38.753050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2024-11-21T17:54:38.753053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2024-11-21T17:54:38.753134Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2024-11-21T17:54:38.753147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.753149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2095:3892] TestWaitNotification: OK eventTxId 139 2024-11-21T17:54:38.753263Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.753284Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 30us result status 
StatusSuccess 2024-11-21T17:54:38.753348Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.790472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.790490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.790493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.790496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.790506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.790509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.790514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.790599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.797585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.797603Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.799735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.800214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.800259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T17:54:38.801263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.801398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.801466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.801532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.802272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.802495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.802503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.802531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.802536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.802540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.802550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.803511Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.814231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.814298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.814348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.814399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.814404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.814892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.814911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.814935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.814941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.814944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.814947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.815180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.815186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.815188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.815397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.815403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.815406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.815410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.815796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.816057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.816090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.816206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.816221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:38.816225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.816275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.816279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.816298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.816305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.816560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.816565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.816595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.816598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.816656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.816660Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.816668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.816671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.816674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.816677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.816680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.816682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.816688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.816692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.816694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.816880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.816887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.816890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.816893Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.816896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.816904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
on IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:38.845071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T17:54:38.845075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:38.845079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:54:38.845083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:54:38.845102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T17:54:38.845106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2024-11-21T17:54:38.845108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T17:54:38.845112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T17:54:38.845189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:38.845196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:38.845198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:54:38.845201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T17:54:38.845203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.845259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:38.845264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T17:54:38.845266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T17:54:38.845268Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:38.845270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:38.845275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2024-11-21T17:54:38.845513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.845519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.845521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:38.845755Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:54:38.845907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.845941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:38.845991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:54:38.846040Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:54:38.846092Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:54:38.846110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T17:54:38.846187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:38.846204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2024-11-21T17:54:38.846321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:54:38.846335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:54:38.846399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.846403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.846417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:54:38.846557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:38.846561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:54:38.846568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.846705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:38.846711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:38.846901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:38.846906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:38.846939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:54:38.846943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:54:38.847120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:38.847127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T17:54:38.847161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T17:54:38.847165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T17:54:38.847206Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T17:54:38.847217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:54:38.847220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:572:2527] TestWaitNotification: OK eventTxId 104 2024-11-21T17:54:38.847269Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.847289Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 26us result status StatusPathDoesNotExist 2024-11-21T17:54:38.847317Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:38.847352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:38.847363Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 11us result status StatusSuccess 
2024-11-21T17:54:38.847398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> test.py::test[pg-unknown-default.txt-Debug] [GOOD] >> test.py::test[pg-unknown-default.txt-Plan] [GOOD] >> test.py::test[pg-unknown-default.txt-Results] >> test.py::test[key_filter-dict_contains_optional--ForceBlocks] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Plan] [GOOD] >> test.py::test[key_filter-dict_contains_optional--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScanQueries [GOOD] Test command err: 2024-11-21T17:51:36.118883Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791726112791271:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:36.118983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001972/r3tmp/tmpibAgao/pdisk_1.dat 2024-11-21T17:51:36.180015Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14885, node 1 2024-11-21T17:51:36.196836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:36.196849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:36.196850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:36.196879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:36.218240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.218268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:36.220070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.252047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.266341Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:51:36.291648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:36.297749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.297774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.298704Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:36.298896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T17:51:36.299575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.299593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.300720Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:51:36.301562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.308493Z node 2 :SYSTEM_VIEWS INFO: [72075186224037895] OnActivateExecutor 2024-11-21T17:51:36.308512Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Execute 2024-11-21T17:51:36.309053Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:36.309076Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T17:51:36.313352Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Complete 2024-11-21T17:51:36.313373Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Execute 2024-11-21T17:51:36.313474Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T17:51:36.313484Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval metrics: query count# 0 2024-11-21T17:51:36.313489Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval query tops: total query count# 0 2024-11-21T17:51:36.313493Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T17:51:36.313498Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 6, result count# 0 2024-11-21T17:51:36.313500Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 7, result count# 0 2024-11-21T17:51:36.313504Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 8, result count# 0 2024-11-21T17:51:36.313507Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 9, result count# 0 2024-11-21T17:51:36.313513Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 10, result count# 0 2024-11-21T17:51:36.313515Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 11, result count# 0 2024-11-21T17:51:36.313518Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 12, result count# 0 2024-11-21T17:51:36.313520Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 13, result count# 0 2024-11-21T17:51:36.313527Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 14, result count# 0 2024-11-21T17:51:36.313530Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 15, result count# 0 2024-11-21T17:51:36.313534Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 16, partCount count# 0 2024-11-21T17:51:36.313537Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 19, partCount count# 0 2024-11-21T17:51:36.313541Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 17, result count# 0 2024-11-21T17:51:36.313544Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 18, result count# 0 2024-11-21T17:51:36.313556Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2024-11-21T17:51:36.000000Z 2024-11-21T17:51:36.318482Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# 
[2:7439791727855888974:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T17:51:36.319340Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Complete 2024-11-21T17:51:36.319374Z node 2 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [2:7439791727855888974:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/PQ 2024-11-21T17:51:36.319999Z node 2 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [2:7439791727855888974:2055], database# /Root/PQ, no sysview processor 2024-11-21T17:51:36.352549Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxConfigure::Execute: database# /Root/PQ 2024-11-21T17:51:36.354083Z node 2 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037895 2024-11-21T17:51:36.355544Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxConfigure::Complete iteration 0 2024-11-21T17:51:36.502197Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Execute 2024-11-21T17:51:36.502222Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryResults: interval end# 2024-11-21T17:51:36.000000Z, query count# 0 2024-11-21T17:51:36.502228Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T17:51:36.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.502231Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T17:51:36.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.502234Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T17:51:36.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.502236Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T17:51:36.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.502239Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 9, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.502241Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 11, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.502244Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 13, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.502246Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 15, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:36.503495Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Complete 2024-11-21T17:51:37.049093Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxCollect::Execute 2024-11-21T17:51:37.049117Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistPartitionTopResults: table id# 17, partition interval end# 2024-11-21T17:51:37.000000Z, partition count# 0 2024-11-21T17:51:37.049122Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistPartitionTopResults: table id# 18, partition interval end# 2024-11-21T18:00:00.000000Z, partition count# 0 2024-11-21T17:51:37.049143Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2024-11-21T17:51:37.000000Z 2024-11-21T17:51:37.050920Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxCollect::Complete 
2024-11-21T17:51:37.309158Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2024-11-21T17:51:37.533439Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Execute 2024-11-21T17:51:37.533463Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryResults: interval end# 2024-11-21T17:51:37.000000Z, query count# 0 2024-11-21T17:51:37.533482Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T17:51:37.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:37.533486Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T17:51:37.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:37.533489Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T17:51:37.000000Z, query count# 0, persisted# 0 2024-11-21T17:51:37.533491Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T17:51:37.000000Z, query count# 0, persisted# 0 20 ... ription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:38.034041Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:38.034079Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:38.035120Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:38.035803Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:38.037772Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:38.150982Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439792507617274534:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.150998Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439792507617274526:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.151012Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.151518Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:54:38.152809Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439792507617274540:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:54:38.257690Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xs6y642v4j7hfv2kgesbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmMzOTc4MTQtYTZiOWM0YzAtNDQyMGU1Y2ItY2I2ZjhkZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:38.270226Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xs71xfzxscjszq5kwsmb6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Mzc3YjYxMzItNTZhYTk2OGItYjc3OGZiMjItNGY1ZDNhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:38.282627Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd7xs7208em1w9szws1hdr6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliYTZjNzAtYzJkYWY3NzMtMjAzODRkZTQtZTJmMzA4NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:38.283154Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439792507617274683:2333], owner: [7:7439792507617274679:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:54:38.283280Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439792507617274683:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:54:38.283367Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439792507617274683:2333], row count: 2, finished: 1 2024-11-21T17:54:38.283375Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439792507617274683:2333], owner: [7:7439792507617274679:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:54:38.283858Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211678282, txId: 281474976715663] shutting down 2024-11-21T17:54:38.392778Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439792510294632537:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:38.392906Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001972/r3tmp/tmpGy2ljh/pdisk_1.dat 2024-11-21T17:54:38.398960Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10949, node 8 2024-11-21T17:54:38.412630Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:38.412651Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:38.412652Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:38.412684Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13217 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:38.493374Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:38.493409Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:38.494474Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:38.495066Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:38.496958Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:38.608588Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439792510294633208:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.608594Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439792510294633218:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.608609Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.609136Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T17:54:38.610485Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439792510294633222:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:54:38.702202Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xs7cg4b4syp1xvan6spzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ODNjZWRhNzctODAwNjIwZTUtN2EyMzE3ZC01NTg2MTdlYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:38.713496Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd7xs7fhcr1tvt68wbe9a01p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=NmM5ZDdjMGUtNWQyZTdmYTAtYTViNDk4ZGUtZWRhMWMxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:38.714647Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211678757, txId: 281474976715662] shutting down 2024-11-21T17:54:38.726606Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd7xs7fv0g1rpagvamtkrbva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=NWViOGViYTEtNTVmZmUwZjAtMjVkOThmMzktNDkwY2QxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:38.727060Z node 8 :SYSTEM_VIEWS INFO: Scan started, actor: [8:7439792510294633387:2333], owner: [8:7439792510294633383:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:54:38.727203Z node 8 :SYSTEM_VIEWS INFO: Scan prepared, actor: [8:7439792510294633387:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T17:54:38.727279Z node 8 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [8:7439792510294633387:2333], row count: 2, finished: 1 2024-11-21T17:54:38.727288Z node 8 :SYSTEM_VIEWS INFO: Scan finished, actor: [8:7439792510294633387:2333], owner: [8:7439792510294633383:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T17:54:38.727823Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732211678726, txId: 281474976715664] shutting down >> test.py::test[view-trivial_view--Results] [GOOD] >> test.py::test[view-view_with_lambda_process--Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:38.437755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:38.437779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.437784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:38.437789Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:38.437803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:38.437807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:38.437816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:38.437893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:38.448530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:38.448546Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:38.451021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:38.451729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:38.451756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:38.452995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:38.453200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:38.453285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.453360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:38.454314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.454562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.454584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.454619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:38.454626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.454633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:38.454645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.455793Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:38.467595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:38.467668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.467731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:38.467798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:38.467806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.468464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.468486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:38.468525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.468534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:38.468539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:38.468543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:38.469073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.469083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:38.469088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:38.469413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.469422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.469427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.469435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.470018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:38.470363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:38.470406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:38.470585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:38.470608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T17:54:38.470616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.470663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:38.470669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:38.470694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.470705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:38.471092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:38.471100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:38.471140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:38.471146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:38.471226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:38.471233Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:38.471243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:38.471247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.471252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:38.471258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:38.471262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:38.471266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:38.471275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:38.471281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:38.471285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:38.471576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.471588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:38.471592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:38.471597Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:38.471601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:38.471614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:39.072685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2024-11-21T17:54:39.072719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2024-11-21T17:54:39.072782Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:39.072795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:39.072799Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId#108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2024-11-21T17:54:39.072832Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2024-11-21T17:54:39.072858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:39.072866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T17:54:39.073209Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:39.073213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:39.073259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:54:39.073275Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:39.073279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:333:2311], at schemeshard: 72057594046678944, txId: 108, path id: 1 2024-11-21T17:54:39.073282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:333:2311], at schemeshard: 72057594046678944, txId: 108, path id: 5 
2024-11-21T17:54:39.073289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.073293Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId#108:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:54:39.073298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts operationId#108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2024-11-21T17:54:39.073449Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:54:39.073456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:54:39.073459Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T17:54:39.073462Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T17:54:39.073465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:54:39.073654Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:54:39.073661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T17:54:39.073663Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T17:54:39.073665Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2024-11-21T17:54:39.073668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T17:54:39.073674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2024-11-21T17:54:39.073871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2024-11-21T17:54:39.073939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T17:54:39.074129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T17:54:39.085051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2024-11-21T17:54:39.085073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:39.085093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2024-11-21T17:54:39.085101Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2024-11-21T17:54:39.085191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2024-11-21T17:54:39.085194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:39.085202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2024-11-21T17:54:39.085211Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 108 2024-11-21T17:54:39.085723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.085954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.085984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.085991Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2024-11-21T17:54:39.086005Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2024-11-21T17:54:39.086012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T17:54:39.086018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2024-11-21T17:54:39.086032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:485:2436] message: TxId: 108 2024-11-21T17:54:39.086037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T17:54:39.086040Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2024-11-21T17:54:39.086043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2024-11-21T17:54:39.086068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:54:39.086470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T17:54:39.086478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:855:2782] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2024-11-21T17:54:39.087125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:39.087159Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 
2024-11-21T17:54:39.087230Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:39.087632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:39.087655Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2024-11-21T17:54:39.087718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2024-11-21T17:54:39.087722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2024-11-21T17:54:39.087782Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2024-11-21T17:54:39.087795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2024-11-21T17:54:39.087797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:893:2820] TestWaitNotification: OK eventTxId 109 >> TPersQueueTest::SrcIdCompatibility [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:36.933126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:36.933146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.933149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:36.933152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:36.933162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:36.933164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:36.933170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:36.933231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:36.940040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:36.940059Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:36.941973Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:36.942452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:36.942476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:36.943422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:36.943727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:36.943791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.943854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:36.944643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.944847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.944853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.944878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:36.944883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.944887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:36.944896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.945789Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:36.955818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:36.955864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.955899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:36.955948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:36.955952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.956396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.956410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:36.956431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T17:54:36.956436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:36.956438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:36.956441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:36.956666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.956671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:36.956673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:36.956873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.956879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.956882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.956886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.957247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:36.957509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:36.957538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:36.957637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:36.957651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:36.957655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.957686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:36.957690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:36.957706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.957715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T17:54:36.957952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:36.957956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:36.957980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:36.957983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:36.958037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:36.958041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:36.958048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:36.958051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.958054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:36.958057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:36.958061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:36.958063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:36.958069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:36.958072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:36.958074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:36.958267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.958274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:36.958277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:36.958280Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:36.958282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:36.958290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
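This block is the output of TStoragePoolsQuotasTest::DifferentQuotasInteraction, which exercises the DatabaseQuotas printed in the DescribeScheme result further below: an overall hard/soft pair (2800/2200 bytes) plus per-pool-kind pairs for fast_kind (600/500) and large_kind (2200/1700). A conceptual sketch of how such hard/soft pairs are commonly evaluated, written under the assumption that hard quotas switch writes off and soft quotas switch them back on; the exact YDB rules may differ:

#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct TQuota {
    uint64_t HardBytes;
    uint64_t SoftBytes;
};

// Returns the new "writes disabled" state given current usage and the previous state.
bool EvaluateWritesDisabled(bool wasDisabled,
                            uint64_t totalUsage,
                            const TQuota& totalQuota,
                            const std::map<std::string, uint64_t>& poolUsage,
                            const std::map<std::string, TQuota>& poolQuotas) {
    bool overHard = totalUsage > totalQuota.HardBytes;
    bool underSoft = totalUsage <= totalQuota.SoftBytes;
    for (const auto& [kind, quota] : poolQuotas) {
        const auto it = poolUsage.find(kind);
        const uint64_t used = (it == poolUsage.end()) ? 0 : it->second;
        overHard = overHard || used > quota.HardBytes;
        underSoft = underSoft && used <= quota.SoftBytes;
    }
    if (overHard) return true;    // breached a hard quota -> disable writes
    if (underSoft) return false;  // everything back under soft quotas -> enable writes
    return wasDisabled;           // inside the hysteresis band -> keep the previous state
}

int main() {
    // Numbers taken from the quotas in the log: total 2800/2200,
    // fast_kind 600/500, large_kind 2200/1700 (bytes).
    const TQuota total{2800, 2200};
    const std::map<std::string, TQuota> pools{{"fast_kind", {600, 500}},
                                              {"large_kind", {2200, 1700}}};
    bool disabled = false;
    disabled = EvaluateWritesDisabled(disabled, 3000, total, {{"fast_kind", 700}}, pools);
    std::cout << std::boolalpha << disabled << '\n';  // true: hard quotas exceeded
    disabled = EvaluateWritesDisabled(disabled, 2000, total, {{"fast_kind", 400}}, pools);
    std::cout << disabled << '\n';                    // false: back under soft quotas
    return 0;
}

Keeping the soft threshold below the hard one gives hysteresis, so the database does not flap between enabled and disabled while usage hovers near the limit.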
Stats { ShardId: 72075186233409548 CpuTimeUsec: 153 } } 2024-11-21T17:54:39.117548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:54:39.117559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2024-11-21T17:54:39.117571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:54:39.117575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:54:39.117640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.117645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:54:39.117651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:54:39.117654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2024-11-21T17:54:39.117661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:54:39.117683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2024-11-21T17:54:39.117697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:39.117705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:39.118126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.118355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.118397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:39.118402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:39.118447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:39.118475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:39.118480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T17:54:39.118485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, 
txId: 103, path id: 3 2024-11-21T17:54:39.118555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.118562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:54:39.118586Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.118589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:54:39.118592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T17:54:39.118711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:39.118718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:39.118721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:54:39.118724Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T17:54:39.118727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:39.118842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:39.118849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:39.118851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:54:39.118853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:54:39.118857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:39.118866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T17:54:39.119458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:54:39.119467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:39.119542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:39.119569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:54:39.119572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:39.119575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:54:39.119586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2024-11-21T17:54:39.119590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:39.119593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:54:39.119597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:54:39.119609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:39.119669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:39.119672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:39.119777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:39.120048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:39.120256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:39.120264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T17:54:39.120281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:39.120285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:730:2666] 2024-11-21T17:54:39.120415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:39.120576Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:39.120603Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 36us result status StatusSuccess 2024-11-21T17:54:39.120669Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[pg-unknown-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_language-default.txt-Debug] >> test.py::test[window-presort_window_order_by_table-default.txt-Debug] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] >> test.py::test[key_filter-dict_contains_optional--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Analyze] >> TPersQueueTest::TestReadPartitionStatus [GOOD] >> TPersQueueTest::TxCounters >> test.py::test[view-view_with_lambda_process--Debug] [GOOD] >> test.py::test[view-view_with_lambda_process--Plan] [GOOD] >> test.py::test[view-view_with_lambda_process--Results] >> test.py::test[sampling-join_right_sample-default.txt-Debug] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Plan] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Results] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] Test command err: === Start server === Server->StartServer(false); 2024-11-21T17:51:57.182005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791816596552001:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.182085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.184983Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791815295793746:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/002027/r3tmp/tmp38BXYo/pdisk_1.dat 2024-11-21T17:51:57.207514Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:57.208286Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.210162Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:57.234846Z node 1 :IMPORT WARN: Table 
profiles were not loaded TServer::EnableGrpc on GrpcPort 3293, node 1 2024-11-21T17:51:57.246883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/002027/r3tmp/yandex5yezKU.tmp 2024-11-21T17:51:57.246922Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/002027/r3tmp/yandex5yezKU.tmp 2024-11-21T17:51:57.247009Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/002027/r3tmp/yandex5yezKU.tmp 2024-11-21T17:51:57.247058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:57.250822Z INFO: TTestServer started on Port 6907 GrpcPort 3293 TClient is connected to server localhost:6907 PQClient connected to localhost:3293 === TenantModeEnabled() = 0 === Init PQ - start server on port 3293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:51:57.282671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.282715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.284754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:57.308498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.308530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.310509Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:57.316809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:57.321844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:57.321906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.321961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:51:57.322025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:57.322032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.322604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:57.322623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:57.322707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.322714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 
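The AlterSubDomain entries just below walk txId 281474976720657 through the numeric stages this log keeps printing: 2 -> 3 once TCreateParts finds no shards to create, 3 -> 128 after TConfigureParts, and 128 -> 240 when the coordinator's plan step arrives and TDone publishes the result. A loose sketch that only labels those visible codes; the names are informal, not the real NSchemeShard TTxState enum:

#include <iostream>
#include <utility>
#include <vector>

enum class EStage : int {
    CreateParts = 2,     // "TCreateParts ... ProgressState"
    ConfigureParts = 3,  // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose = 128,       // "NSubDomainState::TPropose ProgressState", waits for the plan step
    Done = 240,          // "TDone ... ProgressState", operation finished and published
};

int main() {
    // Each ProgressState handler seen in the log moves the operation one step forward.
    const std::vector<std::pair<EStage, EStage>> transitions = {
        {EStage::CreateParts, EStage::ConfigureParts},
        {EStage::ConfigureParts, EStage::Propose},
        {EStage::Propose, EStage::Done},
    };
    for (const auto& [from, to] : transitions) {
        std::cout << "Change state " << static_cast<int>(from)
                  << " -> " << static_cast<int>(to) << '\n';
    }
    return 0;
}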
2024-11-21T17:51:57.322716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2024-11-21T17:51:57.322719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 2024-11-21T17:51:57.323055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.323061Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:57.323063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2024-11-21T17:51:57.323330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.323340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.323344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:51:57.323349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:51:57.323948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:57.324185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:51:57.324191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2024-11-21T17:51:57.324195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2024-11-21T17:51:57.324375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2024-11-21T17:51:57.324404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:51:57.327125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211517372, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:57.328517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 7439791816596552541 RawX2: 4294969645 } } Step: 1732211517372 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:51:57.328531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:51:57.328631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2024-11-21T17:51:57.328639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet 72057594046644480 2024-11-21T17:51:57.328674Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:51:57.328694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:51:57.330676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:57.330689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:51:57.330735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:57.330738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439791816596552583:2380], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2024-11-21T17:51:57.330745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.330751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2024-11-21T17:51:57.330766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2024-11-21T17:51:57.330769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:51:57.330774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2024-11-21T17:51:57.330782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2024-11-21T17:51:57.330785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2024-11-21T17:51:57.330787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2024-11-21T17:51:57.330800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:51:57.330804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2024-11-21T17:51:57.330806Z node 1 :FLAT_TX_ ... 
x_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T17:54:39.326920Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439792512275509059:2658] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T17:54:39.327200Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439792512275509059:2658] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) Select from the table 2024-11-21T17:54:39.328920Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439792512275509059:2658] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) Update the table 2024-11-21T17:54:39.331177Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439792512275509059:2658] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T17:54:39.331187Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439792512275509059:2658] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) ReplyResult: Partition=7, SeqNo=0 2024-11-21T17:54:39.331189Z node 27 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [27:7439792512275509059:2658] (SourceId=test-src-id-compat2, PreferedPartition=(NULL)) Start idle 2024-11-21T17:54:39.331193Z node 27 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 5 sessionId: partition: 7 expectedGeneration: (NULL) 2024-11-21T17:54:39.331288Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) TEvClientConnected Status OK, TabletId: 72075186224037910, NodeId 27, Generation: 1 2024-11-21T17:54:39.331299Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:39.331303Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [27:7439792512275509082:2658], now have 1 active actors on pipe 2024-11-21T17:54:39.331308Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T17:54:39.331312Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T17:54:39.331330Z node 27 :PERSQUEUE INFO: new Cookie test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0 generated for partition 7 topic 'rt3.dc1--account--topic100' owner test-src-id-compat2 2024-11-21T17:54:39.331350Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 7 2024-11-21T17:54:39.331367Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2024-11-21T17:54:39.331392Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T17:54:39.331397Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T17:54:39.331408Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2024-11-21T17:54:39.331433Z node 27 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0 2024-11-21T17:54:39.331633Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732211679331 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T17:54:39.331658Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. Init response: session_id: "test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T17:54:39.331734Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write 1 messages with Id from 1 to 1 2024-11-21T17:54:39.331820Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session: try to update token 2024-11-21T17:54:39.331826Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Send 1 message(s) (0 left), first sequence number is 1 2024-11-21T17:54:39.331958Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:54:39.332024Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T17:54:39.332051Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T17:54:39.332060Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T17:54:39.332078Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2024-11-21T17:54:39.332096Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:39.332129Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2024-11-21T17:54:39.332135Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2024-11-21T17:54:39.332144Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2024-11-21T17:54:39.332175Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] 
Topic 'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2024-11-21T17:54:39.332205Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2024-11-21T17:54:39.332251Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1732211679331 2024-11-21T17:54:39.332272Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:54:39.332795Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 2024-11-21T17:54:39.332802Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. Partition: 7 2024-11-21T17:54:39.332806Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:54:39.332812Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2024-11-21T17:54:39.332815Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:54:39.332817Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:54:39.332822Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:39.332834Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T17:54:39.332840Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:54:39.332842Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
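The write path above ends with the partition persisting SeqNo 1 for source id test-src-id-compat2 at offset 0, and the response below reports already_written: false. A sketch of the per-SourceId deduplication that field implies: the partition remembers the highest SeqNo stored per source id and acknowledges repeats without writing them again. This is a hypothetical structure, not the actual NPQ partition code:

#include <cstdint>
#include <iostream>
#include <string>
#include <unordered_map>

struct TWriteResult {
    uint64_t Offset;
    bool AlreadyWritten;
};

class TPartitionSketch {
public:
    TWriteResult Write(const std::string& sourceId, uint64_t seqNo) {
        auto& state = Sources_[sourceId];  // zero-initialized for a new source id
        if (state.MaxSeqNo != 0 && seqNo <= state.MaxSeqNo) {
            return {state.LastOffset, true};  // duplicate: report the stored offset, write nothing
        }
        state.MaxSeqNo = seqNo;
        state.LastOffset = NextOffset_++;     // new message: assign the next offset
        return {state.LastOffset, false};
    }

private:
    struct TSourceState {
        uint64_t MaxSeqNo = 0;
        uint64_t LastOffset = 0;
    };
    uint64_t NextOffset_ = 0;
    std::unordered_map<std::string, TSourceState> Sources_;
};

int main() {
    TPartitionSketch partition;
    const auto first = partition.Write("test-src-id-compat2", 1);
    const auto retry = partition.Write("test-src-id-compat2", 1);  // same SeqNo resent
    std::cout << first.Offset << ' ' << std::boolalpha << first.AlreadyWritten << '\n';  // 0 false
    std::cout << retry.Offset << ' ' << retry.AlreadyWritten << '\n';                    // 0 true
    return 0;
}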
2024-11-21T17:54:39.332844Z node 27 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:54:39.332853Z node 27 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1732211679331 queuesize 0 startOffset 0 2024-11-21T17:54:39.332965Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { } 2024-11-21T17:54:39.332973Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session: acknoledged message 1 2024-11-21T17:54:39.333025Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session: close. Timeout = 0 ms 2024-11-21T17:54:39.333030Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session will now close 2024-11-21T17:54:39.333033Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session: aborting 2024-11-21T17:54:39.333119Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:54:39.333122Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0] Write session: destroy 2024-11-21T17:54:39.333213Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0 grpc read done: success: 0 data: 2024-11-21T17:54:39.333221Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0 grpc read failed 2024-11-21T17:54:39.333224Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0 grpc closed 2024-11-21T17:54:39.333228Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|65ab8f43-b4c8ee6f-ba60b61c-308c730a_0 is DEAD 2024-11-21T17:54:39.333403Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:54:39.333441Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:54:39.333454Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [27:7439792512275509082:2658] destroyed 2024-11-21T17:54:39.333471Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. 
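Before that write, the TPartitionChooser trace earlier in this block selected partition 7 for the source id by consulting the /Root/PQ/SourceIdMeta2 table (a Select followed by an Update of AccessTime). The sketch below captures the general idea only: hash a new source id to a partition, remember the choice, and reuse the stored mapping on later sessions. The hash and the in-memory table standing in for SourceIdMeta2 are made up; YDB's real chooser and hash differ:

#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>

class TPartitionChooserSketch {
public:
    explicit TPartitionChooserSketch(uint32_t partitionCount)
        : PartitionCount_(partitionCount) {}

    // Returns the partition for a source id, creating and storing a mapping on first
    // use (the log's "Select from the table" / "Update the table" steps).
    uint32_t Choose(const std::string& sourceId) {
        if (const auto it = Table_.find(sourceId); it != Table_.end()) {
            return it->second;  // an existing mapping wins, for compatibility
        }
        const uint32_t partition =
            static_cast<uint32_t>(std::hash<std::string>{}(sourceId) % PartitionCount_);
        Table_.emplace(sourceId, partition);
        return partition;
    }

private:
    uint32_t PartitionCount_;
    std::unordered_map<std::string, uint32_t> Table_;  // stand-in for SourceIdMeta2
};

int main() {
    TPartitionChooserSketch chooser(10);  // the topic in the log has at least 8 partitions
    const auto first = chooser.Choose("test-src-id-compat2");
    const auto second = chooser.Choose("test-src-id-compat2");
    std::cout << first << ' ' << second << '\n';  // same partition both times
    return 0;
}

Keeping previously stored mappings authoritative is presumably what the SrcIdCompatibility test is checking: a producer that wrote under an older source-id scheme keeps landing on the same partition.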
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:37.122942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:37.122963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.122966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:37.122970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:37.122980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:37.122983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:37.122989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:37.123046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:37.132562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:37.132585Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:37.135279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:37.136006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:37.136051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:37.137337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:37.137494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:37.137582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.137673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:37.138460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.138674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.138681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.138708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:37.138712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.138716Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:37.138725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.139574Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:37.154446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:37.154522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.154600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:37.154670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:37.154677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.155326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.155348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:37.155391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.155399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:37.155404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:37.155408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:37.155712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.155720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:37.155724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:37.155970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.155976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.155982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.155987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.156556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:37.156943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:37.156990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:37.157156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:37.157179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:37.157186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.157235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:37.157241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:37.157268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.157279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:37.157638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:37.157645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:37.157681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:37.157686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:37.157760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:37.157766Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:37.157778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:37.157782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.157787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:37.157792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:37.157796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:37.157799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2024-11-21T17:54:37.157809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:37.157815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:37.157818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:37.158095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.158112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:37.158117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:37.158121Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:37.158126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:37.158137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... Status: COMPLETE TxId: 104 Step: 10100 OrderId: 104 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 127 } } 2024-11-21T17:54:40.340717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:54:40.340721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2024-11-21T17:54:40.340730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 522 RawX2: 4294969767 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2024-11-21T17:54:40.340734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2024-11-21T17:54:40.340800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:40.340804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet72075186233409546 2024-11-21T17:54:40.340810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:54:40.340813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2024-11-21T17:54:40.340820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet72075186233409546 2024-11-21T17:54:40.340840Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2024-11-21T17:54:40.340855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2024-11-21T17:54:40.340863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T17:54:40.341131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:40.341290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:40.341314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T17:54:40.341317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T17:54:40.341353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2024-11-21T17:54:40.341374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T17:54:40.341377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 1 2024-11-21T17:54:40.341379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 104, path id: 2 2024-11-21T17:54:40.341425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:40.341432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2024-11-21T17:54:40.341442Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:40.341446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2024-11-21T17:54:40.341449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T17:54:40.341568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:40.341577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:40.341580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T17:54:40.341582Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2024-11-21T17:54:40.341586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2024-11-21T17:54:40.341683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 
72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:40.341690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2024-11-21T17:54:40.341692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2024-11-21T17:54:40.341694Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2024-11-21T17:54:40.341697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T17:54:40.341702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T17:54:40.342160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2024-11-21T17:54:40.342167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2024-11-21T17:54:40.342260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2024-11-21T17:54:40.342286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T17:54:40.342288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:40.342292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T17:54:40.342301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2494] message: TxId: 104 2024-11-21T17:54:40.342304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T17:54:40.342307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T17:54:40.342311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T17:54:40.342321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2024-11-21T17:54:40.342556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T17:54:40.342570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T17:54:40.342638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T17:54:40.342725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2024-11-21T17:54:40.342988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T17:54:40.342994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:442:2396], at schemeshard: 72075186233409546, txId: 0, path id: 1 2024-11-21T17:54:40.343027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T17:54:40.343031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:751:2671] 2024-11-21T17:54:40.343116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2024-11-21T17:54:40.343243Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2024-11-21T17:54:40.343265Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 27us result status StatusSuccess 2024-11-21T17:54:40.343322Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> test.py::test[pg_catalog-pg_language-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_language-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_language-default.txt-Results] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Analyze] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Debug] >> test.py::test[view-view_with_lambda_process--Results] [GOOD] >> test.py::test[weak_field-weak_field--Debug] >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> test.py::test[pg_catalog-pg_language-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_stat_gssapi-default.txt-Debug] >> test.py::test[window-presort_window_order_by_table-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[window-presort_window_order_by_table-default.txt-Plan] [GOOD] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2024-11-21T17:54:12.033510Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792398057656567:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:12.033684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001114/r3tmp/tmpEBK2Ey/pdisk_1.dat 2024-11-21T17:54:12.092308Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16839, node 1 2024-11-21T17:54:12.114844Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:12.114856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:12.114858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:12.114893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:54:12.133780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:12.133819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:12.135237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:12.143296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:12.144204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:12.144218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:12.144821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:12.144869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:12.144881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T17:54:12.145256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:12.145265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:54:12.145383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:54:12.145604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:12.146488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211652192, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:12.146499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:54:12.146552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:54:12.146909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:12.146945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:12.146958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:54:12.146972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:54:12.146984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:54:12.146992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:54:12.147356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:54:12.147369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:54:12.147372Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:12.147384Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:54:12.323465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/Foo, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:12.323538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:12.324623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/Foo 2024-11-21T17:54:12.324699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:12.324776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:12.324798Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:12.324877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T17:54:12.325000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:12.325011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:12.325015Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T17:54:12.325052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:12.325060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:12.325061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T17:54:12.326126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211652374, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:12.326145Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211652374, at schemeshard: 72057594046644480 2024-11-21T17:54:12.326170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T17:54:12.326623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:12.326678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:12.326689Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T17:54:12.326709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T17:54:12.326722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976710658:0 2024-11-21T17:54:12.326742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T17:54:12.326854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:12.326873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:12.326876Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T17:54:12.326908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T17:54:12.326911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T17:54:12.326912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T17:54:12.326916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T17:54:12.328825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792398057657525:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:12.328860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:12.358892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo/Test, opId: 281474976710659:0, at schemeshard: 72057594046644480 2 ... e operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:40.470652Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T17:54:40.470754Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.470765Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.470768Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T17:54:40.470796Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.470802Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.470802Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T17:54:40.470812Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.470818Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.470819Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T17:54:40.470828Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.470833Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.470834Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T17:54:40.470843Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.470849Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.470850Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T17:54:40.471333Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211680521, transactions 
count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:40.471342Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211680521, at schemeshard: 72057594046644480 2024-11-21T17:54:40.471358Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T17:54:40.471376Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211680521, at schemeshard: 72057594046644480 2024-11-21T17:54:40.471385Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T17:54:40.471390Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732211680521, at schemeshard: 72057594046644480 2024-11-21T17:54:40.471397Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T17:54:40.471403Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732211680521 2024-11-21T17:54:40.471410Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T17:54:40.471676Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:40.471750Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:40.471775Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T17:54:40.471793Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T17:54:40.471831Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T17:54:40.471840Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T17:54:40.471851Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:54:40.471861Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T17:54:40.471869Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T17:54:40.471872Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T17:54:40.471877Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:54:40.471890Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T17:54:40.471897Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T17:54:40.471899Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T17:54:40.471902Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T17:54:40.472069Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.472079Z node 10 
:FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.472081Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T17:54:40.472100Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.472106Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.472106Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T17:54:40.472116Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.472118Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.472119Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T17:54:40.472126Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.472131Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.472133Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T17:54:40.472140Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T17:54:40.472160Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T17:54:40.472165Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T17:54:40.472168Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T17:54:40.472478Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439792515484920400:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T17:54:40.530743Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T17:54:40.530779Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, at schemeshard: 72057594046644480 2024-11-21T17:54:40.531494Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91, operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T17:54:40.541297Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd7xs96m2xww6t7y1b9fcy0x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGVhZjM3NTAtODJiNGM5Mi1lN2RlMjA3Ny1kZjhmNGNmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:54:40.567686Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xs9908b56yen08a3tnjdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGVhZjM3NTAtODJiNGM5Mi1lN2RlMjA3Ny1kZjhmNGNmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-map-dynamic-Debug] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Debug] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-ForceBlocks] >> test.py::test[pg_catalog-pg_stat_gssapi-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_stat_gssapi-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_stat_gssapi-default.txt-Results] >> test.py::test[window-presort_window_order_by_table-default.txt-Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Analyze] >> test.py::test[weak_field-weak_field--Debug] [GOOD] >> test.py::test[weak_field-weak_field--Plan] [GOOD] >> test.py::test[weak_field-weak_field--Results] >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> test.py::test[pg_catalog-pg_stat_gssapi-default.txt-Results] [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-Debug] >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> test.py::test[sampling-map-dynamic-Debug] [GOOD] >> test.py::test[sampling-map-dynamic-Plan] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-map-dynamic-Plan] [GOOD] >> test.py::test[sampling-map-dynamic-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2024-11-21T17:51:35.225822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:51:35.225849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:51:35.225855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00168d/r3tmp/tmpxILOTj/pdisk_1.dat 2024-11-21T17:51:35.310550Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6887, node 1 2024-11-21T17:51:35.404896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:35.404912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:35.404915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:35.404976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:35.409659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:51:35.486094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:35.486127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:35.497907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9763 2024-11-21T17:51:35.906891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:36.769412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:36.769446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:36.803079Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:51:36.803962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:36.859936Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:36.869426Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:51:36.869459Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:51:36.877166Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:51:36.877254Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:51:36.877276Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:51:36.877281Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:51:36.877287Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:51:36.877293Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:51:36.877298Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:51:36.877305Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:51:36.877509Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:51:37.043373Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:51:37.043405Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1816:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:51:37.045000Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1826:2561] 2024-11-21T17:51:37.046397Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1861:2578] 2024-11-21T17:51:37.046439Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:1861:2578], schemeshard id = 72075186224037889 2024-11-21T17:51:37.047246Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared1 2024-11-21T17:51:37.051336Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:51:37.051350Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:51:37.051361Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2024-11-21T17:51:37.052381Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:37.052406Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:37.054235Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:51:37.055741Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:51:37.055774Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:51:37.058888Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:51:37.071100Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:37.115678Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:51:37.211030Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:51:37.388294Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T17:51:38.060625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:51:38.823193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:38.823235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:38.846747Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:38.847624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:38.922211Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:38.930306Z node 2 :STATISTICS INFO: [72075186224038907] OnActivateExecutor 2024-11-21T17:51:38.930339Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Execute 2024-11-21T17:51:38.936590Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Complete 2024-11-21T17:51:38.936777Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Execute 2024-11-21T17:51:38.936797Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:51:38.936802Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ColumnStatistics: column count# 0 2024-11-21T17:51:38.936807Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:51:38.936813Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalOperations: table count# 0 2024-11-21T17:51:38.936826Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:51:38.936834Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Complete 2024-11-21T17:51:38.936962Z node 2 :STATISTICS INFO: [72075186224038907] Subscribed for config changes 2024-11-21T17:51:39.043881Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3031:2537] 2024-11-21T17:51:39.044100Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Execute: database# /Root/Shared2 2024-11-21T17:51:39.045890Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:51:39.045903Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:51:39.045910Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Shared2/.metadata/_statistics 2024-11-21T17:51:39.047386Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038907, at schemeshard: 72075186224038899 2024-11-21T17:51:39.047407Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3075:2560], at schemeshard: 72075186224038899, StatisticsAggregatorId: 72075186224038907, at schemeshard: 72075186224038899 2024-11-21T17:51:39.052858Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3104:2578] 2024-11-21T17:51:39.053032Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectSchemeShard, pipe server id = [2:3104:2578], schemeshard id = 72075186224038899 2024-11-21T17:51:39.067062Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:39.067095Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:39.068889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72075186224038899 2024-11-21T17:51:39.069868Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976730657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224038899 PathId: 3 } 2024-11-21T17:51:39.069894Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976730657 2024-11-21T17:51:39.072792Z node 2 :HIVE WARN: HIVE#72075186224038898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:51:39.084527Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Complete 2024-11-21T17:51:39.084923Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:39.218255Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976730657. Doublechecking... 2024-11-21T17:51:39.356695Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:51:40.338818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:51:40.880521Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:41.160678Z node 3 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T17:51:41.160706Z node 3 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 7 ... 
ISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [3:12322:5520], StatRequests.size() = 1 2024-11-21T17:54:36.843050Z node 3 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:54:36.843072Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:54:36.895484Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:5003:3121], schemeshard count = 1 2024-11-21T17:54:37.288665Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038899 2024-11-21T17:54:37.288696Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 212.000000s, at schemeshard: 72075186224038899 2024-11-21T17:54:37.288816Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038899, stats size# 25 2024-11-21T17:54:37.300470Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2024-11-21T17:54:38.232566Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [3:12397:5551]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:38.232657Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T17:54:38.232664Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [3:12397:5551], StatRequests.size() = 1 2024-11-21T17:54:38.474461Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T17:54:38.474487Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:38.474498Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 3] is data table. 2024-11-21T17:54:38.474503Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T17:54:38.474627Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2024-11-21T17:54:38.475370Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:38.476449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12413:5183], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.476471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12423:5188], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.476482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:38.479776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038899 2024-11-21T17:54:38.492376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12427:5191], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2024-11-21T17:54:38.766467Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12554:5259]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:38.766542Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:54:38.766553Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12556:5261] 2024-11-21T17:54:38.766562Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12556:5261] 2024-11-21T17:54:38.766704Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:12557:5262] 2024-11-21T17:54:38.766754Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectNode, pipe server id = [2:12557:5262], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:54:38.766761Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T17:54:38.766800Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12556:5261], server id = [2:12557:5262], tablet id = 72075186224038907, status = OK 2024-11-21T17:54:38.766831Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:38.766839Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12554:5259], StatRequests.size() = 1 2024-11-21T17:54:38.779730Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTE2YzdmOGItZThiMzIxMWMtYmNlNzU5MTgtMzg1MmMzNmQ=, TxId: 2024-11-21T17:54:38.779751Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTE2YzdmOGItZThiMzIxMWMtYmNlNzU5MTgtMzg1MmMzNmQ=, TxId: 2024-11-21T17:54:38.779876Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2024-11-21T17:54:38.791785Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T17:54:38.791825Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:38.844360Z node 2 :STATISTICS DEBUG: [72075186224038907] EvFastPropagateCheck 2024-11-21T17:54:38.844382Z node 2 :STATISTICS DEBUG: [72075186224038907] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:54:38.896759Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12556:5261], schemeshard count = 1 2024-11-21T17:54:39.239928Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:54:39.261139Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:39.261179Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T17:54:39.292577Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T17:54:39.292601Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 221.000000s, at schemeshard: 72075186224037899 2024-11-21T17:54:39.292657Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2024-11-21T17:54:39.304290Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:54:39.624853Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12631:5573]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:39.624967Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T17:54:39.624977Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12631:5573], StatRequests.size() = 1 2024-11-21T17:54:40.839482Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12698:5592]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:40.839572Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T17:54:40.839577Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12698:5592], StatRequests.size() = 1 2024-11-21T17:54:41.122165Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T17:54:41.122199Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:41.142935Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038909 2024-11-21T17:54:41.142955Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224038909 2024-11-21T17:54:41.143060Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038909, stats size# 26 2024-11-21T17:54:41.154521Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2024-11-21T17:54:41.662234Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 3 2024-11-21T17:54:41.662414Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:54:41.662561Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T17:54:41.694680Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:41.694698Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:41.694705Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2024-11-21T17:54:41.694708Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:54:41.694768Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared1 2024-11-21T17:54:41.695361Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:41.699400Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=NDYwNzQ5MTgtNzE2MTliMzQtYjk2YWViNzYtYThiOGY4Mjg=, TxId: 2024-11-21T17:54:41.699415Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=NDYwNzQ5MTgtNzE2MTliMzQtYjk2YWViNzYtYThiOGY4Mjg=, TxId: 2024-11-21T17:54:41.699621Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:41.710915Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:54:41.710930Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:42.017407Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:12790:5634]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:42.017486Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T17:54:42.017490Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:12790:5634], StatRequests.size() = 1 2024-11-21T17:54:42.017608Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:12792:5333]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:42.018401Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:54:42.018436Z node 2 :STATISTICS DEBUG: [72075186224038907] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2024-11-21T17:54:42.018439Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T17:54:42.018481Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:42.018490Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:12792:5333], StatRequests.size() = 1 >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Plan] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] >> test.py::test[window-win_func_lead_lag_worm--Analyze] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Debug] >> test.py::test[weak_field-weak_field--Results] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Debug] >> test.py::test[pg_catalog-pg_tables-default.txt-Debug] [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-Plan] [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-Results] >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> test.py::test[schema-fake_column-default.txt-Debug] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Analyze] >> TOlapReboots::CreateTable [GOOD] >> test.py::test[pg_catalog-pg_tables-default.txt-Results] [GOOD] >> test.py::test[pg_duplicated-qualified_star_from_using-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:54:17.399584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:17.399605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:17.399610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:17.399615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:17.399628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:17.399633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:17.399641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:17.399722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:17.409736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:17.409756Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:54:17.411811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:17.411897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:17.411926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:17.414277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:17.414350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:17.414446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:17.414632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:17.415237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:17.415471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:17.415481Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:17.415492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:17.415499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:17.415504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:17.415546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:54:17.416737Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:54:17.431435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:17.431515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:17.431573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:17.431631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:17.431639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:17.432346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:17.432370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:17.432429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:17.432439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:17.432444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:17.432448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:17.432831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:17.432841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:17.432845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 
-> 128 2024-11-21T17:54:17.433132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:17.433139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:17.433145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:17.433151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:17.433666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:17.433988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:17.434028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:17.434211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:17.434234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:17.434247Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:17.434301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:17.434308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:17.434333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:17.434343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:17.434746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:17.434755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:17.434794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:17.434800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:17.434870Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:17.434877Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:17.434887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:17.434891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:17.434897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:17.434903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:17.434909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:17.434915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:17.434925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:17.434930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:17.434935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... hDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2024-11-21T17:54:43.579913Z node 104 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:43.579918Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:43.579944Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:54:43.579960Z node 104 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:43.579963Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [104:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:54:43.579966Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [104:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2024-11-21T17:54:43.580019Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:43.580025Z node 104 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:54:43.580030Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:54:43.580101Z node 104 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:43.580108Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:43.580110Z node 104 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, 
at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:54:43.580113Z node 104 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:54:43.580115Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:43.580310Z node 104 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:43.580318Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:43.580320Z node 104 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:54:43.580323Z node 104 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T17:54:43.580325Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:54:43.580332Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:54:43.580442Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:54:43.580450Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697639 2024-11-21T17:54:43.580459Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2024-11-21T17:54:43.580672Z node 104 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2024-11-21T17:54:43.580689Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:43.580696Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2024-11-21T17:54:43.580774Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:54:43.580933Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:54:43.581092Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:43.591574Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000004 2024-11-21T17:54:43.591583Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:43.591594Z node 104 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000004 2024-11-21T17:54:43.591600Z node 104 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000004 2024-11-21T17:54:43.591636Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T17:54:43.591639Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:43.591644Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:54:43.592011Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:43.592041Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:43.592051Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:43.592055Z node 104 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:54:43.592064Z node 104 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:54:43.592067Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:54:43.592070Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:54:43.592078Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [104:362:2342] message: TxId: 1003 2024-11-21T17:54:43.592082Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:54:43.592088Z node 104 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:54:43.592090Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:54:43.592111Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:54:43.592366Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:54:43.592374Z node 104 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [104:417:2396] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:54:43.592441Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:43.592477Z node 104 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 41us result status StatusSuccess 2024-11-21T17:54:43.592580Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[window-win_func_lead_lag_worm--Debug] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Analyze] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Debug] >> TPQTest::TestPQReadAhead [GOOD] >> TOlapReboots::CreateMultipleTables [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Debug] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Plan] [GOOD] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] >> test.py::test[pg_duplicated-qualified_star_from_using-default.txt-Debug] [GOOD] >> test.py::test[pg_duplicated-qualified_star_from_using-default.txt-Plan] [GOOD] >> test.py::test[pg_duplicated-qualified_star_from_using-default.txt-Results] >> test.py::test[schema-fake_column-default.txt-Debug] [GOOD] >> test.py::test[schema-fake_column-default.txt-Plan] [GOOD] >> test.py::test[schema-fake_column-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQReadAhead [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] 
sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:33.383649Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:33.383670Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:33.387848Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:33.390363Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2024-11-21T17:53:33.390592Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:33.391176Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:33.396137Z node 1 :PERSQUEUE INFO: new Cookie default|7fe3c70f-76a5aad-d42ac866-b43bcb22_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to 
EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to 
EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX C ... :TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [34:175:2190] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [34:243:2244] sender: [34:354:2057] recipient: [34:14:2061] 2024-11-21T17:54:44.044546Z node 34 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 size 8365317 2024-11-21T17:54:44.044568Z node 34 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 12 size 7877895 Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] 2024-11-21T17:54:44.149535Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:44.149550Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [35:147:2057] recipient: [35:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [35:147:2057] recipient: [35:145:2168] Leader for TabletID 72057594037927938 is [35:151:2172] sender: [35:152:2057] recipient: [35:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:175:2057] recipient: [35:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:44.152140Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:44.152305Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 35 actor [35:173:2188] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 35 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 35 ReadRuleGenerations: 35 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 35 Important: false } Consumers { Name: "aaa" Generation: 35 Important: true } 2024-11-21T17:54:44.152391Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:182:2195] 2024-11-21T17:54:44.152782Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [35:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:54:44.153138Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:183:2196] 2024-11-21T17:54:44.153411Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [35:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:44.157548Z node 35 :PERSQUEUE INFO: new Cookie 
default|a3a8df89-81c9401d-703dfe23-d04ec6b1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [35:173:2188] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] 2024-11-21T17:54:44.381498Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:44.381515Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:147:2057] recipient: [36:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:147:2057] recipient: [36:145:2168] Leader for TabletID 72057594037927938 is [36:151:2172] sender: [36:152:2057] recipient: [36:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:177:2057] recipient: [36:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:44.384696Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:54:44.384841Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 36 actor [36:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 36 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 36 ReadRuleGenerations: 36 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "aaa" Generation: 36 Important: true } 2024-11-21T17:54:44.384922Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: 
StateInit] bootstrapping 0 [36:184:2197] 2024-11-21T17:54:44.385349Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [36:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:54:44.385741Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [36:185:2198] 2024-11-21T17:54:44.386001Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [36:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:44.389898Z node 36 :PERSQUEUE INFO: new Cookie default|59d58365-63209ece-c3a2345e-fd8103d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [36:175:2190] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:54:21.209586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:21.209608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:21.209613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:21.209618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:21.209632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:21.209636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:21.209646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:21.209719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:21.221737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:21.221760Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:54:21.223962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:21.224047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:21.224077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:21.226644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:21.226718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:21.226821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:21.226987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:21.227592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:21.227837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:21.227847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:21.227859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:21.227866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:21.227872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:21.227910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:54:21.229185Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:54:21.245178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:21.245264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:21.245319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:21.245375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:21.245383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:21.246008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:21.246029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:21.246079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:21.246088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:21.246093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:21.246097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:21.246430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:21.246438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:21.246443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:21.246748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:21.246755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:21.246761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:21.246767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:21.247317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:21.247662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg 
type: 269090816 2024-11-21T17:54:21.247704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:21.247883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:21.247905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:21.247917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:21.247970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:21.247976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:21.247999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:21.248009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:21.248352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:21.248363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:21.248401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:21.248407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:21.248474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:21.248481Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:21.248490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:21.248494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:21.248500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:21.248505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:21.248509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:21.248513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:21.248523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:21.248529Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:21.248533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... Id: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:44.583907Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:44.583909Z node 89 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:54:44.583911Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2024-11-21T17:54:44.583913Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:54:44.583917Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:54:44.584107Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:54:44.584117Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:5 msg type: 268697639 2024-11-21T17:54:44.584125Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2024-11-21T17:54:44.584307Z node 89 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2024-11-21T17:54:44.584324Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2024-11-21T17:54:44.584332Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2024-11-21T17:54:44.584435Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:54:44.584544Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:54:44.584588Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:44.595176Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000005 2024-11-21T17:54:44.595193Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:44.595207Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 5000005 2024-11-21T17:54:44.595213Z node 89 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1003 MinStep: 0 Step: 
5000005 2024-11-21T17:54:44.595263Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T17:54:44.595266Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:44.595272Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:54:44.595697Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:44.595720Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:44.595732Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:44.595737Z node 89 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:54:44.595749Z node 89 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:54:44.595751Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:54:44.595755Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:54:44.595764Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [89:357:2337] message: TxId: 1003 2024-11-21T17:54:44.595768Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:54:44.595771Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:54:44.595777Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:54:44.595798Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:54:44.596052Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:54:44.596059Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [89:422:2401] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:54:44.596136Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:44.596181Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 51us result status StatusSuccess 2024-11-21T17:54:44.596285Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable1" PathDescription { Self { Name: "ColumnTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable1" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:44.596365Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:44.596379Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 15us result status StatusSuccess 2024-11-21T17:54:44.596412Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable2" PathDescription { Self { Name: "ColumnTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable2" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:35.014106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:35.014128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.014133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:35.014137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:35.014151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:35.014155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:35.014163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:35.014234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:35.022056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:35.022073Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:35.024733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:35.025370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:35.025400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:35.026953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
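Editorial note on the ColumnTableDescription records above: both ColumnTable1 and ColumnTable2 carry the same two-column schema (timestamp Timestamp NOT NULL as the key column, data Utf8) with HASH_FUNCTION_CONSISTENCY_64 sharding on timestamp and a single column shard. The test drives this through the schemeshard API directly; purely as a hedged sketch of the user-facing equivalent (the standalone path is hypothetical and the enclosing OlapStore is ignored), a column table with this schema could be declared in YQL roughly as:

    -- illustrative only; path and settings are not taken from the test
    CREATE TABLE `/MyRoot/ColumnTable1` (
        timestamp Timestamp NOT NULL,
        data Utf8,
        PRIMARY KEY (timestamp)
    )
    PARTITION BY HASH(timestamp)
    WITH (STORE = COLUMN);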
2024-11-21T17:54:35.027154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:35.027246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.027332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:35.028408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.028705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.028712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.028740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:35.028745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.028750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:35.028761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.029797Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:35.042200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:35.042272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.042323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:35.042404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:35.042411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.043147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.043174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:35.043216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.043224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:35.043227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:35.043231Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:35.043584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.043596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:35.043601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:35.043888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.043897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.043902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.043909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.044387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:35.044756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:35.044798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:35.044931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:35.044950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:35.044954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.044991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:35.044995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:35.045018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.045027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:35.045365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:35.045371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:35.045404Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:35.045408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:35.045471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:35.045476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:35.045484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:35.045486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.045490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:35.045493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:35.045497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:35.045499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:35.045506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:35.045510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:35.045512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:35.045717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.045726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:35.045729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:35.045732Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:35.045735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:35.045743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
186233409546 2024-11-21T17:54:43.933641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:43.933652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:54:43.933920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:54:43.933927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:54:43.934153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:43.934171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 378 RawX2: 4294969643 } } Step: 3150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:43.934178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3150, at schemeshard: 72057594046678944 2024-11-21T17:54:43.934212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:43.934223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2024-11-21T17:54:43.934246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:43.934252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:43.934522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:43.934528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:43.934606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:43.934611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:43.934633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:54:43.934638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:43.934651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:43.934654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T17:54:43.934659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T17:54:43.934661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T17:54:43.934668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2024-11-21T17:54:43.934672Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T17:54:43.934680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:54:43.934682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:43.934686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T17:54:43.934689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:54:43.934692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:54:43.934694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:54:43.934710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:43.934718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T17:54:43.934720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2024-11-21T17:54:43.934722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:54:43.934887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:43.934896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:43.934898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:54:43.934901Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:54:43.934903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:43.934952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:43.934955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:43.934960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:54:43.934980Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T17:54:43.935015Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T17:54:43.935024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:43.935030Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:54:43.935033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:54:43.935035Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2024-11-21T17:54:43.935037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:54:43.935042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T17:54:43.935208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:54:43.935245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:54:43.935618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:43.935631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:54:43.935874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:54:43.935887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:54:43.935896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T17:54:43.935958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T17:54:43.935962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T17:54:43.936007Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T17:54:43.936019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:54:43.936022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:714:2635] TestWaitNotification: OK eventTxId 103 2024-11-21T17:54:44.321659Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:44.321754Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 116us result status StatusSuccess 2024-11-21T17:54:44.321836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: 
EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[pg_duplicated-qualified_star_from_using-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Debug] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Debug] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Plan] [SKIPPED] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Results] [SKIPPED] >> test.py::test[pragma-yson_strict_fail--Debug] [SKIPPED] >> test.py::test[pragma-yson_strict_fail--Plan] [SKIPPED] >> test.py::test[pragma-yson_strict_fail--Results] >> test.py::test[key_filter-yql-8663-dedup_ranges--Debug] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--ForceBlocks] >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[schema-read_schema_other--Debug] >> test.py::test[window-win_func_lead_lag_worm--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Plan] [GOOD] >> test.py::test[window-win_func_lead_lag_worm--Results] >> test.py::test[window-all_columns_hide_window_special_ones-default.txt-Results] [GOOD] >> test.py::test[window-rank/nulls-default.txt-Debug] >> TPersQueueTest::TxCounters [GOOD] >> test.py::test[pragma-yson_strict_fail--Results] [GOOD] >> test.py::test[produce-process_multi_in--Debug] [SKIPPED] >> test.py::test[produce-process_multi_in--Plan] [SKIPPED] >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[produce-process_with_udf_rows-default.txt-Debug] [SKIPPED] >> test.py::test[produce-process_with_udf_rows-default.txt-Plan] [SKIPPED] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-Debug] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-Plan] [SKIPPED] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in--Debug] >> test.py::test[key_filter-yql-8663-dedup_ranges--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Plan] [GOOD] >> test.py::test[key_filter-yql-8663-dedup_ranges--Results] >> test.py::test[schema-read_schema_other--Debug] [GOOD] >> test.py::test[schema-read_schema_other--Plan] [GOOD] >> 
test.py::test[schema-read_schema_other--Results] >> test.py::test[window-rank/nulls-default.txt-Debug] [GOOD] >> test.py::test[window-rank/nulls-default.txt-Plan] [GOOD] >> test.py::test[window-rank/nulls-default.txt-Results] >> test.py::test[window-win_func_lead_lag_worm--Results] [GOOD] >> test.py::test[window-win_func_special--Analyze] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2024-11-21T17:51:57.391963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791818691083591:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.392002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.395659Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791815989049117:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:57.425053Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00201d/r3tmp/tmpSA51HS/pdisk_1.dat 2024-11-21T17:51:57.432697Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:51:57.433943Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:57.470273Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:57.491985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.492012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28834, node 1 2024-11-21T17:51:57.493811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:57.497944Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00201d/r3tmp/yandex45B4Di.tmp 2024-11-21T17:51:57.497954Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00201d/r3tmp/yandex45B4Di.tmp 2024-11-21T17:51:57.498015Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00201d/r3tmp/yandex45B4Di.tmp 2024-11-21T17:51:57.498050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:51:57.502583Z INFO: TTestServer started on Port 20775 GrpcPort 28834 TClient is connected to server localhost:20775 PQClient connected to localhost:28834 === TenantModeEnabled() = 0 === Init PQ - start server on port 28834 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:57.531256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T17:51:57.531304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.531350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T17:51:57.531429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:51:57.531437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:51:57.532094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:51:57.532107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T17:51:57.532109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T17:51:57.532517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:51:57.532597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T17:51:57.532610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.532826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:57.532830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:51:57.533374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:51:57.533703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T17:51:57.533764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T17:51:57.534142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:57.534161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:57.534336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211517582, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:51:57.534364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439791818691084144 RawX2: 4294969651 } } Step: 1732211517582 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:51:57.534374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:57.534413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:51:57.534420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:51:57.534443Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:51:57.534453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T17:51:57.534715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:51:57.534722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:51:57.534762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:51:57.534767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439791818691084167:2375], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T17:51:57.534772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:51:57.534775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:51:57.534782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T17:51:57.534788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:51:57.534791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T17:51:57.534793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T17:51:57.534795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:51:57.534796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T17:51:57.534803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T17:51:57.534810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:51:57.534811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T17:51:57.535145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: ... 
on AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:54:45.733677Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:54:45.733684Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7439792537134195785:2448] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:54:45.733691Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T17:54:45.733762Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2024-11-21T17:54:45.733781Z node 32 :PERSQUEUE INFO: new Cookie 123|9d7464d-417618b9-c9cf1849-5c2c5de1_0 generated for partition 0 topic 'topic' owner 123 2024-11-21T17:54:45.733867Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|9d7464d-417618b9-c9cf1849-5c2c5de1_0 2024-11-21T17:54:45.744554Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T17:54:45.744631Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2024-11-21T17:54:45.744648Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7439792537134195791:2450]: Bootstrap 2024-11-21T17:54:45.745009Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7439792537134195791:2450]: Request location 2024-11-21T17:54:45.745067Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7439792537134195800:2451] connected; active server actors: 1 2024-11-21T17:54:45.745080Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2024-11-21T17:54:45.745085Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7439792537134195791:2450]: Got location 2024-11-21T17:54:45.745106Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7439792537134195800:2451] disconnected; active server actors: 1 2024-11-21T17:54:45.745112Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7439792537134195800:2451] disconnected no session 2024-11-21T17:54:45.745598Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|9d7464d-417618b9-c9cf1849-5c2c5de1_0 grpc read done: success: 0 data: 2024-11-21T17:54:45.745604Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|9d7464d-417618b9-c9cf1849-5c2c5de1_0 grpc read failed 2024-11-21T17:54:45.745609Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|9d7464d-417618b9-c9cf1849-5c2c5de1_0 grpc closed 2024-11-21T17:54:45.745611Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|9d7464d-417618b9-c9cf1849-5c2c5de1_0 is DEAD 2024-11-21T17:54:45.745759Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 
2024-11-21T17:54:45.745766Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T17:54:45.745812Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:54:45.745836Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2024-11-21T17:54:45.745847Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:52288 2024-11-21T17:54:45.745849Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:52288 proto=topic topic=topic durationSec=0 2024-11-21T17:54:45.745851Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T17:54:45.745855Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2024-11-21T17:54:45.746013Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T17:54:45.746043Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2024-11-21T17:54:45.746048Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T17:54:45.746049Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T17:54:45.746053Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7439792537134195805:2453] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2024-11-21T17:54:45.746055Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2024-11-21T17:54:45.746152Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2024-11-21T17:54:45.746177Z node 32 :PERSQUEUE INFO: new Cookie 123|879ec448-5240e9fb-515da953-d902407c_0 generated for partition 0 topic 'topic' owner 123 2024-11-21T17:54:45.746254Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 2024-11-21T17:54:45.746514Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:54:45.746564Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:54:45.746624Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:54:45.746652Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2024-11-21T17:54:45.746675Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T17:54:45.746804Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=ZmU2YjU4Y2QtZmYxZGY0MzYtYmY3MWJiNWUtMWYyNWRmYjM= TxId: 01jd7xseax709zpcn7tdv2afrs WriteId: {32, 281474976715674} 2024-11-21T17:54:45.747227Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715674}, 100000}, State: StateInit] bootstrapping {0, {32, 281474976715674}, 100000} [32:7439792537134195817:2455] 2024-11-21T17:54:45.747508Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976715674}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976715674}, 100000} generation 1 [32:7439792537134195817:2455] 2024-11-21T17:54:45.747538Z node 32 :PERSQUEUE INFO: new Cookie 123|18f37868-c4e2b29b-f3eba704-65f0a4c9_0 generated for partition {0, {32, 281474976715674}, 100000} topic 'topic' owner 123 2024-11-21T17:54:45.747764Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:45.747772Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:45.747775Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:45.747777Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:45.749984Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:45.750403Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:45.750412Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 
2024-11-21T17:54:45.750496Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:54:45.847021Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 grpc read done: success: 0 data: 2024-11-21T17:54:45.847037Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 grpc read failed 2024-11-21T17:54:45.847043Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 grpc closed 2024-11-21T17:54:45.847046Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|879ec448-5240e9fb-515da953-d902407c_0 is DEAD 2024-11-21T17:54:45.847388Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:54:45.847401Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:54:45.848417Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 281474976715675 Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=1000: 0
    bin=10000: 0
    bin=180000: 0
    bin=200: 0
    bin=2000: 3
    bin=30000: 0
    bin=500: 0
    bin=5000: 1
    bin=60000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=10240: 2
    bin=102400: 0
    bin=1048576: 0
    bin=10485760: 0
    bin=20480: 1
    bin=204800: 0
    bin=2097152: 0
    bin=5120: 0
    bin=51200: 0
    bin=524288: 0
    bin=5242880: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=10: 0
    bin=100: 0
    bin=1000: 0
    bin=10000: 0
    bin=20: 0
    bin=2500: 0
    bin=5: 0
    bin=50: 0
    bin=500: 0
    bin=5000: 0
    bin=999999: 0
>> test.py::test[key_filter-yql-8663-dedup_ranges--Results] [GOOD]
>> test.py::test[key_filter-yql_5895_or-default.txt-Analyze]
>> test.py::test[schema-read_schema_other--Results] [GOOD]
>> test.py::test[schema-select_all-read_schema-Debug]
>> test.py::test[window-rank/nulls-default.txt-Results] [GOOD]
>> test.py::test[window-rank/unordered--Debug]
>> test.py::test[window-win_func_special--Analyze] [GOOD]
>> test.py::test[window-win_func_special--Debug]
>> ColumnStatistics::CountMinSketchStatistics [GOOD]
>> test.py::test[produce-reduce_multi_in--Debug] [GOOD]
>> test.py::test[produce-reduce_multi_in--Plan] [GOOD]
>> test.py::test[produce-reduce_multi_in--Results]
>> test.py::test[key_filter-yql_5895_or-default.txt-Analyze] [GOOD]
>> test.py::test[key_filter-yql_5895_or-default.txt-Debug]
>> TOlapReboots::DropMultipleTables [GOOD]
>> test.py::test[schema-select_all-read_schema-Debug] [GOOD]
>> test.py::test[schema-select_all-read_schema-Plan] [GOOD]
>> test.py::test[schema-select_all-read_schema-Results]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD]
Test command err: 2024-11-21T17:52:18.121963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:18.122022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:18.122036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001655/r3tmp/tmp1SMP4s/pdisk_1.dat 2024-11-21T17:52:18.232106Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12057, node 1 2024-11-21T17:52:18.342165Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:18.342184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:18.342189Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:18.342275Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:18.348392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:18.424479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:18.424505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:18.436290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2387 2024-11-21T17:52:18.837830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:19.686713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:19.686737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:19.719834Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:19.720648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:19.769324Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:19.778696Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:19.778720Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:19.785360Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:19.785451Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:19.785463Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:19.785467Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:19.785470Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:19.785474Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:19.785477Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:19.785482Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:19.785557Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:19.960550Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:19.960584Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:19.962172Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:52:19.964034Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:52:19.964141Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:52:19.964896Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:52:19.968784Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:19.968800Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:19.968812Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:52:19.971188Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:19.971221Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:19.972521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:19.974063Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:19.974088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:19.976283Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:19.988456Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:20.010521Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:20.125895Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:20.282143Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:52:21.015147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.015178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.017915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:52:21.040923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:21.040972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:52:21.041018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:52:21.041039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:52:21.041066Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:52:21.041081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:52:21.041094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:52:21.041107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:52:21.041124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:52:21.041137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:52:21.041149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:52:21.041162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2223:2806];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:52:21.045980Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:52:21.046005Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:52:21.046017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:52:21.046022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:52:21.046035Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:52:21.046040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:52:21.046047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleI ... pool default not found or you don't have access permissions } 2024-11-21T17:52:22.409885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:22.410712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000011s 2024-11-21T17:52:24.185076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;task_id=5c8e6436-a83111ef-ad4a7574-67aa05ac;fline=with_appended.cpp:80;portions=1,;task_id=5c8e6436-a83111ef-ad4a7574-67aa05ac; 2024-11-21T17:52:32.167822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:52:32.167848Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:33.299806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T17:52:33.299844Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:43.320044Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T17:54:43.320099Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:54:43.320102Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:54:43.320105Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T17:54:44.332252Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T17:54:44.332293Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 182.000000s, at schemeshard: 72075186224037889 2024-11-21T17:54:44.332386Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 51 2024-11-21T17:54:44.343392Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:54:45.281882Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:45.281903Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:45.281912Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T17:54:45.281916Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:45.282044Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T17:54:45.283136Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:45.284035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7279:5510], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:45.284056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7290:5515], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:45.284150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:45.286289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T17:54:45.294883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7293:5518], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T17:54:45.477705Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7410:5580]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:45.477752Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:54:45.477763Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7412:5582] 2024-11-21T17:54:45.477775Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7412:5582] 2024-11-21T17:54:45.477866Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7413:5583] 2024-11-21T17:54:45.477898Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7412:5582], server id = [2:7413:5583], tablet id = 72075186224037897, status = OK 2024-11-21T17:54:45.477920Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:7413:5583], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:54:45.477934Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T17:54:45.477951Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:45.477961Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7410:5580], StatRequests.size() = 1 2024-11-21T17:54:45.492444Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODJkYjQzY2QtOWFhYWNkMGEtNjk4MGY2NDctZjJmZmIzMDM=, TxId: 2024-11-21T17:54:45.492464Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODJkYjQzY2QtOWFhYWNkMGEtNjk4MGY2NDctZjJmZmIzMDM=, TxId: 2024-11-21T17:54:45.492569Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:45.503511Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:45.503524Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:45.585239Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:54:45.585262Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:54:45.626226Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7412:5582], schemeshard count = 1 2024-11-21T17:54:47.273601Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:47.273621Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:47.273629Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 
2024-11-21T17:54:47.273633Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:54:47.274284Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T17:54:47.285780Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T17:54:47.285889Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T17:54:47.285899Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T17:54:47.286104Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T17:54:47.307400Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T17:54:47.307443Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T17:54:47.307585Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7543:5656], server id = [2:7544:5657], tablet id = 72075186224037899, status = OK 2024-11-21T17:54:47.307709Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7543:5656], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T17:54:47.307883Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T17:54:47.307890Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T17:54:47.307920Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T17:54:47.307941Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T17:54:47.307980Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T17:54:47.308010Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7543:5656], server id = [2:7544:5657], tablet id = 72075186224037899 2024-11-21T17:54:47.308013Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:54:47.308436Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T17:54:47.311045Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7561:5674]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:47.311082Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:54:47.311086Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7561:5674], StatRequests.size() = 1 2024-11-21T17:54:47.331226Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjRhNjI4MTctNTVmYTM5MzctNmFmZGM2YTQtZTI3MDgyMjg=, TxId: 2024-11-21T17:54:47.331242Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjRhNjI4MTctNTVmYTM5MzctNmFmZGM2YTQtZTI3MDgyMjg=, TxId: 2024-11-21T17:54:47.331426Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:47.331805Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7569:5401]], 
StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:47.331859Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:54:47.331863Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T17:54:47.332246Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:54:47.332253Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T17:54:47.332259Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T17:54:47.333948Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 |85.8%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:54:10.704182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:10.704205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:10.704211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:10.704215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:10.704249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:10.704254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:10.704262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:10.704338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:10.713355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:10.713374Z node 1 :IMPORT WARN: Table 
profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:54:10.715150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:10.715224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:10.715248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:10.717590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:10.717671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:10.717774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:10.717953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:10.718760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:10.719030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:10.719038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:10.719047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:10.719051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:10.719055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:10.719090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:54:10.720595Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:54:10.735806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:10.735903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:10.735973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:10.736046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:10.736055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:10.736881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:10.736907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:10.736960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:10.736968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:10.736971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:10.736975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:10.737319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:10.737327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:10.737330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:10.737546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:10.737551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:10.737556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:10.737562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:10.737997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:10.738311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:10.738352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:10.738494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:10.738515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:10.738529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:10.738593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2024-11-21T17:54:10.738598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:10.738623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:10.738632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:10.738989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:10.738996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:10.739030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:10.739033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:10.739101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:10.739106Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:10.739114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:10.739117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:10.739121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:10.739124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:10.739127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:10.739129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:10.739137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:10.739141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:10.739143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
eason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:47.718300Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2024-11-21T17:54:47.718628Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:47.718633Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:47.718669Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T17:54:47.718687Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:47.718690Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2024-11-21T17:54:47.718693Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:203:2206], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2024-11-21T17:54:47.718749Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:47.718753Z node 83 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId#1005:0 ProgressState at schemeshard: 72057594046678944 2024-11-21T17:54:47.718757Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId#1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:54:47.718809Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:47.718818Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:47.718822Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:54:47.718826Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T17:54:47.718831Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:54:47.718873Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:47.718882Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:54:47.718884Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:54:47.718887Z node 83 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 11 2024-11-21T17:54:47.718889Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:47.718895Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:54:47.719160Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:54:47.719369Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:54:47.719381Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:54:47.730024Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000007 2024-11-21T17:54:47.730040Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:47.730061Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000007 2024-11-21T17:54:47.730072Z node 83 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 1005 MinStep: 0 Step: 5000007 2024-11-21T17:54:47.730119Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T17:54:47.730125Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:47.730135Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2024-11-21T17:54:47.730143Z node 83 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 1005 2024-11-21T17:54:47.730633Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:47.730865Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:47.730894Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:54:47.730900Z node 83 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId#1005:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:47.730922Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:54:47.730938Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:54:47.730942Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:54:47.730949Z node 83 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T17:54:47.730961Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [83:358:2338] message: TxId: 1005 2024-11-21T17:54:47.730967Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:54:47.730971Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:54:47.730975Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:54:47.730999Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:54:47.731071Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:47.731078Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:54:47.731090Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:47.731640Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:54:47.731653Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [83:523:2501] 2024-11-21T17:54:47.731762Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2024-11-21T17:54:47.731854Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:47.731897Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 48us result status StatusPathDoesNotExist 2024-11-21T17:54:47.731935Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:54:47.732010Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2024-11-21T17:54:47.732028Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 20us result status StatusPathDoesNotExist 2024-11-21T17:54:47.732046Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[schema-select_all-read_schema-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Debug] >> test.py::test[window-win_func_special--Debug] [GOOD] >> test.py::test[window-win_func_special--ForceBlocks] >> test.py::test[key_filter-yql_5895_or-default.txt-Debug] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-ForceBlocks] |85.8%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[produce-reduce_multi_in--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Debug] >> test.py::test[window-win_func_special--ForceBlocks] [GOOD] >> test.py::test[window-win_func_special--Plan] [GOOD] >> test.py::test[window-win_func_special--Results] >> test.py::test[key_filter-yql_5895_or-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Plan] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] >> test.py::test[window-rank/unordered--Debug] [GOOD] >> test.py::test[window-rank/unordered--Plan] [GOOD] >> test.py::test[window-rank/unordered--Results] >> test.py::test[schema-select_all_inferschema_range--Debug] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Plan] [GOOD] >> test.py::test[schema-select_all_inferschema_range--Results] >> BasicStatistics::Simple [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Analyze] >> test.py::test[schema-select_all_inferschema_range--Results] [GOOD] >> test.py::test[schema-select_all_inferschema_range-empty_fail-Debug] [SKIPPED] >> test.py::test[schema-select_all_inferschema_range-empty_fail-Plan] [SKIPPED] >> test.py::test[schema-select_all_inferschema_range-empty_fail-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 2024-11-21T17:52:21.706731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:21.706767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:21.706776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001628/r3tmp/tmp7DEyzj/pdisk_1.dat 2024-11-21T17:52:21.779323Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9313, node 1 2024-11-21T17:52:21.870084Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:21.870101Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:21.870104Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:21.870166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:21.874447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:21.949593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:21.949619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:21.960966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9479 2024-11-21T17:52:22.361409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:23.112786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:23.112812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:23.145866Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:23.146691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:23.196955Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:23.205727Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:23.205752Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:23.213012Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:23.213150Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:23.213171Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:23.213176Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:23.213182Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:23.213188Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:23.213193Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:23.213200Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:23.213302Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:23.388673Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:23.388699Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:23.389947Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:52:23.391686Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:52:23.391796Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:52:23.392393Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:52:23.396032Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:23.396047Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:23.396056Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:52:23.397680Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:23.397711Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:23.398942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:23.400379Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:23.400415Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:23.403099Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:23.415052Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:23.436951Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:23.554350Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:23.709694Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:52:24.427023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.427052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.429295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:52:24.501339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2275:3055], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.501369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.501689Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2280:3059]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:52:24.501717Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:52:24.501724Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2282:3061] 2024-11-21T17:52:24.501732Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2282:3061] 2024-11-21T17:52:24.501840Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2283:2832] 2024-11-21T17:52:24.501891Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2282:3061], server id = [2:2283:2832], tablet id = 72075186224037897, status = OK 2024-11-21T17:52:24.501919Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2283:2832], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:52:24.501934Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T17:52:24.501974Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:52:24.501981Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2280:3059], StatRequests.size() = 1 2024-11-21T17:52:24.503970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2287:3065], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.503996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.504052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2292:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.505053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:52:24.665229Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:52:24.665256Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:52:24.757708Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2282:3061], schemeshard count = 1 2024-11-21T17:52:24.979449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2294:3072], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:52:25.045376Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2414:3155]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:52:25.045421Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:52:25.045427Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2414:3155], StatRequests.size() = 1 2024-11-21T17:52:25.053904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xn4dkazmh7yja79mmcnzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNhODJiZDktNzFkZDllMzctOTAzZWE4YTQtNjQ0NzU5MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:25.078942Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2429:2847]], StatType[ 0 ], StatRequestsCount[ 1 ... 4-11-21T17:54:36.913379Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 115 ] 2024-11-21T17:54:36.913384Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 115, ReplyToActorId = [2:6247:4522], StatRequests.size() = 1 2024-11-21T17:54:38.103170Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 116 ], ReplyToActorId[ [2:6280:4536]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:38.103268Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2024-11-21T17:54:38.103275Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6280:4536], StatRequests.size() = 1 2024-11-21T17:54:39.311676Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6313:4550]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:39.311786Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2024-11-21T17:54:39.311795Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6313:4550], StatRequests.size() = 1 2024-11-21T17:54:39.785480Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:54:40.505675Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6346:4564]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:40.505774Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T17:54:40.505780Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6346:4564], StatRequests.size() = 1 2024-11-21T17:54:41.257888Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T17:54:41.257914Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:54:41.257918Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:54:41.257921Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T17:54:41.965879Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6386:4581]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:41.965960Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T17:54:41.965965Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, 
ReplyToActorId = [2:6386:4581], StatRequests.size() = 1 2024-11-21T17:54:42.492921Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T17:54:42.493066Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:54:42.493144Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:42.556168Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T17:54:42.556190Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 208.000000s, at schemeshard: 72075186224037889 2024-11-21T17:54:42.556283Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 2024-11-21T17:54:42.567390Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:54:43.266507Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6419:4597]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:43.266604Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T17:54:43.266610Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6419:4597], StatRequests.size() = 1 2024-11-21T17:54:43.759418Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:43.759440Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:43.759449Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T17:54:43.759452Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:54:43.759538Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T17:54:43.762636Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:43.763499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6440:4614], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:43.763515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6451:4619], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:43.763522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:43.766012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T17:54:43.775563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6454:4622], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T17:54:44.020912Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6573:4686]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:44.020962Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T17:54:44.020969Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6573:4686], StatRequests.size() = 1 2024-11-21T17:54:44.033961Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTVkZTg2NzQtMmJjMDZlOWMtM2U1OGZmZTItNjFlMjgzYzY=, TxId: 2024-11-21T17:54:44.033981Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTVkZTg2NzQtMmJjMDZlOWMtM2U1OGZmZTItNjFlMjgzYzY=, TxId: 2024-11-21T17:54:44.034086Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:44.045162Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:54:44.045178Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:44.582395Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6615:4706]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:44.582469Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T17:54:44.582474Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6615:4706], StatRequests.size() = 1 2024-11-21T17:54:45.827381Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6656:4728]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:45.827460Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T17:54:45.827464Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6656:4728], StatRequests.size() = 1 2024-11-21T17:54:46.288300Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:54:46.288347Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:46.288351Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:46.288357Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T17:54:46.288360Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:46.288428Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T17:54:46.288868Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:46.291930Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDg3NDVmMDYtY2IyMGE3NGEtYzRhYjNkNTEtZjQ2NzUxOGU=, TxId: 2024-11-21T17:54:46.291946Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDg3NDVmMDYtY2IyMGE3NGEtYzRhYjNkNTEtZjQ2NzUxOGU=, TxId: 2024-11-21T17:54:46.292039Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:46.303012Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:46.303022Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:47.063801Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6726:4770]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:47.063906Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T17:54:47.063911Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6726:4770], StatRequests.size() = 1 2024-11-21T17:54:48.338488Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6765:4790]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:48.338565Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2024-11-21T17:54:48.338570Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6765:4790], StatRequests.size() = 1 2024-11-21T17:54:48.812733Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T17:54:48.812791Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:48.812795Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T17:54:48.812827Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:48.812908Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:54:49.541177Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6798:4806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:49.541266Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2024-11-21T17:54:49.541271Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6798:4806], StatRequests.size() = 1 >> test.py::test[produce-reduce_multi_in_keytuple--Debug] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Plan] [GOOD] >> test.py::test[window-win_func_special--Results] [GOOD] >> test.py::test[ypath-empty_range-dynamic-Analyze] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-Debug] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-ForceBlocks] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-Plan] [SKIPPED] >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test.py::test[window-rank/unordered--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func--Debug] >> test.py::test[lambda-list_aggregate-default.txt-Analyze] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Debug] >> test.py::test[schema-select_all_inferschema_range-empty_fail-Results] [GOOD] >> test.py::test[schema-select_fields_inferschema--Debug] >> THeavyPerfTest::TTestLoadEverything [GOOD] >> THiveImplTest::BootQueueSpeed >> test.py::test[lambda-list_aggregate-default.txt-Debug] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-ForceBlocks] >> test.py::test[schema-select_fields_inferschema--Debug] [GOOD] >> test.py::test[schema-select_fields_inferschema--Plan] [GOOD] >> test.py::test[schema-select_fields_inferschema--Results] |85.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[produce-reduce_multi_in_keytuple--Plan] [GOOD] >> test.py::test[window-win_func_aggr_4func--Debug] [GOOD] >> test.py::test[window-win_func_aggr_4func--Plan] [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] >> test.py::test[schema-select_fields_inferschema--Results] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Debug] >> test.py::test[lambda-list_aggregate-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Plan] [GOOD] >> test.py::test[lambda-list_aggregate-default.txt-Results] >> BasicStatistics::TwoTables [GOOD] >> TOlapReboots::CreateDropStore [GOOD] >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> TPQTest::TestLowWatermark [GOOD] >> TPQTest::TestGetTimestamps ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2024-11-21T17:52:18.944614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:18.944668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:18.944681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001651/r3tmp/tmprsVi3u/pdisk_1.dat 2024-11-21T17:52:19.038320Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11333, node 1 2024-11-21T17:52:19.132719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:19.132737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:19.132739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:19.132805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:19.137789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:19.217028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:19.217065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:19.228783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6938 2024-11-21T17:52:19.629802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.372255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:20.372280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:20.405231Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:20.406106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:20.463937Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:20.473325Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:20.473347Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:20.481325Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:20.481569Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:20.481599Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:20.481605Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:20.481610Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:20.481615Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:20.481619Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:20.481626Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:20.481745Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:20.656980Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:20.657024Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:20.658163Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T17:52:20.660273Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T17:52:20.660529Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T17:52:20.660921Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T17:52:20.665017Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:20.665035Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:20.665043Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T17:52:20.666178Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:20.666196Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:20.667963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:20.669180Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:20.669200Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:20.671328Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:20.683228Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:20.705850Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:20.820582Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:20.976944Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:52:21.571255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2141:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.571305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.574772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T17:52:21.647927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2271:3054], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.647960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.648346Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2276:3058]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:52:21.648373Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:52:21.648382Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2278:3060] 2024-11-21T17:52:21.648393Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2278:3060] 2024-11-21T17:52:21.648530Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2279:2830] 2024-11-21T17:52:21.648590Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2278:3060], server id = [2:2279:2830], tablet id = 72075186224037897, status = OK 2024-11-21T17:52:21.648624Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2279:2830], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:52:21.648637Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T17:52:21.648680Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:52:21.648693Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2276:3058], StatRequests.size() = 1 2024-11-21T17:52:21.651109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2283:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.651132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.651199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2288:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.652354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T17:52:21.803193Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:52:21.803218Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:52:21.917330Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2278:3060], schemeshard count = 1 2024-11-21T17:52:22.130184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2290:3071], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T17:52:22.228721Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2416:3156]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:52:22.228766Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:52:22.228772Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2416:3156], StatRequests.size() = 1 2024-11-21T17:52:22.238358Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd7xn1meeym55qcvd5fqxv01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q1ZTRiYmMtNGZiZTQ4MWUtNmQyZGU1YzAtMThlYzI3ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T17:52:22.258774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... tisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:54:43.905465Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T17:54:44.580874Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6568:4679]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:44.580933Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T17:54:44.580938Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6568:4679], StatRequests.size() = 1 2024-11-21T17:54:45.160087Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T17:54:45.160176Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:54:45.160282Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:45.232716Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037889 2024-11-21T17:54:45.232737Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 214.000000s, at schemeshard: 72075186224037889 2024-11-21T17:54:45.232821Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 73 2024-11-21T17:54:45.243813Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:54:45.958874Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6601:4695]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:45.958955Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T17:54:45.958960Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6601:4695], StatRequests.size() = 1 2024-11-21T17:54:46.490568Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:46.490600Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:46.490610Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is data table. 2024-11-21T17:54:46.490614Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:54:46.490708Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T17:54:46.493690Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:46.494638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6622:4712], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:46.494663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6632:4717], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:46.494685Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:46.497044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T17:54:46.505955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6636:4720], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T17:54:46.771790Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6753:4782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:46.771846Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T17:54:46.771853Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6753:4782], StatRequests.size() = 1 2024-11-21T17:54:46.788750Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTg3Njc2NzgtYzRhN2FmMjctNGViOTRhYWUtYTM4OTY3Nzk=, TxId: 2024-11-21T17:54:46.788771Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTg3Njc2NzgtYzRhN2FmMjctNGViOTRhYWUtYTM4OTY3Nzk=, TxId: 2024-11-21T17:54:46.788885Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:46.799812Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T17:54:46.799824Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:47.416963Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6798:4802]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:47.417031Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T17:54:47.417035Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6798:4802], StatRequests.size() = 1 2024-11-21T17:54:48.750726Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6839:4824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:48.750828Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T17:54:48.750836Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6839:4824], StatRequests.size() = 1 2024-11-21T17:54:49.293881Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:54:49.304186Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:49.304204Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:49.304210Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 5] is data table. 2024-11-21T17:54:49.304214Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T17:54:49.304289Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T17:54:49.304768Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:49.307627Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzZhZmI0OC0zM2ZkOTViMS03ZjllZThhNy1kNTdjMTZhMg==, TxId: 2024-11-21T17:54:49.307641Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzZhZmI0OC0zM2ZkOTViMS03ZjllZThhNy1kNTdjMTZhMg==, TxId: 2024-11-21T17:54:49.307733Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:49.329346Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 5] 2024-11-21T17:54:49.329358Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:50.027367Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6911:4868]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:50.027448Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T17:54:50.027453Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6911:4868], StatRequests.size() = 1 2024-11-21T17:54:51.248506Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6950:4888]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:51.248575Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2024-11-21T17:54:51.248579Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6950:4888], StatRequests.size() = 1 2024-11-21T17:54:51.770209Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T17:54:51.770266Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:54:51.770369Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:54:51.780623Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:54:51.780635Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:54:51.780641Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T17:54:51.780644Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:51.780716Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T17:54:51.781169Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:54:51.783577Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjkxMGY1ODktMmU5Njg0MTYtMjc0YzQwNGEtYzdkY2Y5MTg=, TxId: 2024-11-21T17:54:51.783588Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjkxMGY1ODktMmU5Njg0MTYtMjc0YzQwNGEtYzdkY2Y5MTg=, TxId: 2024-11-21T17:54:51.783681Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:54:51.794533Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:54:51.794543Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:54:52.553997Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7013:4925]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:52.554062Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2024-11-21T17:54:52.554067Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7013:4925], StatRequests.size() = 1 2024-11-21T17:54:52.554139Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7015:4927]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:54:52.554706Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2024-11-21T17:54:52.554713Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7015:4927], StatRequests.size() = 1 >> test.py::test[lambda-list_aggregate-default.txt-Results] [GOOD] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-Analyze] >> test.py::test[window-win_func_aggr_4func--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Debug] >> test.py::test[schema-user_schema_directread-default.txt-Debug] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Plan] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateDropStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:54:02.026625Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:02.026643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:02.026647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:02.026650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:02.026662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:02.026665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:02.026671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:02.026737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:54:02.033568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:02.033583Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:54:02.035131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:02.035214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:02.035237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:02.036863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:02.036917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:02.036990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:02.037100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:02.037520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:02.037702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:02.037708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:02.037716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:02.037719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:02.037723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:02.037750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: 
[1:214:2058] recipient: [1:207:2209] 2024-11-21T17:54:02.038616Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:54:02.049313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:02.049392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:02.049437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:02.049483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:02.049488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:02.049947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:02.049964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:02.049996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:02.050002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:02.050005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:02.050008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:02.050235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:02.050240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:02.050242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:02.050423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:02.050428Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:02.050432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:02.050437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:02.050813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2024-11-21T17:54:02.051044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:02.051076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:02.051202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:02.051219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:02.051231Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:02.051273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:02.051278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:02.051296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:02.051304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:02.051563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:02.051567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:02.051595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:02.051598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:02.051657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:02.051662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:02.051669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:02.051671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:02.051675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:02.051678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:02.051681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:02.051683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:02.051689Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:02.051693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:02.051695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... e 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:54:52.827992Z node 188 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:52.828006Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 807453853802 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:52.828010Z node 188 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TPropose operationId#1003:0 HandleReply TEvOperationPlan at schemeshard: 72057594046678944, stepId: 5000004 2024-11-21T17:54:52.828029Z node 188 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2024-11-21T17:54:52.828040Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:52.828046Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:54:52.828326Z node 188 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:52.828331Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:52.828347Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:54:52.828359Z node 188 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:52.828362Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [188:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T17:54:52.828365Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [188:203:2206], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:54:52.828398Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:52.828401Z node 188 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId#1003:0 ProgressState at schemeshard: 72057594046678944 2024-11-21T17:54:52.828405Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId#1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T17:54:52.828433Z node 188 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:52.828438Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:52.828440Z node 188 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:54:52.828443Z node 188 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:54:52.828446Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:54:52.828477Z node 188 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:52.828482Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:54:52.828484Z node 188 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:54:52.828486Z node 188 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T17:54:52.828488Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:54:52.828495Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:54:52.828894Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T17:54:52.828907Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72075186233409546 2024-11-21T17:54:52.828926Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:54:52.828966Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T17:54:52.828969Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T17:54:52.828976Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2024-11-21T17:54:52.828981Z node 188 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 130 2024-11-21T17:54:52.829015Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:54:52.829253Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:52.829267Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:54:52.829271Z node 188 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId#1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:52.829279Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:54:52.829299Z node 188 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:54:52.829302Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:54:52.829305Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:54:52.829307Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:54:52.829310Z node 188 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:54:52.829312Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:54:52.829325Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:54:52.829550Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:54:52.829606Z node 188 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:54:52.829653Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:52.829751Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:54:52.830429Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:54:52.830439Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:54:52.830446Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:52.830900Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:54:52.830909Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:54:52.830947Z node 188 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2024-11-21T17:54:52.830972Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:54:52.830976Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:54:52.831011Z node 188 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:54:52.831021Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:54:52.831025Z node 188 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [188:444:2423] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:54:52.831067Z node 188 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:54:52.831090Z node 188 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 32us result status StatusPathDoesNotExist 2024-11-21T17:54:52.831120Z node 188 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> test.py::test[schema-user_schema_directread-default.txt-Results] [GOOD] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-Analyze] [GOOD] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-Debug] >> test.py::test[schema-user_schema_with_sort--Debug] >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat >> KqpScheme::InvalidationAfterDropCreateTable2 >> KqpScheme::CreateAndAlterTableWithPartitionBy >> KqpScheme::DoubleCreateExternalDataSource >> KqpScheme::CreateTableWithUniqConstraint >> KqpOlapScheme::InvalidColumnInTieringRule >> KqpScheme::ChangefeedRetentionPeriod >> KqpScheme::CreateTableWithTtlSettingsUncompat >> KqpScheme::DropChangefeedNegative >> KqpAcl::FailNavigate >> KqpScheme::CreateDropColumnTable >> KqpScheme::QueryWithAlter >> KqpScheme::UseUnauthorizedTable >> KqpOlapScheme::TtlRunInterval >> KqpOlapScheme::DropTable >> KqpScheme::CreateUserWithoutPassword >> KqpScheme::CreateDroppedTable >> KqpOlapScheme::CreateTableWithTtl >> KqpScheme::CreateTableWithWrongPartitionAtKeys >> KqpScheme::CreateExternalDataSourceWithSa >> KqpOlapScheme::AddColumnLongPk >> KqpScheme::AlterTableAddExplicitSyncIndex >> KqpScheme::CreateTableWithCompactionPolicyUncompat >> KqpOlapScheme::BulkError >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> KqpOlapScheme::NullColumnError >> KqpScheme::AlterTableRenameIndex >> KqpOlapScheme::AddColumnWithTtl >> KqpScheme::AddColumnFamilyWithCompressionLevel >> KqpConstraints::DropCreateSerial >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Debug] [GOOD] >> 
test.py::test[window-win_func_aggr_4func_no_part_sorted--Plan] [GOOD] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-Debug] [GOOD] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-ForceBlocks] >> test.py::test[schema-user_schema_with_sort--Debug] [GOOD] >> test.py::test[schema-user_schema_with_sort--Plan] [GOOD] >> test.py::test[schema-user_schema_with_sort--Results] >> test.py::test[window-win_func_aggr_4func_no_part_sorted--Results] [GOOD] >> test.py::test[window-win_func_aggr_hist--Debug] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-Plan] [GOOD] >> test.py::test[lambda-list_aggregate_flatmap-default.txt-Results] >> test.py::test[schema-user_schema_with_sort--Results] [GOOD] >> test.py::test[select-autogen_columns_conflict-default.txt-Debug] >> KqpScheme::CreateDropColumnTable [GOOD] >> KqpScheme::CreateAsyncReplication >> KqpOlapScheme::DropTable [GOOD] >> KqpOlapScheme::DropThenAddColumn >> KqpOlapScheme::CreateTableWithTtl [GOOD] >> KqpOlapScheme::CreateTableWithoutTtl >> KqpOlapScheme::AddColumnLongPk [GOOD] >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert >> KqpOlapScheme::BulkError [GOOD] >> KqpOlapScheme::ColumnFamilyWithFieldData >> KqpScheme::CreateDroppedTable [GOOD] >> KqpScheme::CreateDropTableMultipleTime >> KqpScheme::CreateTableWithWrongPartitionAtKeys [GOOD] >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag >> KqpScheme::CreateTableWithCompactionPolicyUncompat [GOOD] >> KqpScheme::CreateTableWithCompactionPolicyCompat >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat >> KqpScheme::InvalidationAfterDropCreateTable2 [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx >> KqpOlapScheme::NullColumnError [GOOD] >> KqpOlapScheme::NullKeySchema >> KqpOlapScheme::AddColumnWithTtl [GOOD] >> KqpOlapScheme::AddColumnWithStore >> KqpScheme::AddColumnFamilyWithCompressionLevel [GOOD] >> KqpScheme::AddDropColumn >> KqpScheme::CreateAndAlterTableWithPartitionBy [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat >> KqpScheme::DoubleCreateExternalDataSource [GOOD] >> KqpScheme::CreateTableWithUniqConstraint [GOOD] >> KqpScheme::CreateTableWithUniqConstraintPublicApi >> KqpScheme::DoubleCreateExternalTable >> KqpScheme::DropChangefeedNegative [GOOD] >> KqpScheme::DropAsyncReplication >> KqpOlapScheme::TtlRunInterval [GOOD] >> KqpOlapScheme::TenThousandColumns >> KqpScheme::UseUnauthorizedTable [GOOD] >> KqpScheme::UseNonexistentTable >> KqpScheme::CreateUserWithoutPassword [GOOD] >> KqpScheme::DescribeIndexTable >> KqpScheme::AlterTableRenameIndex [GOOD] >> KqpScheme::AlterTableReplaceIndex >> KqpConstraints::DropCreateSerial [GOOD] >> KqpConstraints::DefaultsAndDeleteAndUpdate >> KqpScheme::CreateTableWithTtlSettingsUncompat [GOOD] >> KqpScheme::CreateTableWithTtlSettingsCompat >> KqpAcl::FailNavigate [GOOD] >> KqpAcl::FailResolve >> KqpOlapScheme::CreateTableWithoutTtl [GOOD] >> KqpOlapScheme::CreateWithoutColumnFamily >> THiveTest::DrainWithHiveRestart [GOOD] >> KqpOlapScheme::ColumnFamilyWithFieldData [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> KqpOlapScheme::CrateWithWrongCodec >> test.py::test[lambda-list_aggregate_flatmap-default.txt-Results] [GOOD] >> test.py::test[lineage-nested_lambda_fields-default.txt-Analyze] [SKIPPED] >> 
test.py::test[lineage-nested_lambda_fields-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-nested_lambda_fields-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-nested_lambda_fields-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-nested_lambda_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Analyze] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Debug] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Plan] [SKIPPED] >> test.py::test[lineage-select_group_by_key-default.txt-Results] [SKIPPED] >> test.py::test[match_recognize-test_type_predicate--Analyze] [SKIPPED] >> test.py::test[match_recognize-test_type_predicate--Debug] [SKIPPED] >> test.py::test[match_recognize-test_type_predicate--ForceBlocks] [SKIPPED] >> test.py::test[match_recognize-test_type_predicate--Plan] [SKIPPED] >> test.py::test[match_recognize-test_type_predicate--Results] >> KqpScheme::AlterTableAddExplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitAsyncIndex >> KqpScheme::ChangefeedRetentionPeriod [GOOD] >> KqpScheme::ChangefeedTopicPartitions >> KqpOlapScheme::DropThenAddColumn [GOOD] >> KqpOlapScheme::DropThenAddColumnCompaction >> KqpOlapScheme::AddColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::AddColumnErrors >> KqpOlapScheme::NullKeySchema [GOOD] >> KqpOlapScheme::SetColumnFamily >> KqpScheme::CreateExternalDataSourceWithSa [GOOD] >> KqpScheme::CreateExternalDataSourceValidationAuthMethod >> test.py::test[select-autogen_columns_conflict-default.txt-Debug] [GOOD] >> test.py::test[select-autogen_columns_conflict-default.txt-Plan] [GOOD] >> test.py::test[select-autogen_columns_conflict-default.txt-Results] >> KqpScheme::CreateAsyncReplication [GOOD] >> KqpScheme::CreateAsyncReplicationWithSecret >> KqpScheme::CreateTableWithVectorIndexNoFeatureFlag [GOOD] >> KqpScheme::CreateTableWithVectorIndexPublicApi >> KqpScheme::CreateTableWithCompactionPolicyCompat [GOOD] >> KqpScheme::CreateTableStoreNegative >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat >> KqpScheme::AddDropColumn [GOOD] >> KqpScheme::AlterColumnTableTtl >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat >> KqpScheme::DoubleCreateExternalTable [GOOD] >> KqpScheme::DisableResourcePools >> KqpScheme::CreateTableWithUniqConstraintPublicApi [GOOD] >> KqpScheme::CreateTableWithVectorIndex >> KqpScheme::UseNonexistentTable [GOOD] >> KqpScheme::UseDroppedTable >> KqpScheme::DescribeIndexTable [GOOD] >> KqpScheme::CreatedAt >> KqpOlapScheme::CreateWithoutColumnFamily [GOOD] >> KqpOlapScheme::CreateWithDefaultColumnFamily >> KqpOlapScheme::CrateWithWrongCodec [GOOD] >> KqpOlapScheme::AlterCompressionType >> KqpOlapScheme::InvalidColumnInTieringRule [GOOD] >> KqpOlapScheme::DropThenAddColumnIndexation >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTx [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2NoEffects >> KqpOlapScheme::SetColumnFamily [GOOD] >> KqpOlapScheme::PrimaryKeyNotDefaultColumnFamily >> KqpAcl::FailResolve [GOOD] >> KqpAcl::ReadSuccess >> KqpOlapScheme::AddColumnErrors [GOOD] >> KqpOlapScheme::AddColumnFamily >> KqpScheme::AlterTableReplaceIndex [GOOD] >> KqpScheme::AlterTableWithDecimalColumn >> 
KqpConstraints::DefaultsAndDeleteAndUpdate [GOOD] >> KqpConstraints::DefaultValuesForTable >> KqpOlapScheme::AddColumnWithStore [GOOD] >> KqpOlapScheme::AddPgColumnWithStore >> KqpScheme::CreateTableWithTtlSettingsCompat [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUncompat >> test.py::test[match_recognize-test_type_predicate--Results] [GOOD] >> test.py::test[select-autogen_columns_conflict-default.txt-Results] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Debug] >> KqpScheme::CreateTableStoreNegative [GOOD] >> KqpScheme::CreateResourcePoolClassifier >> KqpOlapScheme::AlterCompressionType [GOOD] >> KqpOlapScheme::AlterCompressionLevelError >> test.py::test[optimizers-instant_contains_lookup-default.txt-Analyze] >> KqpScheme::CreateExternalDataSourceValidationAuthMethod [GOOD] >> KqpScheme::CreateExternalDataSourceValidationLocation >> KqpOlapScheme::CreateWithDefaultColumnFamily [GOOD] >> KqpOlapScheme::CreateWithColumnFamily >> KqpScheme::AlterTableAddExplicitAsyncIndex [GOOD] >> KqpScheme::AlterResourcePool >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> KqpScheme::AlterColumnTableTtl [GOOD] >> KqpScheme::AlterColumnTableTiering >> KqpScheme::DisableResourcePools [GOOD] >> KqpScheme::DisableResourcePoolsOnServerless >> KqpScheme::CreateTableWithVectorIndex [GOOD] >> KqpScheme::ChangefeedTopicPartitions [GOOD] >> KqpScheme::CreateTableWithVectorIndexCovered >> KqpScheme::ChangefeedTopicAutoPartitioning >> KqpScheme::CreateTableWithVectorIndexPublicApi [GOOD] >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat [GOOD] >> KqpScheme::CreateAndDropUser >> KqpOlapScheme::PrimaryKeyNotDefaultColumnFamily [GOOD] >> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey >> KqpScheme::CreatedAt [GOOD] >> KqpScheme::DisableCreateExternalDataSource >> KqpOlapScheme::AddColumnFamily [GOOD] >> KqpOlapScheme::AddColumnWithColumnFamily >> KqpScheme::InvalidationAfterDropCreateTable2NoEffects [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat >> KqpAcl::ReadSuccess [GOOD] >> KqpAcl::WriteSuccess >> KqpScheme::UseDroppedTable [GOOD] >> KqpScheme::UnknownFamilyTest >> test.py::test[window-win_func_aggr_hist--Debug] [GOOD] >> test.py::test[window-win_func_aggr_hist--Plan] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] >> KqpOlapScheme::AlterCompressionLevelError [GOOD] >> KqpOlapScheme::CreateTableStoreWithFamily >> KqpOlapScheme::CreateWithColumnFamily [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUncompat [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsCompat >> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey [GOOD] >> KqpScheme::DropAsyncReplication [GOOD] >> KqpScheme::DropAsyncReplicationCascade >> KqpScheme::CreateExternalDataSourceValidationLocation [GOOD] >> KqpScheme::CreateExternalTable >> KqpScheme::AlterResourcePool [GOOD] >> KqpScheme::AlterResourcePoolClassifier >> KqpScheme::AlterTableWithDecimalColumn [GOOD] >> KqpScheme::AlterTableWithPgColumn >> KqpConstraints::DefaultValuesForTable [GOOD] >> KqpConstraints::DefaultValuesForTableNegative2 >> KqpOlapScheme::AddPgColumnWithStore [GOOD] >> KqpOlapScheme::AlterCompressionLevel >> KqpScheme::AlterColumnTableTiering [GOOD] >> KqpScheme::AlterAsyncReplication >> KqpScheme::CreateTableWithVectorIndexCovered [GOOD] >> KqpScheme::CreateTableWithVectorIndexCaseIncentive >> 
test.py::test[optimizers-instant_contains_lookup-default.txt-Analyze] [GOOD] >> test.py::test[optimizers-instant_contains_lookup-default.txt-Debug] >> KqpScheme::QueryWithAlter [GOOD] >> KqpScheme::RenameTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::CreateWithColumnFamily [GOOD] Test command err: Trying to start YDB, gRPC: 7570, MsgBus: 29096 2024-11-21T17:54:55.099011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792581748380508:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.099032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f6e/r3tmp/tmp63DNBT/pdisk_1.dat 2024-11-21T17:54:55.373463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.373494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.375089Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.392911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7570, node 1 2024-11-21T17:54:55.403328Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403338Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29096 TClient is connected to server localhost:29096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.171875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (created_at, id_second)) PARTITION BY HASH(created_at) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, TTL = Interval("PT1H") ON created_at); 2024-11-21T17:54:56.198217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586043348233:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.198234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.326426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.333216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.333257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.333296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.333320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.333337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.333360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.333378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.333399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.333420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.333441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.333460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.333482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792586043348309:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:56.333955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:56.333968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:56.333979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:56.333983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:56.333997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:56.334006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:56.334014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:56.334022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:56.334030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:56.334035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:56.334041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:56.334047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:56.334090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:56.334099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:56.334113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:56.334125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:54:56.334135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:56.334143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:56.334157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:54:56.334165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:54:56.334174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpd ... sActor;event=undelivered;self_id=[5:7439792589549148421:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:57.988993Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f6e/r3tmp/tmp0FHcvW/pdisk_1.dat 2024-11-21T17:54:57.998796Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12926, node 5 2024-11-21T17:54:58.017284Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:58.017296Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:58.017298Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:58.017331Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9762 TClient is connected to server localhost:9762 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:58.089546Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:58.089574Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:58.090683Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:58.090831Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family2, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=3), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:58.247365Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792593844116316:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.247388Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.250508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.255626Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:58.255645Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:58.255672Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:58.255689Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:58.255705Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:58.255720Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:58.255734Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:58.255752Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:58.255767Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:58.255784Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:58.255798Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:58.255815Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792593844116362:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:58.256184Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:58.256202Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:58.256212Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:58.256216Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:58.256245Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:58.256256Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:58.256265Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:58.256270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:58.256284Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:58.256293Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:58.256300Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:58.256309Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:58.256357Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:58.256367Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:58.256382Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:58.256391Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:54:58.256406Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:58.256415Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:58.256434Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:54:58.256443Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:54:58.256458Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:54:58.256467Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::SetNotDefaultColumnFamilyForPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 18648, MsgBus: 3818 2024-11-21T17:54:55.098912Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580859126007:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f43/r3tmp/tmpLiN3Sd/pdisk_1.dat 2024-11-21T17:54:55.371807Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.374116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.374142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.393278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18648, node 1 2024-11-21T17:54:55.402847Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402860Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766389Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766449Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3818 TClient is connected to server localhost:3818 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.175841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, resource_id Utf8 NOT NULL, level Int32 NOT NULL, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:56.198073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585154093730:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.198098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.332882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.332923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.332956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.332975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.332988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.333008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.333028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.333051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.333073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.333095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.333121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.333154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792585154093806:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:56.333602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:56.333618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:56.333632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:56.333637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:56.333663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:56.333667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:56.333676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:56.333690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:56.333699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:56.333703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:56.333709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:56.333714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:56.333772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:56.333783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:56.333800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:56.333811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:54:56.333826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:56.333835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:56.333858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:54:56.333868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:54:56.333879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NA ... initialize from file: (empty maybe) 2024-11-21T17:54:58.158175Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:58.158219Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18919 TClient is connected to server localhost:18919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:58.234632Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:58.234667Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:58.234880Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.235694Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=1)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:58.412551Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792594646469803:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.412576Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.434672Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.441534Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:58.441561Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:58.441611Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:58.441640Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:58.441667Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:58.441692Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:58.441718Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:58.441745Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:58.441772Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:58.441798Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:58.441823Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:58.441852Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792594646469849:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:58.442390Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:58.442405Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:58.442414Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:58.442419Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:58.442434Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:58.442444Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:58.442455Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:58.442467Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:58.442479Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:58.442489Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:58.442495Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:58.442500Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:58.442558Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:58.442571Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:58.442597Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:58.442608Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:54:58.442620Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:58.442629Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:58.442647Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:54:58.442657Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:54:58.442669Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:54:58.442678Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:54:58.492015Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792594646469928:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.492038Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.494235Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> test.py::test[select-backtick_with_escapes-default.txt-Debug] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Plan] [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Results] >> KqpOlapScheme::CreateTableStoreWithFamily [GOOD] >> KqpScheme::ChangefeedTopicAutoPartitioning [GOOD] >> KqpScheme::ChangefeedOnIndexTable >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi [GOOD] >> KqpOlapScheme::AddColumnWithColumnFamily [GOOD] >> KqpScheme::CreateUserWithPassword >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec >> KqpScheme::CreateAndDropUser [GOOD] >> KqpScheme::CreateAndDropGroup >> KqpAcl::WriteSuccess [GOOD] >> KqpAcl::RecursiveCreateTableShouldSuccess >> KqpScheme::UnknownFamilyTest [GOOD] >> KqpScheme::TwoSimilarFamiliesTest >> KqpScheme::CreateTableWithUniformPartitionsCompat [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUuid >> KqpScheme::DisableCreateExternalDataSource [GOOD] >> KqpScheme::DisableCreateExternalTable >> KqpScheme::InvalidationAfterDropCreateTable2MultiStageTxNoEffects [GOOD] >> KqpScheme::ModifyPermissions >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::CreateTableStoreWithFamily [GOOD] Test command err: Trying to start YDB, gRPC: 4850, MsgBus: 12555 2024-11-21T17:54:55.098716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792581459046580:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f31/r3tmp/tmpSrqtHI/pdisk_1.dat 2024-11-21T17:54:55.369651Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.375880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.375909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4850, node 1 2024-11-21T17:54:55.392461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.402326Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402464Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.417159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:55.417828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792581459046921:2285], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:55.766400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766420Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12555 TClient is connected to server localhost:12555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.172132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, level Uuid, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:56.198030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585754014298:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.198051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 4611, MsgBus: 15584 2024-11-21T17:54:56.495342Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792586187441641:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.495368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f31/r3tmp/tmplC3fQJ/pdisk_1.dat 2024-11-21T17:54:56.506024Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4611, node 2 2024-11-21T17:54:56.518658Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.518673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.518675Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.518724Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15584 TClient is connected to server localhost:15584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.595918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.595952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.597017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.597647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (DATA="test", COMPRESSION="off")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:56.790450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792586187442243:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.790489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (DATA="test", COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:56.792221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792586187442264:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.792250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } CREATE TABLE `/Root/TableWithoutColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:56.795208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792586187442271:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.795224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.797440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.803988Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439792586187442333:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.804022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439792586187442333:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.804049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439792586187442333:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.804068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439792586187442333:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.804087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439792586187442333:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.804098Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439792586187442333:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.804114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439792586187442333:2309];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.804133Z node 2 :TX_COLUMNSHARD WARN: ... 
NAME=Granules; 2024-11-21T17:54:58.233592Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:58.233616Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:58.233640Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:58.233662Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:58.233686Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:58.233709Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:58.233732Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:58.233754Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:58.233776Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:58.233797Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792596162465409:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:54:58.234173Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:58.234186Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:58.234198Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:58.234202Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:58.234219Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:58.234228Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:58.234238Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:58.234247Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:58.234256Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:58.234260Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:58.234266Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:58.234270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:58.234314Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:58.234328Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:58.234343Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:58.234348Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:54:58.234359Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:58.234367Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:58.234383Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2024-11-21T17:54:58.234392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:54:58.234404Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:54:58.234413Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:54:58.283646Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792596162465489:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.283667Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 25629, MsgBus: 6134 2024-11-21T17:54:58.465555Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439792596748680450:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:58.465636Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f31/r3tmp/tmpAKJVSU/pdisk_1.dat 2024-11-21T17:54:58.475409Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25629, node 6 2024-11-21T17:54:58.490358Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:58.490373Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:58.490375Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:58.490413Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6134 TClient is connected to server localhost:6134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:58.566082Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:58.566108Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:58.567162Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:58.568393Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLESTORE `/Root/TableStoreWithColumnFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family2, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="zstd", COMPRESSION_LEVEL=1), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:58.739408Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792596748681049:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.739427Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpOlapScheme::AlterCompressionLevel [GOOD] >> KqpOlapScheme::AddColumnWithoutColumnFamily >> KqpScheme::CreateExternalTable [GOOD] >> KqpScheme::CreateExternalTableCheckPrimaryKey >> KqpConstraints::DefaultValuesForTableNegative2 [GOOD] >> KqpConstraints::DefaultValuesForTableNegative3 >> KqpScheme::CreateTableWithVectorIndexCaseIncentive [GOOD] >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec [GOOD] >> test.py::test[select-backtick_with_escapes-default.txt-Results] [GOOD] >> test.py::test[select-bin_ops_long_concat-default.txt-Debug] >> KqpScheme::AlterTableWithPgColumn [GOOD] >> KqpScheme::AlterUser >> KqpScheme::DisableResourcePoolsOnServerless [GOOD] >> KqpScheme::DisableResourcePoolClassifiersOnServerless >> test.py::test[optimizers-instant_contains_lookup-default.txt-Debug] [GOOD] >> test.py::test[optimizers-instant_contains_lookup-default.txt-ForceBlocks] >> KqpScheme::RenameTable [GOOD] >> KqpScheme::ResourcePoolsValidation >> KqpScheme::TwoSimilarFamiliesTest [GOOD] >> KqpScheme::CreateUserWithPassword [GOOD] >> KqpScheme::CreateAndDropGroup [GOOD] >> KqpScheme::ChangefeedOnIndexTable [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore >> KqpAcl::RecursiveCreateTableShouldSuccess [GOOD] >> KqpConstraints::AddColumnWithDefaultForbidden >> test.py::test[window-win_func_aggr_hist--Results] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Debug] >> KqpScheme::AlterTableAddImplicitSyncIndex >> KqpScheme::CreateResourcePoolClassifier [GOOD] >> KqpScheme::CreateResourcePoolClassifierOnServerless >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsCompat >> KqpOlapScheme::AddColumnWithoutColumnFamily [GOOD] >> KqpOlapScheme::AddExsitsColumnFamily >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers >> KqpScheme::DisableCreateExternalTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithVectorIndexCaseIncentive [GOOD] Test command err: Trying to start YDB, gRPC: 9376, MsgBus: 29105 2024-11-21T17:54:55.098970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580243402370:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f1a/r3tmp/tmpTGTIt2/pdisk_1.dat 2024-11-21T17:54:55.359582Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9376, node 1 2024-11-21T17:54:55.403243Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403255Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.418567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:55.427425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792580243402708:2285], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:55.453998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.454019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.455013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29105 TClient is connected to server localhost:29105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.245884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.256599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584538371020:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584538371510:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584538371515:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792584538371517:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.571552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30143, MsgBus: 2915 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f1a/r3tmp/tmp2Sdd3Y/pdisk_1.dat 2024-11-21T17:54:56.725174Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:56.727599Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 30143, node 2 2024-11-21T17:54:56.744942Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.744955Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.744957Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.744988Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2915 TClient is connected to server localhost:2915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.815900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.815921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.816988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.818147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.829129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.836165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.850695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.860767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.005161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792590790831418:2374], ... 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.217428Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.225648Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.243207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.254075Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.383653Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596336060584:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.383675Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.387473Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.393342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.399937Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.407232Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.413843Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.420894Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.485749Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596336061098:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.485771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.485770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596336061103:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.486380Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:58.487993Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792596336061105:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:58.655334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8306, MsgBus: 30132 2024-11-21T17:54:58.803823Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792594289424082:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:58.803848Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f1a/r3tmp/tmps6Hd9C/pdisk_1.dat 2024-11-21T17:54:58.814644Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8306, node 5 2024-11-21T17:54:58.821697Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:58.821708Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:58.821710Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:58.821746Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30132 TClient is connected to server localhost:30132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:58.904385Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:58.904416Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:58.905469Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:58.906103Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.917407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:58.925518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:54:58.944694Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.000889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.074748Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792598584392920:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.074779Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.078455Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.084222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.092829Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.099562Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.106699Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.114142Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.121713Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792598584393420:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.121736Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.121738Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792598584393425:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.122177Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.127150Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792598584393427:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.303376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddColumnFamilyWithNotSupportedCodec [GOOD] Test command err: Trying to start YDB, gRPC: 27044, MsgBus: 16484 2024-11-21T17:54:55.098929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580584825034:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f67/r3tmp/tmpVFvNc9/pdisk_1.dat 2024-11-21T17:54:55.363834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.363864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.365095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.376963Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27044, node 1 2024-11-21T17:54:55.403731Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403854Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.417184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:55.417808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792580584825374:2285], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:55.766721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766783Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16484 TClient is connected to server localhost:16484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.177927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, resource_id Utf8, level Int32, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:56.198021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584879792751:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.198043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.333734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.333778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.333831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.333862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.333879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.333900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.333922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.333945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.333969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.333991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.334013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.334036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792584879792827:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:56.334646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:56.334663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:56.334673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:56.334677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:56.334695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:56.334704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:56.334714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:56.334721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:56.334730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:56.334738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:56.334743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:56.334747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:56.334796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:56.334805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:56.334820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:56.334825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:54:56.334836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:56.334844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:56.334859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process ... .. TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.122683Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.122705Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.123740Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.125431Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4"), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:59.267512Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792598760455156:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.267535Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.269400Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.275587Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:59.275615Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:59.275648Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:59.275671Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:59.275690Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:59.275709Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:59.275725Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:59.275740Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:59.275761Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:59.275778Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:59.275791Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:59.275806Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792598760455202:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:59.276220Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:59.276241Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:59.276253Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:59.276258Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:59.276274Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:59.276278Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:59.276287Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:59.276293Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:59.276303Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:59.276307Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:59.276313Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:59.276318Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:59.276367Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:59.276382Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:59.276398Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:59.276409Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:54:59.276422Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:59.276433Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:59.276453Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:54:59.276465Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:54:59.276476Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:54:59.276486Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:54:59.325497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792598760455282:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.325515Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.326974Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792598760455287:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.326996Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateUserWithPassword [GOOD] Test command err: Trying to start YDB, gRPC: 28029, MsgBus: 13576 2024-11-21T17:54:55.098933Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583257297737:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f36/r3tmp/tmpVxxdM1/pdisk_1.dat 2024-11-21T17:54:55.364980Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.371566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.371598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.374947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28029, node 1 2024-11-21T17:54:55.403060Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403074Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.414642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:55.415191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792583257298081:2285], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:55.766453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766467Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13576 TClient is connected to server localhost:13576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.177471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.198195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.208431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.216195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.235152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587552266386:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.235175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.330872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.363444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587552266880:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587552266885:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792587552266887:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 64801, MsgBus: 28444 2024-11-21T17:54:56.715334Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792585045662363:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.715352Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f36/r3tmp/tmpUXSO8o/pdisk_1.dat 2024-11-21T17:54:56.731976Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64801, node 2 2024-11-21T17:54:56.739972Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.739984Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.739986Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.740019Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28444 TClient is connected to server localhost:28444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.815722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.815747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.816847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.818004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.829157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.837295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:54:56.855146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.864613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.992882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792585045663899:2374], DatabaseId: /Root ... 4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:58.224535Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.230797Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.240283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.257129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.268467Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.405417Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596523633113:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.405459Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.409926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.465923Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.520714Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.533194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.539966Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.546461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.613709Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596523633644:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.613735Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.613803Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596523633649:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.614405Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:58.615855Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792596523633651:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:58.796183Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15364, MsgBus: 1066 2024-11-21T17:54:59.015615Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792600854534729:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.015632Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f36/r3tmp/tmpwlK95D/pdisk_1.dat 2024-11-21T17:54:59.026095Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15364, node 5 2024-11-21T17:54:59.033354Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.033366Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.033369Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.033403Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1066 TClient is connected to server localhost:1066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.115624Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.115656Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.116730Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.117956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.122850Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.129531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.148159Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.156889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.285809Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600854536269:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.285845Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.289844Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.295954Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.302781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.310179Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.316446Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.323981Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.332214Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600854536773:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.332256Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.332262Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600854536778:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.332853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.337114Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792600854536780:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TwoSimilarFamiliesTest [GOOD] Test command err: Trying to start YDB, gRPC: 22408, MsgBus: 7068 2024-11-21T17:54:55.098959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580357552155:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f55/r3tmp/tmpMBrqwx/pdisk_1.dat 2024-11-21T17:54:55.365882Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.371160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.371184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.374373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22408, node 1 2024-11-21T17:54:55.402509Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402520Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7068 TClient is connected to server localhost:7068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.175978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.243508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.254455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584652520811:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.385333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.390725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.397696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.404763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.412895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584652521307:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.412913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584652521312:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.412917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.413428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.417846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792584652521314:2430], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.583809Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439792584652521614:3468], for# test_user@builtin, access# DescribeSchema 2024-11-21T17:54:56.583823Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439792584652521614:3468], for# test_user@builtin, access# DescribeSchema 2024-11-21T17:54:56.585113Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792584652521611:2462], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:54:56.585176Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjdkZjJkZGQtZDFlMzFhOGUtOTBmODZmODgtNjE2ZDU2Zjg=, ActorId: [1:7439792584652521602:2457], ActorState: ExecuteState, TraceId: 01jd7xsry6fk1ty045c8x4cvmf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/KeyValue]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:54:56.587381Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792584652521618:2465], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/NonExistent]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:54:56.587436Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjdkZjJkZGQtZDFlMzFhOGUtOTBmODZmODgtNjE2ZDU2Zjg=, ActorId: [1:7439792584652521602:2457], ActorState: ExecuteState, TraceId: 01jd7xsry9b4y7etdkftnh6pba, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/NonExistent]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Trying to start YDB, gRPC: 12625, MsgBus: 9481 2024-11-21T17:54:56.741497Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f55/r3tmp/tmpJ2cgmH/pdisk_1.dat 2024-11-21T17:54:56.761157Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12625, node 2 2024-11-21T17:54:56.771070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.771082Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.771083Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.771117Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9481 TClient is connected to server localhost:9481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting. ... -> Disconnected 2024-11-21T17:54:58.434571Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:58.435650Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:58.437304Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.439505Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.450054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:54:58.468326Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.478117Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.618649Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596765225769:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.618675Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.623927Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.630749Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.637654Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.644859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.651908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.658722Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.729876Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596765226281:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.729905Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.729912Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792596765226286:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.730533Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:58.732277Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792596765226288:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 20235, MsgBus: 10446 2024-11-21T17:54:59.038034Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792600846169960:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.038096Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f55/r3tmp/tmpI0Pg2z/pdisk_1.dat 2024-11-21T17:54:59.046884Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20235, node 5 2024-11-21T17:54:59.056884Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.056896Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.056898Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.056930Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10446 TClient is connected to server localhost:10446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.138549Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.138570Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.139712Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.140332Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.144001Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.156960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.173173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.185024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.290898Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600846171494:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.290925Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.295252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.300944Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.310000Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.364557Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.372791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.380015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.388198Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600846172010:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.388240Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600846172015:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.388243Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.388700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.393079Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792600846172017:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndDropGroup [GOOD] Test command err: Trying to start YDB, gRPC: 28397, MsgBus: 30281 2024-11-21T17:54:55.098960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583254663236:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ecb/r3tmp/tmpImmWJc/pdisk_1.dat 2024-11-21T17:54:55.363415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.363443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.367182Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.373284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28397, node 1 2024-11-21T17:54:55.402317Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402534Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.414776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:55.415285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792583254663581:2285], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:55.766768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766775Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766777Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30281 TClient is connected to server localhost:30281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.171584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.182998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.198356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.207397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.215873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.235636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587549631887:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.235667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.363627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587549632380:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587549632385:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792587549632387:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.568800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.578335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25324, MsgBus: 12707 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ecb/r3tmp/tmpnQBhSN/pdisk_1.dat 2024-11-21T17:54:56.734820Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:54:56.735260Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25324, node 2 2024-11-21T17:54:56.746886Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.746899Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.746900Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.746934Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12707 TClient is connected to server localhost:12707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.826617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.826643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.827313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.827617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.830703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.840017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T17:54:56.857805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.868130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... T17:54:58.219361Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:58.220529Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.223974Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.232810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.249738Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.261993Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.423797Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792594629005928:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.423818Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.427291Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.481611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.491205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.497851Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.505036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.512208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.520943Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792594629006435:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.520970Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.520981Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792594629006440:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.521521Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:58.525093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792594629006442:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 1180, MsgBus: 27548 2024-11-21T17:54:59.008101Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792601224977486:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.008170Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ecb/r3tmp/tmpJW2iSY/pdisk_1.dat 2024-11-21T17:54:59.017892Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1180, node 5 2024-11-21T17:54:59.027417Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.027432Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.027434Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.027466Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27548 TClient is connected to server localhost:27548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.108556Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.108581Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.109646Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.110827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.111401Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:59.122441Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.128899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.144366Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.157084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.311052Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792601224979025:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.311073Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.315522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.320939Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.330740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.337911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.345029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.351761Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.360320Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792601224979528:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.360340Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.360392Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792601224979533:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.360904Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.365406Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792601224979535:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> KqpScheme::CreateTableWithUniformPartitionsUuid [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> KqpScheme::CreateAsyncReplicationWithSecret [GOOD] >> KqpScheme::CreateBackupCollectionDisabledByDefault >> KqpScheme::CreateExternalTableCheckPrimaryKey [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> KqpScheme::ModifyPermissions [GOOD] >> KqpConstraints::DefaultValuesForTableNegative3 [GOOD] >> KqpConstraints::DefaultValuesForTableNegative4 >> KqpScheme::DisableExternalDataSourcesOnServerless >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> KqpScheme::DropAsyncReplicationCascade [GOOD] >> KqpScheme::DoubleCreateResourcePool >> KqpOlapScheme::AddExsitsColumnFamily [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat [GOOD] Test command err: Trying to start YDB, gRPC: 26253, MsgBus: 12805 2024-11-21T17:54:55.098837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580916200335:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f2f/r3tmp/tmpIfcthp/pdisk_1.dat 2024-11-21T17:54:55.366139Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.367669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.367682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.371245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26253, node 1 2024-11-21T17:54:55.403476Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403490Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766555Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12805 TClient is connected to server localhost:12805 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.198174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.207121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.216268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.235209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585211168986:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.235232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.363372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585211169478:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585211169483:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792585211169485:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.590701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20616, MsgBus: 65499 2024-11-21T17:54:56.735647Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792586033820469:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.738871Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f2f/r3tmp/tmpbMdo24/pdisk_1.dat 2024-11-21T17:54:56.754561Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20616, node 2 2024-11-21T17:54:56.764849Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.764860Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.764861Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.764893Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65499 TClient is connected to server localhost:65499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.834753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.834788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.835843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.839659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.847210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.855142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.871766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.881915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.024723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792590328789156:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:57.024754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: ... CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732211698980 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) 2024-11-21T17:54:58.950627Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithPartitioningByLoad TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithPartitioningByLoad" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732211698980 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithPartitioningByLoad" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColu... (TRUNCATED) Trying to start YDB, gRPC: 11969, MsgBus: 12494 2024-11-21T17:54:59.218182Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792598050876060:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f2f/r3tmp/tmp99X6j0/pdisk_1.dat 2024-11-21T17:54:59.222486Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:54:59.226855Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11969, node 5 2024-11-21T17:54:59.235933Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.235946Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.235947Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.235981Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12494 TClient is connected to server localhost:12494 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.317929Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.317958Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.319032Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.319643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.323365Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.331660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.348673Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.359697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.494165Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792598050877456:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.494195Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.498032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.503640Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.513188Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.520586Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.526822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.533994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.542675Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792598050877960:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.542704Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.542747Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792598050877965:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.543328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.546996Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792598050877967:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.712779Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithMinMaxPartitions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithMinMaxPartitions" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732211699764 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithMinMaxPartitions" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) 2024-11-21T17:54:59.724400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithMinMaxPartitions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithMinMaxPartitions" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732211699764 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithMinMaxPartitions" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... 
(TRUNCATED) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableCreateExternalTable [GOOD] Test command err: Trying to start YDB, gRPC: 6473, MsgBus: 23353 2024-11-21T17:54:55.098949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580757474303:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000edf/r3tmp/tmpXSKzCl/pdisk_1.dat 2024-11-21T17:54:55.365798Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6473, node 1 2024-11-21T17:54:55.402459Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402478Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.454212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.454233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.455192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766550Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766568Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23353 TClient is connected to server localhost:23353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.244817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.257510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.266224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585052442960:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.349283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585052443451:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585052443456:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.375898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792585052443458:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 24961, MsgBus: 21003 2024-11-21T17:54:56.739245Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792588265538206:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.741117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000edf/r3tmp/tmpVGRt3U/pdisk_1.dat 2024-11-21T17:54:56.753371Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24961, node 2 2024-11-21T17:54:56.760385Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.760401Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.760404Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.760440Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21003 TClient is connected to server localhost:21003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.838583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.838614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.839641Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.840869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.843299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.855593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.874463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.883232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.036776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792592560506886:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:57.036809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:57.041490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itse ... -> Disconnected 2024-11-21T17:54:58.437606Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:58.438638Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:58.447115Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.449563Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.464341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.483991Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.493742Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.631227Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792595262333154:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.631253Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.635012Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.640904Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.652339Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.659019Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.665676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.672994Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.681660Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792595262333667:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.681697Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792595262333672:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.681698Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.682184Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:58.686183Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792595262333674:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 27264, MsgBus: 13564 2024-11-21T17:54:59.243101Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792599249693089:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.243297Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000edf/r3tmp/tmpkWafV4/pdisk_1.dat 2024-11-21T17:54:59.257427Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27264, node 5 2024-11-21T17:54:59.265811Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.265826Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.265828Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.265863Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13564 TClient is connected to server localhost:13564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.343294Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.343320Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.344473Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.346119Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.353677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.361226Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.378333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.387841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.512036Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599249694626:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.512083Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.516604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.521945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.534467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.540844Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.548111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.555083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.563134Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599249695131:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.563158Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599249695136:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.563158Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.563684Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.568331Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792599249695138:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScheme::ResourcePoolsValidation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTableWithUniformPartitionsUuid [GOOD] >> KqpScheme::ResourcePoolClassifiersValidation Test command err: Trying to start YDB, gRPC: 19502, MsgBus: 2275 2024-11-21T17:54:55.098924Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583518510158:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f62/r3tmp/tmp9oqIoW/pdisk_1.dat 2024-11-21T17:54:55.362760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.362790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.373591Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.392454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19502, node 1 2024-11-21T17:54:55.403530Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403543Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766498Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766552Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2275 TClient is connected to server localhost:2275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.171100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.182362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.245125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.256335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587813478810:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.363078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587813479303:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587813479308:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792587813479310:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.591160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1732211696642 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) 2024-11-21T17:54:56.600056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1732211696642 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... (TRUNCATED) 2024-11-21T17:54:56.629992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithTtlSettings TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithTtlSettings" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1732211696642 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TableWithTtlSettings" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... 
(TRUNCATED) 2024-11-21T17:54:56.637540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.645947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2843, MsgBus: 19440 2024-11-21T17:54:56.916955Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792588342403267:2052];se ... : 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.621676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.631967Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.648913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.660070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.770611Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792594509598996:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.770643Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.775248Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.780770Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.792028Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.799280Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.806231Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.813717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.881598Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792594509599510:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.881624Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792594509599515:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.881625Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:58.882185Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:58.883455Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792594509599517:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.048402Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13002, MsgBus: 9886 2024-11-21T17:54:59.197427Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792597451968916:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.197627Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f62/r3tmp/tmpuIXKRB/pdisk_1.dat 2024-11-21T17:54:59.206966Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13002, node 5 2024-11-21T17:54:59.214343Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.214360Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.214361Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.214399Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9886 TClient is connected to server localhost:9886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.297948Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.297985Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.299064Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.299643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.310960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.319049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.333710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.345848Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.465088Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792597451970446:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.465106Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.468785Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.473886Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.484829Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.491797Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.498974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.505965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.514558Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792597451970950:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.514582Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.514584Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792597451970955:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.515058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.519654Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792597451970957:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.674987Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> test.py::test[select-bin_ops_long_concat-default.txt-Debug] [GOOD] >> test.py::test[select-bin_ops_long_concat-default.txt-Plan] [GOOD] >> test.py::test[select-bin_ops_long_concat-default.txt-Results] >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] >> KqpConstraints::AddColumnWithDefaultForbidden [GOOD] >> test.py::test[optimizers-instant_contains_lookup-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-instant_contains_lookup-default.txt-Plan] [GOOD] >> test.py::test[optimizers-instant_contains_lookup-default.txt-Results] >> KqpScheme::AlterUser [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalTableCheckPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 10274, MsgBus: 7292 2024-11-21T17:54:55.098926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580919237254:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f61/r3tmp/tmpKaJclw/pdisk_1.dat 2024-11-21T17:54:55.365238Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.365328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.365343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.371864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10274, node 1 2024-11-21T17:54:55.403369Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403381Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766478Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7292 TClient is connected to server localhost:7292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.175756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.198169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.207338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.215822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.235276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585214205909:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.235298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.370301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.377999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585214206409:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.378016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.378021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585214206414:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.378424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.382934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792585214206416:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.571736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2024-11-21T17:54:56.621878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.663145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480 2024-11-21T17:54:56.707648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.778386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.827111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.880103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T17:54:56.889197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T17:54:57.108055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710703:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8416, MsgBus: 2538 2024-11-21T17:54:57.322006Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792590921544800:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:57.322287Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f61/r3tmp/tmpAFjwxU/pdisk_1.dat 2024-11-21T17:54:57.331963Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8416, node 2 2024-11-21T17:54:57.340910Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:57.340923Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:57.340925Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:57.340962Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2538 TClient is connected to server localhost:2538 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:57.424500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:57.424530Z node 2 :HIVE WARN: HIVE ... schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.823844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.834434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.849935Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:58.863188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.014873Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792601043825090:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.014927Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.018726Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.024206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.029612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.037334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.044124Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.051219Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.101652Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792601043825601:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.101676Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.101680Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792601043825606:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.102209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.103696Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792601043825608:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.240498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.241826Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10765, MsgBus: 10382 2024-11-21T17:54:59.413084Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792599504315543:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.413109Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f61/r3tmp/tmpVqBtIX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10765, node 5 2024-11-21T17:54:59.428379Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:59.429130Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.429140Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.429142Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.429166Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10382 TClient is connected to server localhost:10382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.513266Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.513302Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.514305Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.515518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.517246Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.528019Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.547114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.556316Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.662899Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599504317082:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.662935Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.668167Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.674929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.681217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.688062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.695116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.702209Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.709799Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599504317575:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.709818Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.709830Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599504317580:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.710304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.715228Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792599504317582:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ModifyPermissions [GOOD] Test command err: Trying to start YDB, gRPC: 7753, MsgBus: 24085 2024-11-21T17:54:55.098920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583867880794:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f3b/r3tmp/tmpHv6Nb0/pdisk_1.dat 2024-11-21T17:54:55.374969Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7753, node 1 2024-11-21T17:54:55.402802Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402818Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.440800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.440830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.441865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766451Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766506Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24085 TClient is connected to server localhost:24085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.175209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.246006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.255678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588162849451:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.349062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588162849941:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588162849946:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792588162849948:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.583029Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2024-11-21T17:54:56.583240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3112, MsgBus: 28954 2024-11-21T17:54:56.710442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792585408377092:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.710662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f3b/r3tmp/tmpgtF7Ax/pdisk_1.dat 2024-11-21T17:54:56.720201Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3112, node 2 2024-11-21T17:54:56.732392Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.732405Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.732407Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.732434Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28954 TClient is connected to server localhost:28954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.810810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.810833Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.811894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.812573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.817578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.826260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.841959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.853805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.988594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792585408378621:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have acce ... PoolCreatorActor] ActorId: [4:7439792593203176674:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:58.911550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.911732Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037911 not found Trying to start YDB, gRPC: 15159, MsgBus: 16758 2024-11-21T17:54:59.226180Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792599196681615:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.226215Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f3b/r3tmp/tmpuqsK7m/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15159, node 5 2024-11-21T17:54:59.240997Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:59.243111Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.243121Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.243123Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.243152Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16758 TClient is connected to server localhost:16758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.326301Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.326333Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.327412Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.329040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.329702Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:59.338375Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.345581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.363363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.373970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.487893Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599196683158:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.487926Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.492871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.498400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.505926Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.513151Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.520013Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.526929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.535710Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599196683660:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.535751Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.535754Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599196683665:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.536328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.539914Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792599196683667:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.713989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.722986Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.780013Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.785682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.791381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.791476Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.795890Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.800192Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.805271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.810209Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.815254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.820039Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.824568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.830496Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.837098Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.842764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.847263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.847379Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithBloomFilterUncompat ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::AddExsitsColumnFamily [GOOD] Test command err: Trying to start YDB, gRPC: 9285, MsgBus: 5620 2024-11-21T17:54:55.098936Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583179806059:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f41/r3tmp/tmpjEvFer/pdisk_1.dat 2024-11-21T17:54:55.364769Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9285, node 1 2024-11-21T17:54:55.402973Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402987Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.414007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T17:54:55.414723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792583179806396:2285], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:54:55.444908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.444938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.445960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766725Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5620 TClient is connected to server localhost:5620 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:54:56.198061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587474773777:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.198080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.326485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.333340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.333385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.333416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.333432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.333443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.333463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.333487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.333504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.333522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.333543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.333564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.333585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792587474773853:2304];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:56.334007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:56.334021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:56.334032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:56.334037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:56.334051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:56.334060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:56.334069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:56.334078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:54:56.334087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:54:56.334095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:54:56.334101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:54:56.334104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:54:56.334159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:54:56.334178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:54:56.334199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:54:56.334207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:54:56.334219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:54:56.334227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:54:56.334243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888 ... .. TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.957665Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.957696Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.958841Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.959444Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4"), FAMILY family2 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:55:00.117097Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792603571284049:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.117119Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.119213Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.126411Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:55:00.126440Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:55:00.126514Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:55:00.126544Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:55:00.126570Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:55:00.126611Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:55:00.126639Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:55:00.126661Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:55:00.126695Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:55:00.126718Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:55:00.126745Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:55:00.126771Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[6:7439792603571284095:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:55:00.127353Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:55:00.127368Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:55:00.127381Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:55:00.127385Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:55:00.127410Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:55:00.127420Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:55:00.127431Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:55:00.127440Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:00.127450Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:00.127459Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:00.127465Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:00.127473Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:00.127526Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:00.127537Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:00.127556Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:00.127571Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:55:00.127585Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:00.127589Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:00.127607Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:00.127617Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:00.127629Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:00.127633Z node 6 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:55:00.175761Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792603571284175:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.175793Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.177319Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792603571284180:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.177337Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:40.008075Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:40.008096Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] 2024-11-21T17:53:40.010554Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:53:40.010571Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2024-11-21T17:53:40.010585Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T17:53:40.012079Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:40.012762Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 
1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2024-11-21T17:53:40.012778Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:40.013172Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2024-11-21T17:53:40.013191Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitConfigStep 2024-11-21T17:53:40.013197Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 1. Step TInitConfigStep 2024-11-21T17:53:40.013200Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 2. Step TInitConfigStep 2024-11-21T17:53:40.013203Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 3. Step TInitConfigStep 2024-11-21T17:53:40.013270Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Step TInitInternalFieldsStep 2024-11-21T17:53:40.013320Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:40.013708Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 0. Completed. 
2024-11-21T17:53:40.013714Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:53:40.013719Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:40.014070Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2024-11-21T17:53:40.014075Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2024-11-21T17:53:40.014078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2024-11-21T17:53:40.014080Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2024-11-21T17:53:40.014107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:40.014110Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:40.014160Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 1. Step TInitInternalFieldsStep 2024-11-21T17:53:40.014178Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:53:40.014441Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 1. Completed. 
2024-11-21T17:53:40.014446Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] 2024-11-21T17:53:40.014450Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:40.014685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2024-11-21T17:53:40.014689Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2024-11-21T17:53:40.014691Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2024-11-21T17:53:40.014693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2024-11-21T17:53:40.014705Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:40.014707Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:40.014727Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:53:40.014771Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 2. Step TInitInternalFieldsStep 2024-11-21T17:53:40.014822Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:186:2199] 2024-11-21T17:53:40.015061Z node 1 :PERSQUEUE DEBUG: Initializing topic 'rt3.dc1--asdfgs--topic' partition 2. Completed. 
2024-11-21T17:53:40.015064Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:186:2199] 2024-11-21T17:53:40.015067Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2024-11-21T17:53:40.015270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit request with generation 1 2024-11-21T17:53:40.015273Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit with generation 1 done 2024-11-21T17:53:40.015275Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit request with generation 1 2024-11-21T17:53:40.015277Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit with generation 1 done 2024-11-21T17:53:40.015285Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:53:40.015288Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NP ... akeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.919834Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:650:2644] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.920178Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:655:2649] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.920563Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:660:2654] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.920974Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:665:2659] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.921336Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:670:2664] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.921675Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:675:2669] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.922034Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:680:2674] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.922387Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:685:2679] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.922750Z node 116 
:PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:690:2684] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.923066Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:695:2689] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.923416Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:700:2694] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.923769Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:705:2699] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.924107Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:710:2704] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.924484Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:715:2709] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.924824Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:720:2714] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.925179Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:725:2719] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.925520Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:730:2724] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.925908Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:735:2729] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.926402Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:740:2734] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.926865Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:745:2739] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.927300Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:750:2744] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:54:59.927760Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:59.927777Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [116:755:2749], now have 1 active actors on pipe 2024-11-21T17:54:59.927796Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:54:59.927962Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:59.927976Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [116:758:2752], now have 1 active actors on pipe 2024-11-21T17:54:59.927992Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle 
TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:54:59.928095Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:54:59.928101Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [116:761:2755], now have 1 active actors on pipe 2024-11-21T17:54:59.928113Z node 116 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:54:59.928212Z node 116 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [116:764:2758] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:55:00.111122Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:55:00.111148Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:55:00.119858Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:55:00.119878Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:55:00.120679Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:55:00.120867Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 227 actor [117:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 227 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 227 ReadRuleGenerations: 227 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { 
PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 227 Important: false } Consumers { Name: "aaa" Generation: 227 Important: true } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:55:00.121004Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [117:246:2246] 2024-11-21T17:55:00.121148Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [117:246:2246] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:55:00.121325Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [117:244:2244] 2024-11-21T17:55:00.121421Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [117:244:2244] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:55:00.127396Z node 117 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:55:00.127414Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:55:00.127541Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [117:325:2308] 2024-11-21T17:55:00.127731Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [117:327:2310] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:55:00.128441Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [117:325:2308] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:55:00.128502Z node 117 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [117:327:2310] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR >> KqpScheme::AlterTableAddImplicitSyncIndex [GOOD] >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex >> KqpScheme::CreateBackupCollectionDisabledByDefault [GOOD] >> KqpScheme::CreateBackupCollection >> KqpScheme::TouchIndexAfterMoveIndexWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterUser [GOOD] Test command err: Trying to start YDB, gRPC: 3641, MsgBus: 9485 2024-11-21T17:54:55.099016Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792582448569062:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.099042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f0f/r3tmp/tmpCTtgd3/pdisk_1.dat 2024-11-21T17:54:55.375008Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3641, node 1 2024-11-21T17:54:55.402924Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402935Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.452641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.452669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.453677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9485 TClient is connected to server localhost:9485 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.171770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.183397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.244524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.256784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.284992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586743537719:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.330733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586743538210:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586743538215:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792586743538217:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.572535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.578667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.587241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17079, MsgBus: 24121 2024-11-21T17:54:56.940893Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792587268936182:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.940913Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f0f/r3tmp/tmpzgpBw0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17079, node 2 2024-11-21T17:54:56.957492Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:56.957783Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.957795Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.957797Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.957828Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24121 TClient is connected to server localhost:24121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:57.041345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:57.041378Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:57.042458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:57.043088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:57.046295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.057628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.071948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.084895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting ... 6644480 2024-11-21T17:54:59.016018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.023300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.029822Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.089602Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792597464362882:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.089638Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.089723Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792597464362887:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.090214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.091597Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792597464362889:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.270036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.278890Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.291967Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.299628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.305532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.312745Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.319741Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.325867Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.333747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.340344Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.346981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.354486Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6174, MsgBus: 24609 2024-11-21T17:54:59.627396Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792599628193916:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.627642Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f0f/r3tmp/tmpV3EgqB/pdisk_1.dat 2024-11-21T17:54:59.639555Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6174, node 5 2024-11-21T17:54:59.650136Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2024-11-21T17:54:59.650156Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.650158Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.650206Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24609 TClient is connected to server localhost:24609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.727776Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.727798Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.728870Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.730509Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.736719Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.744651Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.763667Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.773355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.905012Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792599628195454:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.905037Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.909513Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.964377Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.019136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.030912Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.037946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.045358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.053577Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792603923163270:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.053598Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792603923163275:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.053607Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.054132Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:00.058457Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792603923163277:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AddColumnWithDefaultForbidden [GOOD] Test command err: Trying to start YDB, gRPC: 4525, MsgBus: 6703 2024-11-21T17:54:55.098893Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792581391154477:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f68/r3tmp/tmp4bFXKI/pdisk_1.dat 2024-11-21T17:54:55.375204Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.376454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.376477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.379220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4525, node 1 2024-11-21T17:54:55.402509Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402519Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766538Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6703 TClient is connected to server localhost:6703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.176340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.244727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.256171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.286043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585686123135:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.286063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.379100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.390770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.397302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.404633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.411746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.419916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585686123630:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.419932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.419979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585686123635:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.420558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.425107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792585686123637:2430], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.630764Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439792585686123958:3489], for# user0@builtin, access# DescribeSchema 2024-11-21T17:54:56.630776Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439792585686123958:3489], for# user0@builtin, access# DescribeSchema 2024-11-21T17:54:56.631789Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439792585686123955:2462], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/TwoShard]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:54:56.631867Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTUyMDNmOGMtNTQ3MDZhMS1lN2FkYzYxYy1jNmVjNjU2Ng==, ActorId: [1:7439792585686123946:2457], ActorState: ExecuteState, TraceId: 01jd7xsrzn2etdgx6qhgagzs49, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 21508, MsgBus: 4028 2024-11-21T17:54:56.935075Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792584344752525:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.935238Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f68/r3tmp/tmp5CR3hs/pdisk_1.dat 2024-11-21T17:54:56.945804Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21508, node 2 2024-11-21T17:54:56.953856Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.953873Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.953875Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.953909Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4028 TClient is connected to server localhost:4028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:57.035241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:57.035274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:57.036420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:57.037553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T17:54:57.043418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:57.098224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.115162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part ... hemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.190254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.199092Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.398969Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792601033235411:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.399010Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.402892Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.408658Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.422301Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.428807Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.436029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.443090Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.451434Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792601033235903:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.451467Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.451474Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792601033235908:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.451977Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.456469Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792601033235910:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:54:59.596592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.601982Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.614793Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4219, MsgBus: 62811 2024-11-21T17:54:59.766506Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439792597621603079:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.766802Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f68/r3tmp/tmpucfENP/pdisk_1.dat 2024-11-21T17:54:59.779401Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4219, node 6 2024-11-21T17:54:59.788119Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.788130Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.788132Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.788163Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62811 TClient is connected to server localhost:62811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:59.867116Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.867152Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.868267Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.869476Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:59.875359Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.884923Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.901907Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.913491Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.029310Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792601916571907:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.029328Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.031329Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.036432Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.044747Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.051644Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.058908Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.066147Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.074187Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792601916572407:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.074213Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.074270Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792601916572412:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.074847Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:00.079307Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439792601916572414:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:00.271685Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] Test command err: Trying to start YDB, gRPC: 22409, MsgBus: 12725 2024-11-21T17:54:55.098929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792581221609338:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000eeb/r3tmp/tmpfGGQPB/pdisk_1.dat 2024-11-21T17:54:55.362727Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22409, node 1 2024-11-21T17:54:55.403171Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403189Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.452910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.452932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.453987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766422Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12725 TClient is connected to server localhost:12725 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.173415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.185420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.244646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.255439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585516577993:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.333239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.349202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.410693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.420019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585516578488:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.420039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792585516578494:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.420043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.420522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.425217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792585516578496:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.621981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.819532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23692, MsgBus: 9988 2024-11-21T17:54:57.125219Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792588680376006:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:57.125239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000eeb/r3tmp/tmpQlvC7n/pdisk_1.dat 2024-11-21T17:54:57.136014Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23692, node 2 2024-11-21T17:54:57.143487Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:57.143513Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:57.143515Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:57.143550Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9988 TClient is connected to server localhost:9988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:57.225507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:57.225536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:57.226578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:57.227709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.231076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:57.238814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.254067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.267161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.379436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792588680377534:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND ... 86224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:00.153456Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:00.153461Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:00.153470Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:00.153475Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:00.153507Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:00.153513Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:55:00.153515Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:00.153518Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:55:00.153526Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:55:00.153537Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:00.153537Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:55:00.153540Z node 5 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:55:00.153551Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:00.153553Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:55:00.153557Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:55:00.153559Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:00.153566Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:55:00.153570Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:00.153574Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:00.153578Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:00.153579Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:00.153584Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:00.153590Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:00.153593Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:00.153594Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:00.153596Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:55:00.153621Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:00.153632Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:00.153647Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:00.153657Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:55:00.153668Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:00.153677Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:00.153692Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:00.153702Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:00.153713Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:00.153723Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:55:00.183153Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792604235247163:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.183172Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.185597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.197660Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792604235247308:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.197678Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.199047Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.207685Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792604235247388:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.207715Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.209131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropColumnStore, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.213850Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037890 not found 2024-11-21T17:55:00.213865Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2024-11-21T17:55:00.213867Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037892 not found 2024-11-21T17:55:00.213869Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037893 not found 2024-11-21T17:55:00.213871Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037894 not found 2024-11-21T17:55:00.213874Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037896 not found 2024-11-21T17:55:00.214251Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037891 not found 2024-11-21T17:55:00.214262Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found 2024-11-21T17:55:00.214264Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037897 not found 2024-11-21T17:55:00.216041Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037889 not found >> KqpScheme::AlterResourcePoolClassifier [GOOD] >> KqpScheme::AlterNonExistingResourcePoolClassifier >> KqpConstraints::DefaultValuesForTableNegative4 [GOOD] >> KqpOlapTypes::Decimal >> test.py::test[select-bin_ops_long_concat-default.txt-Results] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Debug] >> KqpScheme::DoubleCreateResourcePool [GOOD] >> KqpScheme::DoubleCreateResourcePoolClassifier >> test.py::test[optimizers-instant_contains_lookup-default.txt-Results] [GOOD] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Analyze] [SKIPPED] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Debug] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::DefaultValuesForTableNegative4 [GOOD] Test command err: Trying to start YDB, gRPC: 6239, MsgBus: 13437 2024-11-21T17:54:55.098778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580560111276:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ef9/r3tmp/tmpRpm1MW/pdisk_1.dat 2024-11-21T17:54:55.359651Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.370205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.370237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2024-11-21T17:54:55.375255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6239, node 1 2024-11-21T17:54:55.402435Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402452Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766495Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13437 TClient is connected to server localhost:13437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.246353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.259779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.272834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.335990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584855079940:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.336012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.369584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.374454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.383529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.390620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.397789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.404808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.413103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584855080432:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.413123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.413162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584855080437:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.413647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.418434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792584855080439:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.612025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.643422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976710675:1, at schemeshard: 72057594046644480 2024-11-21T17:54:56.649268Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T17:54:56.651796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3585, MsgBus: 24604 2024-11-21T17:54:56.925348Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792585671048100:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.925366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ef9/r3tmp/tmpPBrTJd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3585, node 2 2024-11-21T17:54:56.941193Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:56.941321Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.941334Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.941336Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.941369Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24604 TClient is connected to server localhost:24604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:57.025485Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:57.025511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:57.026561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:57.027740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.028630Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:54:57.039241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.049957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.066958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... -> Disconnected 2024-11-21T17:54:59.506674Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.507741Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:59.509443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.512941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.520624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.536885Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.549162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.660354Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600233245206:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.660387Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.665187Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.671727Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.681099Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.687733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.694978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.702454Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.709731Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600233245706:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.709757Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792600233245711:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.709758Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:59.710247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:59.715232Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792600233245713:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 25882, MsgBus: 28589 2024-11-21T17:55:00.093740Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439792601648263658:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.093900Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ef9/r3tmp/tmpR6b8St/pdisk_1.dat 2024-11-21T17:55:00.104361Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25882, node 6 2024-11-21T17:55:00.111231Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.111247Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.111249Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.111284Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28589 TClient is connected to server localhost:28589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.194229Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.194252Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.195320Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.195962Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.198303Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.206673Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:00.225567Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.235168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.368162Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792601648265190:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.368193Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.373784Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.380627Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.387542Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.394862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.402169Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.409322Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.417583Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792601648265693:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.417609Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.417670Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792601648265698:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.418256Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:00.422631Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439792601648265700:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpScheme::ResourcePoolClassifiersValidation [GOOD] >> KqpScheme::ResourcePoolClassifiersRankValidation >> KqpScheme::CreateAndAlterTableWithBloomFilterUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat >> KqpScheme::CreateBackupCollection [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWrite [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexReadReplace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::MultipleSplitsWithRestartsWhenWait Test command err: Trying to start YDB, gRPC: 14758, MsgBus: 64238 2024-11-21T17:46:26.207452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439790394792794716:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:46:26.207573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ddb/r3tmp/tmp9OzL5b/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14758, node 1 2024-11-21T17:46:26.264466Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:46:26.268199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:46:26.268211Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:46:26.268213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:46:26.268262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64238 TClient is connected to server localhost:64238 WaitRootIsUp 'Root'... TClient::Ls request: Root 2024-11-21T17:46:26.308361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:46:26.308390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:46:26.309454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:46:26.335400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:46:26.339552Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:46:26.469430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:46:27.112160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.112191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.112221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.112259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.112280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.112348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.112370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.112390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.112413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.112438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.112454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.112475Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439790399087764389:2296];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.113494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.113525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.113596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.113620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.113637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.113671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.113699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.113753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:46:27.113773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:46:27.113812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:46:27.113834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:46:27.113856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038399;self_id=[1:7439790399087764402:2298];tablet_id=72075186224038399;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:46:27.115591Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:46:27.115629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:46:27.115686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:46:27.115720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:46:27.115751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:46:27.115771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:46:27.115791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:46:27.115806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7439790399087764390:2297];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;de ... 
pe: ESchemeOpAlterColumnTable, opId: 281474976736265:0, at schemeshard: 72057594046644480 2024-11-21T17:54:44.402547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736266:0, at schemeshard: 72057594046644480 2024-11-21T17:54:44.411429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736267:0, at schemeshard: 72057594046644480 2024-11-21T17:54:44.418994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736268:0, at schemeshard: 72057594046644480 2024-11-21T17:54:45.744664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736269:0, at schemeshard: 72057594046644480 2024-11-21T17:54:45.755430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736270:0, at schemeshard: 72057594046644480 2024-11-21T17:54:45.770638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736271:0, at schemeshard: 72057594046644480 2024-11-21T17:54:46.399285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736272:0, at schemeshard: 72057594046644480 2024-11-21T17:54:46.407609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736273:0, at schemeshard: 72057594046644480 2024-11-21T17:54:46.422430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736274:0, at schemeshard: 72057594046644480 2024-11-21T17:54:47.773526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736275:0, at schemeshard: 72057594046644480 2024-11-21T17:54:47.787005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736276:0, at schemeshard: 72057594046644480 2024-11-21T17:54:47.800207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736277:0, at schemeshard: 72057594046644480 2024-11-21T17:54:48.152354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736278:0, at schemeshard: 72057594046644480 2024-11-21T17:54:48.164721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736279:0, at schemeshard: 72057594046644480 2024-11-21T17:54:48.177840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736280:0, at schemeshard: 72057594046644480 
2024-11-21T17:54:48.598154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736281:0, at schemeshard: 72057594046644480 2024-11-21T17:54:48.606925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736282:0, at schemeshard: 72057594046644480 2024-11-21T17:54:48.619278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736283:0, at schemeshard: 72057594046644480 2024-11-21T17:54:50.598471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736284:0, at schemeshard: 72057594046644480 2024-11-21T17:54:50.608167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736285:0, at schemeshard: 72057594046644480 2024-11-21T17:54:50.620144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736286:0, at schemeshard: 72057594046644480 2024-11-21T17:54:51.941820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736287:0, at schemeshard: 72057594046644480 2024-11-21T17:54:51.952072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736288:0, at schemeshard: 72057594046644480 2024-11-21T17:54:51.960835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736289:0, at schemeshard: 72057594046644480 2024-11-21T17:54:52.739261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736290:0, at schemeshard: 72057594046644480 2024-11-21T17:54:52.748455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736291:0, at schemeshard: 72057594046644480 2024-11-21T17:54:52.763430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736292:0, at schemeshard: 72057594046644480 2024-11-21T17:54:53.657923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736293:0, at schemeshard: 72057594046644480 2024-11-21T17:54:53.666669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736294:0, at schemeshard: 72057594046644480 2024-11-21T17:54:53.679344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736295:0, at schemeshard: 72057594046644480 2024-11-21T17:54:54.602378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736296:0, at schemeshard: 72057594046644480 2024-11-21T17:54:54.614808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736297:0, at schemeshard: 72057594046644480 2024-11-21T17:54:54.625719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736298:0, at schemeshard: 72057594046644480 2024-11-21T17:54:55.265564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736299:0, at schemeshard: 72057594046644480 2024-11-21T17:54:55.280071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736300:0, at schemeshard: 72057594046644480 2024-11-21T17:54:55.290733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736301:0, at schemeshard: 72057594046644480 2024-11-21T17:54:55.659607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736302:0, at schemeshard: 72057594046644480 2024-11-21T17:54:55.668601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736303:0, at schemeshard: 72057594046644480 2024-11-21T17:54:55.684529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736304:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.750851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736305:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.766489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736306:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.781521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736307:0, at schemeshard: 72057594046644480 2024-11-21T17:54:57.973033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736308:0, at schemeshard: 72057594046644480 2024-11-21T17:54:57.987725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736309:0, at schemeshard: 72057594046644480 2024-11-21T17:54:57.999952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736310:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.381638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736311:0, at 
schemeshard: 72057594046644480 2024-11-21T17:54:58.395041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736312:0, at schemeshard: 72057594046644480 2024-11-21T17:54:58.409802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976736313:0, at schemeshard: 72057594046644480 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1266B1AA 1. /-S/util/system/yassert.cpp:55: Panic @ 0x12662916 2. /-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x13680663 3. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:294: WaitResharding @ 0x12462CE6 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:644: Execute_ @ 0x1246680E 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1246A9F6 6. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x127C571D 7. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1246A3B9 8. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x127C5E92 9. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x127D90AC 10. ??:0: ?? @ 0x7F1B5D171D8F 11. ??:0: ?? @ 0x7F1B5D171E3F 12. ??:0: ?? @ 0x1179D028 |85.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/pytest >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Debug] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateBackupCollection [GOOD] Test command err: Trying to start YDB, gRPC: 1432, MsgBus: 10444 2024-11-21T17:54:55.098913Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792582390341333:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f73/r3tmp/tmpmZ7WYS/pdisk_1.dat 2024-11-21T17:54:55.367225Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1432, node 1 2024-11-21T17:54:55.403273Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403284Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.442414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.442440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.443539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766473Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766542Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10444 TClient is connected to server localhost:10444 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.175791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.198067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586685309060:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.198091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.338726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.338755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.338777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.338788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.338805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.338817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.338827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.338840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.338856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.338867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.338881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.338895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439792586685309181:2308];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:54:56.338948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.338987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.339023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.339043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.339057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.339076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.339094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.339112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.339132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.339154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.339174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.339194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439792586685309178:2305];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:54:56.339585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:56.339598Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:56.339607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:56.339615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:56.339627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:56.339635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:56.339642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:54:56.339651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstrac ... chemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.225503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.235291Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.251009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.263068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.385105Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792603174008990:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.385128Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.389492Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.395055Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.402176Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.408928Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.416051Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.423487Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.431076Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792603174009493:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.431109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.431123Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792603174009498:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.431713Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:00.436465Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792603174009500:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 24497, MsgBus: 23674 2024-11-21T17:55:00.802431Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792601585528201:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.802614Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f73/r3tmp/tmpUTjTKP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24497, node 5 2024-11-21T17:55:00.817515Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:55:00.819729Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.819742Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.819743Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.819770Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23674 TClient is connected to server localhost:23674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.902905Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.902932Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.903989Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.905204Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.917101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.925832Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:00.945550Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.956326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.062158Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792605880497034:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.062186Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.116767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.121916Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.129981Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.136497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.143522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.150920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.159004Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792605880497547:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.159030Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.159044Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792605880497552:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.159580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:01.164281Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792605880497554:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:01.315678Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976715672:2, at schemeshard: 72057594046644480 2024-11-21T17:55:01.320674Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpScheme::DisableExternalDataSourcesOnServerless [GOOD] >> KqpScheme::DisableDropExternalDataSource >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat [GOOD] >> KqpScheme::CreateAndAlterTableComplex >> KqpOlapTypes::Decimal [GOOD] >> KqpOlapTypes::Decimal35 >> KqpScheme::AlterTableAddExplicitSyncVectorKMeansTreeIndex [GOOD] >> KqpScheme::AlterTableAlterIndex >> test.py::test[select-complex_filter_with_order-default.txt-Debug] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Plan] [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Results] >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> KqpScheme::AlterCompressionLevelInColumnFamily >> test.py::test[window-win_func_lead_lag_opt--Debug] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Plan] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Results] >> SubDomainWithReboots::Delete >> ForceDropWithReboots::PathsAndShardsCountersSimultaneousAlterSubDomain >> ForceDropWithReboots::ForceDeleteCreateTableInFlyWithRebootAtCommit >> SubDomainWithReboots::DeleteWithStoragePools >> KqpScheme::CreateResourcePoolClassifierOnServerless [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2024-11-21T17:54:17.267283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792417569182373:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:17.267297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0010ce/r3tmp/tmp9GcaJ3/pdisk_1.dat 2024-11-21T17:54:17.352051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4086, node 1 2024-11-21T17:54:17.368639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:17.368672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:17.370252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:17.379526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:17.379544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:17.379547Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:17.379590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20635 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:17.412822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.414058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:17.414068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:17.416412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T17:54:17.416478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T17:54:17.416483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T17:54:17.417175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T17:54:17.417187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T17:54:17.417527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T17:54:17.417836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.420629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732211657463, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T17:54:17.420645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T17:54:17.420715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T17:54:17.421631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:17.421676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:17.421687Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T17:54:17.421698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2024-11-21T17:54:17.421705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T17:54:17.421717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T17:54:17.422343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T17:54:17.422352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T17:54:17.422356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T17:54:17.422381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T17:54:17.477925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:40426" , at schemeshard: 72057594046644480 2024-11-21T17:54:17.478002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.478147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T17:54:17.478157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T17:54:17.478167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T17:54:17.478173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T17:54:17.478179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T17:54:17.478185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T17:54:17.478241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T17:54:17.478600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T17:54:17.478651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T17:54:17.478655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.478676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T17:54:17.478683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T17:54:17.480732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T17:54:17.480762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2024-11-21T17:54:17.480813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T17:54:17.480816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T17:54:17.480849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T17:54:17.480865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T17:54:17.480868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439792417569182990:2381], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2024-11-21T17:54:17.480871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439792417569182990:2381], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2024-11-21T17:54:17.480878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:17.480884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 2024-11-21T17:54:17.481015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { ... 
de 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037890 TxId: 281474976715659 2024-11-21T17:54:36.487191Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.487201Z node 67 :TX_COLUMNSHARD DEBUG: TxPlanStep[4] complete at tablet 72075186224037891 2024-11-21T17:54:36.487216Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.487225Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:54:36.487229Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037891 TxId: 281474976715659 MinStep: 0 Step: 1732211676531 2024-11-21T17:54:36.487231Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037891, partId: 0 2024-11-21T17:54:36.487239Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037891 TxId: 281474976715659 MinStep: 0 Step: 1732211676531 2024-11-21T17:54:36.487244Z node 67 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037891 TxId: 281474976715659 MinStep: 0 Step: 1732211676531 2024-11-21T17:54:36.487257Z node 67 :TX_COLUMNSHARD DEBUG: TxPlanStep[4] complete at tablet 72075186224037889 2024-11-21T17:54:36.487268Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T17:54:36.487274Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037891 TxId: 281474976715659 2024-11-21T17:54:36.487276Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037891, partId: 0 2024-11-21T17:54:36.487282Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037891 TxId: 281474976715659 2024-11-21T17:54:36.487302Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1732211676531 2024-11-21T17:54:36.487321Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037889, partId: 0 2024-11-21T17:54:36.487342Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1732211676531 2024-11-21T17:54:36.487365Z node 67 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186224037889 TxId: 281474976715659 MinStep: 0 Step: 1732211676531 
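For readability, the CreateColumnStore request printed near the top of this YdbLogStore::AlterLogTable block (node 1, txId 281474976710658) is shown again below in indented form. The # annotations are not part of the original request; everything else is copied from the log. The node-67 run that continues here presumably created the same "default" preset, although its own CreateColumnStore call falls inside the elided middle of this log.

    CreateColumnStore {
      Name: "LogStore"
      ColumnShardCount: 4
      SchemaPresets {
        Name: "default"
        Schema {
          Columns { Name: "timestamp"     Type: "Uint8"        NotNull: true }
          Columns { Name: "resource_type" Type: "Utf8"         NotNull: true }
          Columns { Name: "resource_id"   Type: "Utf8"         NotNull: true }
          Columns { Name: "uid"           Type: "Utf8"         NotNull: true }
          Columns { Name: "level"         Type: "Int32" }
          Columns { Name: "message"       Type: "Utf8" }
          Columns { Name: "json_payload"  Type: "JsonDocument" }
          Columns { Name: "request_id"    Type: "Utf8" }
          Columns { Name: "ingested_at"   Type: "Timestamp" }
          Columns { Name: "saved_at"      Type: "Timestamp" }   # referenced as the TTL column in the proposal below
          # Primary key order of the preset; note that saved_at is not the first key column.
          KeyColumnNames: "timestamp"
          KeyColumnNames: "resource_type"
          KeyColumnNames: "resource_id"
          KeyColumnNames: "uid"
          DefaultCompression { Codec: ColumnCodecLZ4 }
        }
      }
    }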
2024-11-21T17:54:36.487402Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, message: Origin: 72075186224037889 TxId: 281474976715659 2024-11-21T17:54:36.487420Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715659, tablet: 72075186224037889, partId: 0 2024-11-21T17:54:36.487435Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480, message: Origin: 72075186224037889 TxId: 281474976715659 2024-11-21T17:54:36.487773Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.487845Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.487857Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.487863Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.487875Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.487885Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T17:54:36.487904Z node 67 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T17:54:36.487915Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715659 ready parts: 1/1 2024-11-21T17:54:36.487920Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715659, ready parts: 1/1, is published: true 2024-11-21T17:54:36.487931Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [67:7439792499896640020:2311] message: TxId: 281474976715659 2024-11-21T17:54:36.487936Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715659 ready parts: 1/1 2024-11-21T17:54:36.487941Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T17:54:36.487943Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715659:0 2024-11-21T17:54:36.487980Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2024-11-21T17:54:36.491538Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/LogStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "log2" TtlSettings { Enabled { ColumnName: "saved_at" ExpireAfterSeconds: 2000 } } SchemaPresetName: "default" ColumnShardCount: 4 Sharding { HashSharding { Function: HASH_FUNCTION_CLOUD_LOGS Columns: "timestamp" Columns: "uid" } } } } TxId: 281474976715660 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:44066" , at schemeshard: 72057594046644480 2024-11-21T17:54:36.491608Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /Root/LogStore/log2, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:54:36.491646Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusSchemeError, reason: Haven't MAX-index for TTL column 
and TTL column is not first column in primary key, at schemeshard: 72057594046644480 2024-11-21T17:54:36.492226Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusSchemeError Reason: "Haven\'t MAX-index for TTL column and TTL column is not first column in primary key" TxId: 281474976715660 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T17:54:36.492272Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusSchemeError, reason: Haven't MAX-index for TTL column and TTL column is not first column in primary key, operation: CREATE COLUMN TABLE, path: /Root/LogStore/ 2024-11-21T17:54:36.936362Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[67:7439792499896639776:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:36.936831Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[67:7439792499896639780:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:36.936841Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[67:7439792499896639774:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:36.936855Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[67:7439792499896639789:2292];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:37.436583Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[67:7439792499896639776:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:37.437090Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[67:7439792499896639780:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:37.437131Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[67:7439792499896639789:2292];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:37.437133Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[67:7439792499896639774:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:54:37.438101Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[67:7439792499896639776:2290];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T17:54:37.438595Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[67:7439792499896639774:2289];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:54:37.439111Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[67:7439792499896639780:2291];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T17:54:37.439126Z node 67 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[67:7439792499896639789:2292];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:458, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (SCHEME_ERROR != SUCCESS)
: Error: Haven't MAX-index for TTL column and TTL column is not first column in primary key , with diff: S(|UC)C(H|)E(ME_ERROR|SS) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x159BBCF9) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+10200 (0x156F3508) NTestSuiteYdbLogStore::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x15702477) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x159BDCAE) NTestSuiteYdbLogStore::TCurrentTest::Execute()+429 (0x15701E3D) NUnitTest::TTestFactory::Execute()+803 (0x159BE423) NUnitTest::RunMain(int, char**)+3005 (0x159D163D) ??+0 (0x7F92D1C23D90) __libc_start_main+128 (0x7F92D1C23E40) _start+41 (0x14707029) |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |85.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::CreateWithStoragePools >> KqpScheme::TouchIndexAfterMoveIndexReadReplace [GOOD] >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateResourcePoolClassifierOnServerless [GOOD] Test command err: Trying to start YDB, gRPC: 27978, MsgBus: 12375 2024-11-21T17:54:55.098800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792582059409604:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f27/r3tmp/tmpxdU0vE/pdisk_1.dat 2024-11-21T17:54:55.368925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.368965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.375012Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.375234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27978, node 1 2024-11-21T17:54:55.403314Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403329Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766769Z node 1 
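The SCHEME_ERROR reported above originates from the CreateColumnTable proposal a few lines earlier (node 67, txId 281474976715660): the assertion at ydb/services/ydb/ydb_logstore_ut.cpp:458 expects SUCCESS, but the schemeshard answers StatusSchemeError because the TTL column neither has a MAX index nor is the first primary-key column of the preset (key order timestamp, resource_type, resource_id, uid, as shown in the re-rendered preset above). The rejected request is repeated below in indented form; the # annotations are editorial, the field values are copied from the log.

    Transaction {
      WorkingDir: "/Root/LogStore"
      OperationType: ESchemeOpCreateColumnTable
      CreateColumnTable {
        Name: "log2"
        TtlSettings {
          Enabled {
            ColumnName: "saved_at"        # Timestamp column, but not the first key column and without a MAX index
            ExpireAfterSeconds: 2000
          }
        }
        SchemaPresetName: "default"       # preset key order: timestamp, resource_type, resource_id, uid
        ColumnShardCount: 4
        Sharding {
          HashSharding {
            Function: HASH_FUNCTION_CLOUD_LOGS
            Columns: "timestamp"
            Columns: "uid"
          }
        }
      }
    }
    # Schemeshard response: Status: StatusSchemeError
    # Reason: "Haven't MAX-index for TTL column and TTL column is not first column in primary key"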
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12375 TClient is connected to server localhost:12375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.246025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.256414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.266066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.284909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586354378257:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.284931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.332974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586354378747:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586354378752:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.375769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792586354378754:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.571568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithCompactionPolicy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithCompactionPolicy" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710671 CreateStep: 1732211696621 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithCompactionPolicy" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) 2024-11-21T17:54:56.580599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithCompactionPolicy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithCompactionPolicy" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710671 CreateStep: 1732211696621 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithCompactionPolicy" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) Trying to start YDB, gRPC: 11025, MsgBus: 8781 2024-11-21T17:54:56.722903Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792587283203003:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f27/r3tmp/tmpVdGa8x/pdisk_1.dat 2024-11-21T17:54:56.726440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:54:56.741179Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11025, node 2 2024-11-21T17:54:56.746140Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.746152Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.746154Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.746186Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8781 TClient is connected to server localhost:8781 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 1844674407370955 ... &id=MThmYjIwZGEtODk0NDk5NDItYWVjYjk3ZWYtYTEyMTFiNzY=, ActorId: [6:7439792608187910185:2539], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:01.761861Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OTM2NTc0MC1mMDczZjgxNS00Zjc0MTI1Yy1hZjFjNGMyMw==, ActorId: [6:7439792603892941905:2343], ActorState: CleanupState, TraceId: 01jd7xswpk8098w34f9qbqkf4f, EndCleanup, isFinal: 1 2024-11-21T17:55:01.761867Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MThmYjIwZGEtODk0NDk5NDItYWVjYjk3ZWYtYTEyMTFiNzY=, ActorId: [6:7439792608187910185:2539], ActorState: unknown state, Session actor destroyed 2024-11-21T17:55:01.761868Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OTM2NTc0MC1mMDczZjgxNS00Zjc0MTI1Yy1hZjFjNGMyMw==, ActorId: [6:7439792603892941905:2343], ActorState: CleanupState, TraceId: 01jd7xswpk8098w34f9qbqkf4f, Sent query response back to proxy, proxyRequestId: 2, proxyId: [6:7439792603892941099:2198] 2024-11-21T17:55:01.761871Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OTM2NTc0MC1mMDczZjgxNS00Zjc0MTI1Yy1hZjFjNGMyMw==, ActorId: [6:7439792603892941905:2343], ActorState: unknown state, TraceId: 01jd7xswpk8098w34f9qbqkf4f, Cleanup temp tables: 0 2024-11-21T17:55:01.761880Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OTM2NTc0MC1mMDczZjgxNS00Zjc0MTI1Yy1hZjFjNGMyMw==, ActorId: [6:7439792603892941905:2343], ActorState: unknown state, TraceId: 01jd7xswpk8098w34f9qbqkf4f, Session actor destroyed 2024-11-21T17:55:01.761921Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: 72057594046644480:4:/Root/test-serverless, PoolId: default, Duration: 1.325936s, CpuConsumed: 0.000123s, AdjustCpuQuota: 0 2024-11-21T17:55:01.762077Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [5:7439792608665207658:3310], Database: /Root/test-serverless, Start database fetching 2024-11-21T17:55:01.762126Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [5:7439792608665207658:3310], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2024-11-21T17:55:01.762494Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY= 2024-11-21T17:55:01.762528Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:55:01.762617Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ReadyState, TraceId: 
01jd7xsy0227rdpzbmqebyqk2e, received request, proxyRequestId: 29 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [6:7439792608187910334:2583] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2024-11-21T17:55:01.762626Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ReadyState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, request placed into pool from cache: default 2024-11-21T17:55:01.762632Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ReadyState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, Sending CompileQuery request 2024-11-21T17:55:01.773983Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, acquire mvcc snapshot 2024-11-21T17:55:01.774300Z node 6 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, read snapshot result: UNAVAILABLE, step: 1732211701820, tx id: 18446744073709551615 2024-11-21T17:55:01.774321Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, ExecutePhyTx, tx: 0x000055325D98E398 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T17:55:01.774326Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, Sending to Executer TraceId: 0 8 2024-11-21T17:55:01.774338Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, Created new KQP executer: [6:7439792608187910346:2582] isRollback: 0 2024-11-21T17:55:01.775101Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T17:55:01.775142Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1.162 QueriesCount: 2 2024-11-21T17:55:01.775179Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T17:55:01.775221Z node 6 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:01.775228Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, EndCleanup, isFinal: 0 2024-11-21T17:55:01.775238Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ExecuteState, TraceId: 01jd7xsy0227rdpzbmqebyqk2e, Sent query response back to proxy, proxyRequestId: 29, proxyId: [6:7439792603892941099:2198] 2024-11-21T17:55:01.775333Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:55:01.775349Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2024-11-21T17:55:01.775359Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: 72057594046644480:4:/Root/test-serverless, PoolId: test_pool 2024-11-21T17:55:01.775363Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ReadyState, Created new KQP executer: [6:7439792608187910356:2582] isRollback: 1 2024-11-21T17:55:01.775367Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:01.775374Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792608187910357:2591], DatabaseId: 72057594046644480:4:/Root/test-serverless, PoolId: test_pool, Start pool fetching 2024-11-21T17:55:01.775411Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: CleanupState, EndCleanup, isFinal: 1 2024-11-21T17:55:01.775420Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:01.775441Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjRkY2JhYWYtMzQ4NDYxMDktYjZiMzRhYTUtZDI5YzNiMzY=, ActorId: [6:7439792608187910333:2582], ActorState: unknown state, Session actor destroyed 2024-11-21T17:55:01.775694Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439792608187910357:2591], DatabaseId: 72057594046644480:4:/Root/test-serverless, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:01.775728Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: 72057594046644480:4:/Root/test-serverless, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:01.775845Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 6 2024-11-21T17:55:01.776001Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTk2ZDY4MDYtMjcwMmI2MzctODk2YjFkMjAtMTNjOGNiMDI=, ActorId: [5:7439792604370239354:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:55:01.776025Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTk2ZDY4MDYtMjcwMmI2MzctODk2YjFkMjAtMTNjOGNiMDI=, ActorId: [5:7439792604370239354:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:01.776029Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTk2ZDY4MDYtMjcwMmI2MzctODk2YjFkMjAtMTNjOGNiMDI=, ActorId: [5:7439792604370239354:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:55:01.776029Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:55:01.776032Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTk2ZDY4MDYtMjcwMmI2MzctODk2YjFkMjAtMTNjOGNiMDI=, ActorId: [5:7439792604370239354:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:01.776048Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTk2ZDY4MDYtMjcwMmI2MzctODk2YjFkMjAtMTNjOGNiMDI=, ActorId: [5:7439792604370239354:2299], ActorState: unknown state, Session actor destroyed 2024-11-21T17:55:01.776115Z node 5 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2024-11-21T17:55:01.776252Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] >> KqpScheme::DisableDropExternalDataSource [GOOD] >> KqpScheme::DisableDropExternalTable >> KqpScheme::CreateAndAlterTableComplex [GOOD] >> KqpScheme::CreateAlterDropTableStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterNonExistingResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 26250, MsgBus: 12744 2024-11-21T17:54:55.098699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792580211205625:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ef6/r3tmp/tmpdmCRjf/pdisk_1.dat 2024-11-21T17:54:55.364163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26250, node 1 2024-11-21T17:54:55.402503Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402519Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.452115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.452139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.453226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2024-11-21T17:54:55.766343Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12744 TClient is connected to server localhost:12744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.173638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.245913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.256197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584506174284:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584506174774:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584506174779:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792584506174781:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.572981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.579274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.587066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.768127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T17:54:56.780875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.814173Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T17:54:56.824623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2024-11-21T17:54:56.833804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16383, MsgBus: 6472 2024-11-21T17:54:57.118354Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792589915950705:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:57.118507Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ef6/r3tmp/tmpWkqVDm/pdisk_1.dat 2024-11-21T17:54:57.127155Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16383, node 2 2024-11-21T17:54:57.134677Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:57.134688Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:57.134689Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:57.134728Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6472 TClient is connected to server localhost:6472 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:57.218453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:57.218475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:57.219622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:57.220773Z node 2 :FLAT_TX_SCHEMESHARD WARN ... fyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.813793Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:54:59.914450Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.966348Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.005015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.209847Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792601621469486:2670], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:00.209864Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:00.319770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792601621469682:2732], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:00.319796Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:00.424439Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792601621469857:2786], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:00.424471Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:00.562293Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792601621470035:2848], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:00.562315Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } Trying to start YDB, gRPC: 26399, MsgBus: 16146 2024-11-21T17:55:00.809143Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792605323361856:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.809440Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ef6/r3tmp/tmpHHsO6v/pdisk_1.dat 2024-11-21T17:55:00.816629Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26399, node 5 2024-11-21T17:55:00.825116Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.825133Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.825134Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.825168Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16146 TClient is connected to server localhost:16146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.909498Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.909525Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.910575Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.911299Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.912167Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T17:55:00.920788Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.929483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:00.949348Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.959522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.083781Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792609618330682:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.083809Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.089448Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.095171Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.101724Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.109058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.115571Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.122783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.131359Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792609618331186:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.131374Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.131383Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792609618331191:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.131810Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:01.135887Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792609618331193:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:01.810542Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:55:01.856252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.896169Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:55:01.941625Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.986170Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.026779Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 >> SubDomainWithReboots::CreateTabletInsideWithStoragePools >> KqpScheme::DoubleCreateResourcePoolClassifier [GOOD] >> test.py::test[select-complex_filter_with_order-default.txt-Results] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Debug] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T17:54:53.931602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:54:53.931619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:53.931622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:54:53.931625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:54:53.931635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:54:53.931637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:54:53.931643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:54:53.931690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2024-11-21T17:54:53.938164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:54:53.938178Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:53.939773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:54:53.940260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:54:53.940286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:54:53.941118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:54:53.941224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:54:53.941281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:53.941340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:54:53.941900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:53.942086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:53.942092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:53.942115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:54:53.942120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:53.942124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:54:53.942131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.942846Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T17:54:53.952497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:54:53.952569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.952614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:54:53.952664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:54:53.952668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.953224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:53.953240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:54:53.953269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.953275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:54:53.953277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:54:53.953281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:54:53.953518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.953523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:54:53.953526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:54:53.953721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.953726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.953729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:53.953734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:54:53.954108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:54:53.954384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:54:53.954419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:54:53.954539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:54:53.954553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:54:53.954559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:53.954607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:54:53.954611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:54:53.954630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2024-11-21T17:54:53.954638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:54:53.954896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:54:53.954901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:54:53.954931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:54:53.954934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T17:54:53.954992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:54:53.954997Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:54:53.955003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:54:53.955006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:53.955010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:54:53.955012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:54:53.955015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:54:53.955017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:54:53.955023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:54:53.955027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:54:53.955029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T17:54:53.955216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:53.955224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T17:54:53.955227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T17:54:53.955230Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T17:54:53.955232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:54:53.955240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
ShardId: 72075186233409548 CpuTimeUsec: 159 } } 2024-11-21T17:55:02.340685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:55:02.340696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2024-11-21T17:55:02.340711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969695 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2024-11-21T17:55:02.340716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2024-11-21T17:55:02.340788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.340794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T17:55:02.340801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T17:55:02.340805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2024-11-21T17:55:02.340814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T17:55:02.340836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2024-11-21T17:55:02.340853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T17:55:02.340863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:55:02.341215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.341434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.341468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.341471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:55:02.341507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:55:02.341525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.341528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T17:55:02.341531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 
103, path id: 3 2024-11-21T17:55:02.341622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.341631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:55:02.341641Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.341643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:55:02.341647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T17:55:02.341781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:55:02.341788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:55:02.341791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:55:02.341794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2024-11-21T17:55:02.341798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T17:55:02.341930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:55:02.341937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T17:55:02.341940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T17:55:02.341942Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:55:02.341944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:02.341951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T17:55:02.342291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.342298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:02.342349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:55:02.342371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T17:55:02.342374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:55:02.342377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T17:55:02.342386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2024-11-21T17:55:02.342390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T17:55:02.342393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T17:55:02.342398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T17:55:02.342409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:02.342458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.342461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:55:02.342784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:55:02.342821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T17:55:02.343065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.343071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2024-11-21T17:55:02.343113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T17:55:02.343117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1341:3268] 2024-11-21T17:55:02.343218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2024-11-21T17:55:02.343671Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:02.343705Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 43us result status StatusSuccess 2024-11-21T17:55:02.343789Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScheme::AlterTableAlterIndex [GOOD] >> KqpScheme::AlterTableAlterVectorIndex >> KqpOlapTypes::Decimal35 [GOOD] >> KqpOlapTypes::DecimalCsv >> KqpScheme::AlterCompressionLevelInColumnFamily [GOOD] >> KqpScheme::AlterIndexImplTable |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DoubleCreateResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 29206, MsgBus: 23422 2024-11-21T17:54:55.099035Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583999481148:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.099055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ecf/r3tmp/tmpWagOLv/pdisk_1.dat 2024-11-21T17:54:55.366441Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29206, node 1 2024-11-21T17:54:55.403172Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.403186Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.428662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.428688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.429790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23422 TClient is connected to server localhost:23422 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.169646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.181005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.198193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.207314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.215895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.235236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588294449803:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.235263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.349386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588294450295:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588294450300:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.375869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792588294450302:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.569566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14625, MsgBus: 26615 2024-11-21T17:54:56.737333Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792588300480065:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.737368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ecf/r3tmp/tmpfAvtbR/pdisk_1.dat 2024-11-21T17:54:56.758856Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14625, node 2 2024-11-21T17:54:56.769523Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.769532Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.769533Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.769559Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26615 TClient is connected to server localhost:26615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.841199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.841222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.841454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.842300Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.852801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.865467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.882922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.892194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.033762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792592595448896:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:57.033794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: ... ta/workload_manager/pools/MyResourcePool', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:91 Trying to start YDB, gRPC: 29939, MsgBus: 13942 2024-11-21T17:55:01.029309Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792606714478058:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:01.029327Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ecf/r3tmp/tmpsrndXC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29939, node 5 2024-11-21T17:55:01.045346Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:55:01.045435Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:01.045442Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:01.045443Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:01.045468Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13942 TClient is connected to server localhost:13942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:01.129721Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:01.129752Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:01.130826Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:01.131953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.140017Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:01.147697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.167315Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.180389Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.306068Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792606714479601:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.306084Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.311363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.317311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.371818Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.382113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.388779Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.396040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.404424Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792606714480121:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.404426Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792606714480116:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.404439Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.405008Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:01.408990Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792606714480123:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:02.030723Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:55:02.075749Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.117636Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:55:02.165451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.218245Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.268597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.568481Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7439792611009448641:2689], TxId: 281474976715703, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xsyqwb4t8hrp36w95b3pq. SessionId : ydb://session/3?node_id=5&id=ZDUxYzk4ZmYtNDliZThlZTgtZDFjNzcyODYtYzcwOGUxMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T17:55:02.568627Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7439792611009448643:2690], TxId: 281474976715703, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=ZDUxYzk4ZmYtNDliZThlZTgtZDFjNzcyODYtYzcwOGUxMWY=. TraceId : 01jd7xsyqwb4t8hrp36w95b3pq. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [5:7439792611009448637:2664], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T17:55:02.569752Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZDUxYzk4ZmYtNDliZThlZTgtZDFjNzcyODYtYzcwOGUxMWY=, ActorId: [5:7439792611009448546:2664], ActorState: ExecuteState, TraceId: 01jd7xsyqwb4t8hrp36w95b3pq, Create QueryResponse for error on request, msg: 2024-11-21T17:55:02.571823Z node 5 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jd7xsyqr9vefzywdr9w52z0h" } } } } ;request=session_id: "ydb://session/3?node_id=5&id=ZDUxYzk4ZmYtNDliZThlZTgtZDFjNzcyODYtYzcwOGUxMWY=" tx_control { tx_id: "01jd7xsyqr9vefzywdr9w52z0h" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers`\nSELECT database,name,config,rank FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "database" type { type_id: UTF8 } } members { name: "name" type { type_id: UTF8 } } members { name: "config" type { type_id: JSON_DOCUMENT } } members { name: "rank" type { type_id: INT64 } } } } } } value { items { items { text_value: "/Root" } items { text_value: "MyResourcePoolClassifier" } items { text_value: "{\"resource_pool\":\"test_pool\"}" } items { int64_value: 1 } } } } } ; >> KqpScheme::CreateAlterDropTableStore [GOOD] >> KqpScheme::DisableResourcePoolClassifiersOnServerless [GOOD] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpScheme::DisableDropExternalTable [GOOD] >> KqpScheme::DisableResourcePoolClassifiers >> ForceDropWithReboots::Fake [GOOD] >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] >> SubDomainWithReboots::DeclareAndDefine >> KqpScheme::TouchIndexAfterMoveIndexWriteReplace [GOOD] >> KqpScheme::TouchIndexAfterMoveTableRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] Test command err: 2024-11-21T17:53:52.786249Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:52.786829Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:52.786892Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:53:52.787003Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:53:52.787240Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# 
[0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:53:52.787248Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:52.787367Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:45:2073] ControllerId# 72057594037932033 2024-11-21T17:53:52.787369Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:52.787392Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:52.787445Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:52.789096Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:52.789106Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:52.789338Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:53:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.789366Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:54:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.789380Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:55:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.789406Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:56:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.789420Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:57:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.789435Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:58:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.789454Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:44:2072] Create Queue# [1:59:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.789458Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:52.789467Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:45:2073] 2024-11-21T17:53:52.789470Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:45:2073] 2024-11-21T17:53:52.789476Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:52.789481Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:52.789627Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:52.789638Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:52.790477Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:52.790501Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:52.790612Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:67:2071] ControllerId# 72057594037932033 2024-11-21T17:53:52.790614Z node 2 :BS_NODE DEBUG: 
{NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:52.790627Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:52.790653Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:52.790745Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:52.790775Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:53:52.790779Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:52.790963Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:73:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.790978Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:74:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.790992Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:75:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.791010Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:76:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.791023Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:77:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.791044Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:78:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.791059Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:66:2070] Create Queue# [2:79:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:52.791061Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:52.791067Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:67:2071] 2024-11-21T17:53:52.791070Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:67:2071] 2024-11-21T17:53:52.791089Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:52.791096Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:52.791141Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:52.791160Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:52.793929Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:45:2073] 2024-11-21T17:53:52.793952Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:52.793958Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:52.794038Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:67:2071] 2024-11-21T17:53:52.794053Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:52.794062Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:52.794086Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:52.794109Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:45:2073] 2024-11-21T17:53:52.794114Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# 
false PendingEvents.size# 0 2024-11-21T17:53:52.794121Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:52.794734Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:52.795101Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:52.795118Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:53:52.795152Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:52.795175Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:52.795264Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:52.795274Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:52.795278Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:52.795313Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:89:2084] 2024-11-21T17:53:52.795322Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:49:2064] 2024-11-21T17:53:52.795326Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:49:2064] 2024-11-21T17:53:52.795360Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:52.795429Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:52.795468Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T17:53:52.795477Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:52.795482Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:52.795498Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:52.795516Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T17:53:52.795521Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T17:53:52.795571Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:52.795578Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:52.795593Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:52.795618Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:52.795627Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:95:2091] 2024-11-21T17:53:52.795683Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 
VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID ... etID: 72075186224037888} 2024-11-21T17:54:57.859478Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.859482Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:54:57.859484Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T17:54:57.859488Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T17:54:57.859491Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] immediate retry [8:404:2362] 2024-11-21T17:54:57.859494Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [8:404:2362] 2024-11-21T17:54:57.859499Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:57.859505Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:54:57.859511Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:54:57.859513Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:54:57.859515Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:54:57.859518Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.859522Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.859526Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.859531Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:54:57.859533Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T17:54:57.859537Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T17:54:57.859539Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [8:404:2362] 2024-11-21T17:54:57.880075Z node 8 :BS_PROXY_PUT INFO: [65bcfa2ad0971069] bootstrap ActorId# [8:408:2364] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:199:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:54:57.880136Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] Id# [72057594037927937:2:9:0:0:199:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:54:57.880145Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] restore Id# 
[72057594037927937:2:9:0:0:199:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:54:57.880156Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG33 2024-11-21T17:54:57.880160Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG32 2024-11-21T17:54:57.880195Z node 8 :BS_PROXY DEBUG: Send to queueActorId# [8:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:199:1] FDS# 199 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:54:57.880712Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:199:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 81566 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:54:57.880751Z node 8 :BS_PROXY_PUT DEBUG: [65bcfa2ad0971069] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T17:54:57.880760Z node 8 :BS_PROXY_PUT INFO: [65bcfa2ad0971069] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:54:57.880806Z node 8 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T17:54:57.880836Z node 8 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2024-11-21T17:54:57.880846Z node 8 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2024-11-21T17:54:57.891002Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [8:404:2362] 2024-11-21T17:54:57.891029Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [8:404:2362] 2024-11-21T17:54:57.891055Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:57.891091Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:54:57.891119Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:54:57.891127Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:54:57.891133Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:54:57.891139Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.891148Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.891152Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.891166Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 
72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:54:57.891170Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T17:54:57.891180Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T17:54:57.891184Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [8:404:2362] 2024-11-21T17:54:57.911502Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [8:404:2362] 2024-11-21T17:54:57.911530Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [8:404:2362] 2024-11-21T17:54:57.911555Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:57.911589Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:54:57.911624Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:54:57.911631Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:54:57.911635Z node 8 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:54:57.911640Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.911646Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.911650Z node 8 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:54:57.911666Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:54:57.911669Z node 8 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T17:54:57.911678Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [8:404:2362] 2024-11-21T17:54:57.911682Z node 8 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed, check aliveness [8:404:2362] 2024-11-21T17:54:57.932061Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [8:411:2365] 2024-11-21T17:54:57.932092Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [8:411:2365] 2024-11-21T17:54:57.932113Z node 8 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:57.932124Z node 8 :TABLET_RESOLVER DEBUG: SelectForward node 8 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [8:264:2257] 2024-11-21T17:54:57.932138Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [8:411:2365] 2024-11-21T17:54:57.932146Z node 8 :PIPE_CLIENT DEBUG: 
TClient[72057594037927937] forward result local node, try to connect [8:411:2365] 2024-11-21T17:54:57.932153Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [8:411:2365] 2024-11-21T17:54:57.932173Z node 8 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [8:411:2365] 2024-11-21T17:54:57.932214Z node 8 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([8:411:2365]) [8:412:2366] 2024-11-21T17:54:57.932248Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [8:411:2365] 2024-11-21T17:54:57.932255Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [8:411:2365] 2024-11-21T17:54:57.932259Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [8:411:2365] 2024-11-21T17:54:57.932268Z node 8 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [8:404:2362] EventType# 268697616 2024-11-21T17:54:57.932286Z node 8 :HIVE WARN: HIVE#72057594037927937 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2024-11-21T17:54:57.932301Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received poison pill [8:411:2365] 2024-11-21T17:54:57.932310Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [8:411:2365] 2024-11-21T17:54:57.932316Z node 8 :PIPE_SERVER DEBUG: [72057594037927937] Got PeerClosed from# [8:411:2365] 2024-11-21T17:54:57.932333Z node 8 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerDisconnected([8:411:2365]) [8:412:2366] >> SubDomainWithReboots::Create ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAlterDropTableStore [GOOD] Test command err: Trying to start YDB, gRPC: 5334, MsgBus: 6218 2024-11-21T17:54:59.901557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792600549791457:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.901584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000eb2/r3tmp/tmpzMCmo6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5334, node 1 2024-11-21T17:54:59.964710Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:59.967497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.967509Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.967510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.967539Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6218 2024-11-21T17:55:00.001776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.001805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:6218 2024-11-21T17:55:00.002948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.031222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.034294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.093663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.108322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.117936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.162093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792604844760294:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.162117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.191862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.196992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.205977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.212899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.219731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.226768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.235583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792604844760786:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.235604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.235631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792604844760791:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.236106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:00.240091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792604844760793:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:00.394254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithMinMaxPartitions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithMinMaxPartitions" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732211700492 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TableWithMinMaxPartitions" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) 2024-11-21T17:55:00.455107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/TableWithMinMaxPartitions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableWithMinMaxPartitions" PathId: 16 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1732211700492 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TableWithMinMaxPartitions" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNa... (TRUNCATED) Trying to start YDB, gRPC: 23941, MsgBus: 64983 2024-11-21T17:55:00.594540Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792603122303907:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.594811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000eb2/r3tmp/tmp2UlGCc/pdisk_1.dat 2024-11-21T17:55:00.602571Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23941, node 2 2024-11-21T17:55:00.611409Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.611423Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.611425Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.611459Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64983 TClient is connected to server localhost:64983 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } D ... pp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:02.971264Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[5:7439792613461063944:2311];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:55:02.971270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:02.971274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[5:7439792613461063944:2311];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:55:02.971274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:02.971280Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:02.971283Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[5:7439792613461063944:2311];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:55:02.971284Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:02.971292Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[5:7439792613461063944:2311];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:55:02.971301Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[5:7439792613461063944:2311];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:55:02.971311Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[5:7439792613461063944:2311];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:55:02.971317Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:02.971323Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:02.971338Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:02.971342Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:55:02.971352Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:02.971363Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:02.971383Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:02.971392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:02.971403Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:02.971414Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:55:02.971668Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:55:02.971681Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:55:02.971691Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:55:02.971701Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:55:02.971715Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:55:02.971725Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:55:02.971733Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:55:02.971738Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:02.971751Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:02.971760Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:02.971766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:02.971775Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:02.971973Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:02.971982Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:02.971991Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:02.971994Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:55:02.972001Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:02.972008Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:02.972018Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:02.972025Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:02.972032Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:02.972034Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:55:03.003573Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792617756031485:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.003592Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.005064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropColumnStore, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.013266Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2024-11-21T17:55:03.013278Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037892 not found 2024-11-21T17:55:03.013278Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037890 not found 2024-11-21T17:55:03.013279Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found 2024-11-21T17:55:03.013280Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037896 not found 2024-11-21T17:55:03.013281Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037897 not found 2024-11-21T17:55:03.013282Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037893 not found 2024-11-21T17:55:03.013717Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037889 not found 2024-11-21T17:55:03.014033Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037894 not found 2024-11-21T17:55:03.014041Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037891 not found |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableResourcePoolClassifiersOnServerless [GOOD] Test command err: Trying to start YDB, gRPC: 5220, MsgBus: 22923 2024-11-21T17:54:55.098960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792582200552470:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f52/r3tmp/tmpZqAv90/pdisk_1.dat 2024-11-21T17:54:55.367999Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.368491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.368514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.392411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5220, node 1 2024-11-21T17:54:55.402897Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402911Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766562Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766564Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22923 TClient is connected to server localhost:22923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.183976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.244329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.255129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.286467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586495521122:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.286488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.362951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586495521615:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792586495521620:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.376083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792586495521622:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.570173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.574256Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710672, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/ExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:100 Trying to start YDB, gRPC: 8203, MsgBus: 30231 2024-11-21T17:54:56.721937Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792586131088969:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.721956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f52/r3tmp/tmp6pTx3l/pdisk_1.dat 2024-11-21T17:54:56.733272Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8203, node 2 2024-11-21T17:54:56.747261Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.747273Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.747275Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.747310Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30231 TClient is connected to server localhost:30231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:56.822457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.822484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.823536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.824657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.830892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.839980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.857727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.868145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 7 ... onId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ReadyState, TraceId: 01jd7xsywmaaz0t72v19da0atp, received request, proxyRequestId: 48 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [8:7439792611425400918:2704] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2024-11-21T17:55:02.676685Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ReadyState, TraceId: 01jd7xsywmaaz0t72v19da0atp, request placed into pool from cache: default 2024-11-21T17:55:02.676697Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ReadyState, TraceId: 01jd7xsywmaaz0t72v19da0atp, Sending CompileQuery request 2024-11-21T17:55:02.678178Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: [main][8:7439792602835464639:2513][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 24, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:55:02.678197Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: [main][8:7439792602835464639:2513][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 25, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:55:02.678393Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439792611425400920:2705], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2024-11-21T17:55:02.678806Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ExecuteState, TraceId: 01jd7xsywmaaz0t72v19da0atp, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2024-11-21T17:55:02.678824Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ExecuteState, TraceId: 01jd7xsywmaaz0t72v19da0atp, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:02.678827Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ExecuteState, TraceId: 01jd7xsywmaaz0t72v19da0atp, EndCleanup, isFinal: 0 2024-11-21T17:55:02.678861Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ExecuteState, TraceId: 01jd7xsywmaaz0t72v19da0atp, Sent query response back to proxy, proxyRequestId: 48, proxyId: [8:7439792602835463861:2191] 2024-11-21T17:55:02.679627Z node 8 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2024-11-21T17:55:02.679713Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:55:02.679714Z node 8 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2024-11-21T17:55:02.679725Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:02.679727Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:55:02.679729Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:02.679749Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZGI2ZWUyMGUtOGE2ODYzYjgtODE4YzJjM2MtY2M5ZmVmNmE=, ActorId: [8:7439792611425400917:2703], ActorState: unknown state, Session actor destroyed 2024-11-21T17:55:02.767699Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk= 2024-11-21T17:55:02.767756Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:55:02.767865Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ReadyState, TraceId: 01jd7xsyzffbdatd02wv7azmfn, received request, proxyRequestId: 50 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [8:7439792611425400932:2710] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2024-11-21T17:55:02.767886Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ReadyState, TraceId: 01jd7xsyzffbdatd02wv7azmfn, request placed into pool from cache: default 2024-11-21T17:55:02.767901Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ReadyState, TraceId: 01jd7xsyzffbdatd02wv7azmfn, Sending CompileQuery request 2024-11-21T17:55:02.769337Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: [main][8:7439792602835464639:2513][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 26, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:55:02.769352Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: [main][8:7439792602835464639:2513][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 27, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:55:02.769573Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439792611425400934:2711], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2024-11-21T17:55:02.769648Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ExecuteState, TraceId: 01jd7xsyzffbdatd02wv7azmfn, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2024-11-21T17:55:02.769663Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ExecuteState, TraceId: 01jd7xsyzffbdatd02wv7azmfn, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:02.769672Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ExecuteState, TraceId: 01jd7xsyzffbdatd02wv7azmfn, EndCleanup, isFinal: 0 2024-11-21T17:55:02.769706Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ExecuteState, TraceId: 01jd7xsyzffbdatd02wv7azmfn, Sent query response back to proxy, proxyRequestId: 50, proxyId: [8:7439792602835463861:2191] 2024-11-21T17:55:02.769879Z node 8 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2024-11-21T17:55:02.769905Z node 8 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2024-11-21T17:55:02.769933Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:55:02.769942Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:02.769945Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:55:02.769947Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:02.769960Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YmRhNDllYTAtYjE3MzlmZmMtMmFmNDQ0MjgtOGM4OWIxODk=, ActorId: [8:7439792611425400931:2709], ActorState: unknown state, Session actor destroyed |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpScheme::AlterTableAlterVectorIndex [GOOD] >> KqpScheme::AlterTableAlterMissedIndex >> ForceDropWithReboots::ForceDeleteCreateTableInFly |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::Fake [GOOD] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] Test command err: Trying to start YDB, gRPC: 5581, MsgBus: 18558 2024-11-21T17:54:55.098925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792579964457461:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f6d/r3tmp/tmpMn7hGM/pdisk_1.dat 2024-11-21T17:54:55.361725Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.372354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.372381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.376963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5581, node 1 2024-11-21T17:54:55.402513Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402529Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766379Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2024-11-21T17:54:55.766381Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18558 TClient is connected to server localhost:18558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.178350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.185378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.245221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.257479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584259426115:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.349246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.363293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.371509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584259426606:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.371594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792584259426611:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.372902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.375882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792584259426613:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:57.582157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976711375:0, at schemeshard: 72057594046644480 2024-11-21T17:54:57.595870Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439792588554402835:4268], TxId: 281474976711384, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xssx9b3nanx5pevvxgk5m. SessionId : ydb://session/3?node_id=1&id=NmRjMTAxMjAtNzNiNDYzNmEtMzAwYmVmYTMtNDljOTlmNzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037893 node# 1 state# Ready) } } 2024-11-21T17:54:57.595891Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439792588554402837:4270], TxId: 281474976711381, task: 1. Ctx: { TraceId : 01jd7xssxa27vmhb5p3sk28v14. SessionId : ydb://session/3?node_id=1&id=NzQyMzQ4OGUtYWQwNjliN2UtYjM5OTk5ZWMtMWE3YzkxMmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037893 node# 1 state# Ready) } } 2024-11-21T17:54:57.596661Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439792588554402835:4268], TxId: 281474976711384, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd7xssx9b3nanx5pevvxgk5m. SessionId : ydb://session/3?node_id=1&id=NmRjMTAxMjAtNzNiNDYzNmEtMzAwYmVmYTMtNDljOTlmNzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037893 node# 1 state# Ready) } }. 2024-11-21T17:54:57.596674Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439792588554402837:4270], TxId: 281474976711381, task: 1. Ctx: { TraceId : 01jd7xssxa27vmhb5p3sk28v14. SessionId : ydb://session/3?node_id=1&id=NzQyMzQ4OGUtYWQwNjliN2UtYjM5OTk5ZWMtMWE3YzkxMmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037893 node# 1 state# Ready) } }. 2024-11-21T17:54:57.596753Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439792588554402829:4265], TxId: 281474976711383, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzM2NDg1YzctZjRiMWI2MmItMmZkYjljODAtMTU1NzY5ZTg=. TraceId : 01jd7xssxa142jzrq6f544r7vp. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037893 node# 1 state# Ready) } } 2024-11-21T17:54:57.596763Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439792588554402829:4265], TxId: 281474976711383, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzM2NDg1YzctZjRiMWI2MmItMmZkYjljODAtMTU1NzY5ZTg=. TraceId : 01jd7xssxa142jzrq6f544r7vp. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037893 node# 1 state# Ready) } }. 2024-11-21T17:54:57.598068Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976711384, task: 2. Input channel actor is unavailable 2024-11-21T17:54:57.598084Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976711384, task: 2. Skip unexpected event 65542 at DeadState 2024-11-21T17:54:57.598086Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976711384, task: 2. Skip unexpected event 271646927 at DeadState 2024-11-21T17:54:57.598095Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439792588554402836:4269], TxId: 281474976711384, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NmRjMTAxMjAtNzNiNDYzNmEtMzAwYmVmYTMtNDljOTlmNzU=. TraceId : 01jd7xssx9b3nanx5pevvxgk5m. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439792588554402763:2466], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T17:54:57.598201Z node 1 :KQP ... pe: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.717045Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.724515Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.738631Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.809727Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792604170059038:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.809748Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.809788Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792604170059043:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.810410Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:00.811964Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792604170059045:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 16550, MsgBus: 25320 2024-11-21T17:55:01.270628Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792609436903387:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:01.270836Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f6d/r3tmp/tmpsgKeGY/pdisk_1.dat 2024-11-21T17:55:01.278913Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16550, node 5 2024-11-21T17:55:01.288754Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:01.288768Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:01.288770Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:01.288811Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25320 TClient is connected to server localhost:25320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:01.370863Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:01.370889Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:01.371968Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:01.373228Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.385055Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.392734Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:01.408032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.418173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.566170Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792609436904932:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.566189Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.570312Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.625244Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.679727Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.689456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.696870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.703932Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.711634Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792609436905449:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.711647Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792609436905454:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.711653Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.712044Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:01.717156Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792609436905456:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:02.271938Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T17:55:02.331983Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.387006Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2024-11-21T17:55:02.440648Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.502950Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.561512Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.825115Z node 5 :KQP_GATEWAY WARN: [TQueryBase] [TRanksCheckerActor] TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=5&id=NzJjM2NlNWYtNGJkZGU1MDgtZDA1ZjU5NDEtZjA0NDA0ZGM=, TxId: 01jd7xsz1572htezr0y17v1y6s 2024-11-21T17:55:03.024184Z node 5 :KQP_GATEWAY WARN: [TQueryBase] [TRanksCheckerActor] TraceId: /Root, Finish with ALREADY_EXISTS, Issues: {
: Error: Classifier with rank 42 already exists, its name ClassifierRank42 }, SessionId: ydb://session/3?node_id=5&id=OGExNWU0MzAtYWQ3ZDYzMjQtOTg0NjhkNGQtYWVlYTZiYmQ=, TxId: 01jd7xsz7c98t540d1wx6p8mc3 2024-11-21T17:55:03.033911Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792618026841549:2792], DatabaseId: /Root, PoolId: test_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } 2024-11-21T17:55:03.033929Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool test_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool test_pool not found or you don't have access permissions } |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> test.py::test[window-win_func_lead_lag_opt--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr--Debug] |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[select-dot_name_subrequest-default.txt-Debug] [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Plan] >> KqpOlapTypes::DecimalCsv [GOOD] >> KqpOlapTypes::AttributeNegative >> KqpScheme::AlterIndexImplTable [GOOD] >> KqpScheme::AlterIndexImplTableUsingPublicAPI >> test.py::test[select-dot_name_subrequest-default.txt-Plan] [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> test.py::test[select-dot_name_subrequest-default.txt-Results] >> THiveTest::TestHiveBalancerUselessNeighbourMoves |86.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpScheme::TouchIndexAfterMoveTableRead [GOOD] >> KqpScheme::TouchIndexAfterMoveTableWrite >> ForceDropWithReboots::ForceDeleteCreateSubdomainInfly |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteSplitInFly |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ForceDropWithReboots::ForceDropDeleteInFly >> KqpScheme::AlterTableAlterMissedIndex [GOOD] >> SubDomainWithReboots::Fake [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::RootWithStoragePools >> test.py::test[select-dot_name_subrequest-default.txt-Results] [GOOD] >> test.py::test[select-extend_and_take--Debug] >> KqpOlapTypes::AttributeNegative [GOOD] >> KqpOlapTypes::JsonImport |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTableAlterMissedIndex [GOOD] Test command err: Trying to start YDB, gRPC: 64108, MsgBus: 25507 2024-11-21T17:54:59.859680Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792601046497726:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:59.859872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ebd/r3tmp/tmpZTwkzm/pdisk_1.dat 2024-11-21T17:54:59.899137Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64108, node 1 2024-11-21T17:54:59.915722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:59.915733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:59.915735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:59.915769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25507 TClient is connected to server localhost:25507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T17:54:59.959828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:59.959855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:59.960205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T17:54:59.960881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:54:59.970931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.985486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:59.999706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.011043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.112416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792605341466562:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.112452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.138669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.144634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.156809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.163743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.171070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.177973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.186222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792605341467067:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.186241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792605341467072:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.186244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:00.186695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:00.191375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792605341467074:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:55:00.348132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.354430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.367263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.513093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T17:55:00.522787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.543536Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T17:55:00.548078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2024-11-21T17:55:00.557760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31928, MsgBus: 17216 2024-11-21T17:55:00.722850Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792604605987963:2056];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ebd/r3tmp/tmpcY8rRh/pdisk_1.dat 2024-11-21T17:55:00.728368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:55:00.731222Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31928, node 2 2024-11-21T17:55:00.740190Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.740200Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.740202Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.740253Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17216 TClient is connected to server localhost:17216 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.822561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.822585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.823725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.824890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 wa ... emeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.027537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.035069Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.152989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792615820065651:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.153012Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.156424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.162779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.174207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.180843Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.187831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.195113Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.203069Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792615820066155:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.203079Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792615820066160:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.203089Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.203505Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:03.208315Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792615820066162:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:03.391972Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10488, MsgBus: 3328 2024-11-21T17:55:03.578886Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792617833088018:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:03.578903Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ebd/r3tmp/tmpraMCMY/pdisk_1.dat 2024-11-21T17:55:03.587337Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10488, node 5 2024-11-21T17:55:03.598197Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:03.598209Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:03.598210Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:03.598239Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3328 TClient is connected to server localhost:3328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:03.678987Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:03.679017Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:03.680084Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:03.681760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.686247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
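The repeated "Resource pool default not found or you don't have access permissions" warnings in this block are emitted while the KQP workload service tries to fetch the default resource pool before the test harness has created it; the ESchemeOpCreateResourcePool suboperation and the "Scheduled retry ... doublechecking" message that follow show the pool being created on first use, after which the warnings stop. As a minimal sketch only (assuming current YQL workload-management syntax; the setting names below are illustrative assumptions, not taken from this log), such a pool could also be created explicitly:

CREATE RESOURCE POOL test_pool WITH (   -- test_pool mirrors the pool name from the earlier NOT_FOUND error
    CONCURRENT_QUERY_LIMIT = 10,        -- assumed setting: cap on queries executing in the pool
    QUEUE_SIZE = 100                    -- assumed setting: cap on queries waiting in the pool's queue
);

A query that references a pool which has not been created, or is not accessible to the caller, fails with the same NOT_FOUND issue seen above for test_pool.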
2024-11-21T17:55:03.693607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.708114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.721129Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.823968Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792617833089562:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.823989Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.829278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.835201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.845890Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.853083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.860092Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.866670Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.875845Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792617833090064:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.875871Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.875879Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792617833090069:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.876450Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:03.880435Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792617833090071:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:04.033592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.040131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.049206Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::Fake [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> TPQTest::TestManyConsumers [GOOD] >> KqpScheme::DisableResourcePoolClassifiers [GOOD] >> KqpScheme::DisableMetadataObjectsOnServerless >> KqpOlapTypes::JsonImport [GOOD] >> KqpScheme::TouchIndexAfterMoveTableWrite [GOOD] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> test.py::test[select-extend_and_take--Debug] [GOOD] >> test.py::test[select-extend_and_take--Plan] [GOOD] >> test.py::test[select-extend_and_take--Results] >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerHighUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:39.071583Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.071600Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.074965Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.076976Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 
1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:39.077122Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:39.077469Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:53:39.077732Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:53:39.077981Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.079033Z node 1 :PERSQUEUE INFO: new Cookie default|19dca8a3-c3c59e99-6c603785-8ec44d1b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T17:53:39.310358Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.310379Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.313310Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.313466Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T17:53:39.313566Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T17:53:39.314014Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T17:53:39.314215Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T17:53:39.314493Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.315789Z node 2 :PERSQUEUE INFO: new Cookie default|d4905b5d-d111df29-9dab824b-96d46171_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] 2024-11-21T17:53:39.549974Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.549991Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] Leader for TabletID 72057594037927938 is [3:151:2172] sender: [3:152:2057] recipient: [3:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:177:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.552884Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.553054Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2024-11-21T17:53:39.553135Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:184:2197] 2024-11-21T17:53:39.553529Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:39.553793Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:185:2198] 2024-11-21T17:53:39.554028Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.554980Z node 3 :PERSQUEUE INFO: new Cookie default|76df57dc-f4fee41d-99e8a1e-2302c1ad_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] 2024-11-21T17:53:39.793898Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.793920Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:147:2057] recipient: [4:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:147:2057] recipient: [4:145:2168] Leader for TabletID 72057594037927938 is [4:151:2172] sender: [4:152:2057] recipient: [4:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:177:2057] recipient: [4:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:39.796948Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:39.797095Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 ... 
Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.818669Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.820143Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.822826Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.822837Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1009:3005], now have 1 active actors on pipe 2024-11-21T17:55:04.822852Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.823134Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.824703Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.827968Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.827985Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1012:3008], now have 1 active actors on pipe 2024-11-21T17:55:04.827996Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.828366Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.829698Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.832188Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.832197Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1015:3011], now have 1 active actors on pipe 2024-11-21T17:55:04.832213Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.832568Z node 
87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.833752Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.836179Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.836189Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1018:3014], now have 1 active actors on pipe 2024-11-21T17:55:04.836197Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.836521Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.837695Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.839918Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.839946Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1021:3017], now have 1 active actors on pipe 2024-11-21T17:55:04.839963Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.840201Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.841360Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.843772Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.843780Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1024:3020], now have 1 active actors on pipe 2024-11-21T17:55:04.843795Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.844071Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.845387Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.847972Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.847981Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1027:3023], now have 1 active actors on pipe 2024-11-21T17:55:04.847988Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.848290Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.849519Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.852060Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.852070Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1030:3026], now have 1 active actors on pipe 2024-11-21T17:55:04.852078Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.852408Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.853733Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.856319Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T17:55:04.856330Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1033:3029], now have 1 active actors on pipe 2024-11-21T17:55:04.856338Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvStatus 2024-11-21T17:55:04.856629Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 
UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2024-11-21T17:55:04.857970Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2024-11-21T17:55:04.860953Z node 87 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [87:1036:3032] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapTypes::JsonImport [GOOD] Test command err: Trying to start YDB, gRPC: 26609, MsgBus: 28618 2024-11-21T17:55:00.896072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792603753723840:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.896163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ea2/r3tmp/tmpbZgtcw/pdisk_1.dat 2024-11-21T17:55:00.937832Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26609, node 1 2024-11-21T17:55:00.953636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.953650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.953652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.953687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28618 TClient is connected to server localhost:28618 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:55:00.995556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.995576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.996671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.998671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, dec Decimal(22,9) NOT NULL, PRIMARY KEY (id, dec)) PARTITION BY HASH(id, dec) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:55:01.161362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792608048691523:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.161387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.195471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.201988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:55:01.202020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:55:01.202045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:55:01.202063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:55:01.202077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:55:01.202094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:55:01.202108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:55:01.202126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:55:01.202142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:55:01.202152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:55:01.202162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:55:01.202173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792608048691600:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:55:01.202527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:55:01.202545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:55:01.202557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:55:01.202562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:55:01.202601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:55:01.202610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:55:01.202619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:55:01.202626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:01.202635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:01.202644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:01.202650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:01.202657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:01.202702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:01.202712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:01.202727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:01.202732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:55:01.202746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:01.202755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:01.202772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:01.202788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:01.202803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:01.202811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalizat ... eateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:04.725991Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:04.726014Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:04.727063Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:04.728202Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:04.728834Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnTableTest` (id Int64 NOT NULL, json Json, json_doc JsonDocument, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:55:04.907077Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792621684470544:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:04.907099Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:04.909106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.914870Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:55:04.914898Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:55:04.914947Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:55:04.914974Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:55:04.914998Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:55:04.915023Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:55:04.915050Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:55:04.915071Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:55:04.915095Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:55:04.915117Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:55:04.915141Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:55:04.915163Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792621684470590:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:55:04.915698Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:55:04.915712Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:55:04.915720Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:55:04.915723Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:55:04.915736Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:55:04.915743Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:55:04.915750Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:55:04.915753Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:04.915760Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:04.915767Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:04.915771Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:04.915775Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:04.915814Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:04.915823Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:04.915833Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:04.915840Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:55:04.915848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:04.915855Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:04.915865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:04.915872Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:04.915879Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:04.915885Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=512;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=512;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=400;columns=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=400;columns=3; Bulk upsert to table '/Root/ColumnTableTest'bad batch in data: Invalid UTF8 sequence at string index 0; order:id, json, json_doc |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> KqpScheme::AlterIndexImplTableUsingPublicAPI [GOOD] >> KqpScheme::AlterGroup >> KqpScheme::CreateDropTableMultipleTime [GOOD] >> KqpScheme::CreateDropTableViaApiMultipleTime >> SubDomainWithReboots::SplitTabletInsideWithStoragePools ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::TouchIndexAfterMoveTableWrite [GOOD] Test command err: Trying to start YDB, gRPC: 23084, MsgBus: 3805 2024-11-21T17:55:00.850717Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792605257671761:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.850876Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ea7/r3tmp/tmpQYureI/pdisk_1.dat 2024-11-21T17:55:00.897021Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23084, node 1 2024-11-21T17:55:00.911890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.911903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.911904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.911934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3805 TClient is connected to server localhost:3805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:55:00.950863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.950907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.952014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.955184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.967172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.981258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:00.999081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.011909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
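The '/Root/ColumnTableTest' scenario recorded a little earlier in this log pairs the quoted CREATE TABLE ... WITH (STORE = COLUMN, ...) statement with a bulk upsert that the column shard rejects as "bad batch in data: Invalid UTF8 sequence at string index 0". A minimal YQL sketch of that scenario follows; it is an illustrative reconstruction rather than the test's own code, and the literal JSON values are assumptions:

    CREATE TABLE `/Root/ColumnTableTest` (
        id Int64 NOT NULL,
        json Json,
        json_doc JsonDocument,
        PRIMARY KEY (id)
    ) PARTITION BY HASH(id)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1);

    -- A row whose json payload is valid UTF-8 is accepted:
    UPSERT INTO `/Root/ColumnTableTest` (id, json, json_doc)
    VALUES (1, CAST('{"key": "value"}' AS Json), CAST('{"key": "value"}' AS JsonDocument));

The failing case in the log corresponds to pushing bytes that are not valid UTF-8 into the json column through the bulk-upsert path, which is rejected before anything is written.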
2024-11-21T17:55:01.107999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792609552640595:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.108025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.134521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.138997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.150720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.157996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.212294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.221531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:01.229052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792609552641112:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.229076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792609552641117:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.229085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.229563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:01.234188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792609552641119:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:55:01.424093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T17:55:01.434098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 27447, MsgBus: 5360 2024-11-21T17:55:01.722145Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792609611796124:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:01.722185Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ea7/r3tmp/tmpcnDz1P/pdisk_1.dat 2024-11-21T17:55:01.729757Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27447, node 2 2024-11-21T17:55:01.738194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:01.738208Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:01.738210Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:01.738241Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5360 TClient is connected to server localhost:5360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:01.822341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:01.822365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:01.823427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:01.824108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
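The ESchemeOpInitiateBuildIndexImplTable and ESchemeOpFinalizeBuildIndexMainTable suboperations logged just above correspond to an online secondary-index build, which is the kind of step KqpScheme::TouchIndexAfterMoveTableWrite drives before moving the table and writing through the index again. Roughly, and with table and index names that are illustrative rather than taken from the test, the DDL side looks like this:

    CREATE TABLE `/Root/IndexedTable` (
        id Uint64,
        value Utf8,
        PRIMARY KEY (id)
    );

    -- Adding the index afterwards triggers the two-phase build seen in the log:
    ALTER TABLE `/Root/IndexedTable` ADD INDEX value_idx GLOBAL ON (value);

    -- Renaming (moving) the table keeps the index; writes and reads then go through it:
    ALTER TABLE `/Root/IndexedTable` RENAME TO `/Root/IndexedTableMoved`;
    UPSERT INTO `/Root/IndexedTableMoved` (id, value) VALUES (1u, "one"u);
    SELECT id FROM `/Root/IndexedTableMoved` VIEW value_idx WHERE value = "one"u;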
2024-11-21T17:55:01.832362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.839203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.853550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.865627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:01.959561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792609611797655:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:01.959598Z node 2 :KQP_WORK ... 4046644480 waiting... 2024-11-21T17:55:03.501101Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.511077Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.618850Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792616054685233:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.618875Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.622994Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.628934Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.635877Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.643111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.650493Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.664656Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:03.749710Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792616054685747:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.749757Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.749782Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792616054685752:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.750511Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:03.752244Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792616054685754:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:03.957315Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:55:03.968491Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16457, MsgBus: 19167 2024-11-21T17:55:04.244820Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792620460739770:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:04.245037Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ea7/r3tmp/tmpuUZLuO/pdisk_1.dat 2024-11-21T17:55:04.253090Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16457, node 5 2024-11-21T17:55:04.263122Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:04.263135Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:04.263137Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:04.263165Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19167 TClient is connected to server localhost:19167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:04.344930Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:04.344961Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:04.346022Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:04.347193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:04.358576Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:04.365808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:04.380258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:04.392951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:04.503303Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792620460741299:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:04.503324Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:04.508590Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.514970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.525194Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.531622Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.538930Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.546099Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:04.554678Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792620460741799:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:04.554709Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:04.554719Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792620460741804:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:04.555277Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:04.559234Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792620460741806:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:04.720297Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T17:55:04.729680Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> test.py::test[window-win_func_part_by_expr--Debug] [GOOD] >> test.py::test[window-win_func_part_by_expr--Plan] [GOOD] >> test.py::test[window-win_func_part_by_expr--Results] |86.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> test.py::test[select-extend_and_take--Results] [GOOD] >> test.py::test[select-if-default.txt-Debug] >> KqpScheme::AlterGroup [GOOD] >> KqpScheme::AlterNonExistingResourcePool >> KqpOlapScheme::TenThousandColumns [FAIL] >> KqpOlapScheme::WithoutDefaultColumnFamily >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> ForceDropWithReboots::DoNotLostDeletedTablets >> KqpOlapScheme::WithoutDefaultColumnFamily [GOOD] >> KqpOlapScheme::UnknownColumnFamily >> test.py::test[window-win_func_part_by_expr--Results] [GOOD] >> 
ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Debug] >> test.py::test[select-if-default.txt-Debug] [GOOD] >> test.py::test[select-if-default.txt-Plan] [GOOD] >> test.py::test[select-if-default.txt-Results] |86.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |86.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> KqpScheme::AlterNonExistingResourcePool [GOOD] |86.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |86.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |86.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |86.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> KqpOlapScheme::UnknownColumnFamily [GOOD] >> KqpOlapScheme::TwoSimilarColumnFamilies >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterNonExistingResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 13869, MsgBus: 15944 2024-11-21T17:55:02.220130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792612925046513:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:02.220199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e9d/r3tmp/tmpLHOj1l/pdisk_1.dat 2024-11-21T17:55:02.271597Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13869, node 1 2024-11-21T17:55:02.283812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:02.283829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:02.283831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:02.283868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15944 TClient is connected to server localhost:15944 WaitRootIsUp 'Root'... 
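KqpScheme::AlterGroup, reported [GOOD] in the listing above, covers group-membership DDL. For reference, a minimal YQL sequence of the kind such a test drives looks like the following; the user and group names are illustrative, not taken from the test:

    CREATE USER user1 PASSWORD 'password1';
    CREATE GROUP group1;
    ALTER GROUP group1 ADD USER user1;
    ALTER GROUP group1 DROP USER user1;
    DROP GROUP group1;
    DROP USER user1;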
TClient::Ls request: Root 2024-11-21T17:55:02.320144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:02.320178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2024-11-21T17:55:02.321160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:02.333015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:02.341186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:02.402935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:02.418479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:02.426968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:02.526326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792612925048049:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:02.526348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:02.562798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.568379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.578895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.585648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.593205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.600485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:02.615252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792612925048564:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:02.615283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792612925048569:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:02.615286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:02.615849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:02.620416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792612925048571:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:55:02.818202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23356, MsgBus: 23478 2024-11-21T17:55:03.109870Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792615140246244:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:03.109895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e9d/r3tmp/tmpQID5OT/pdisk_1.dat 2024-11-21T17:55:03.120515Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23356, node 2 2024-11-21T17:55:03.126099Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:03.126113Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:03.126114Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:03.126147Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23478 TClient is connected to server localhost:23478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:03.209974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:03.210006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:03.211053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:03.212183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.221617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:03.229285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.243620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.251163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:03.354149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792615140247771:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.354167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:03.358771Z node 2 :FLAT_TX_SCHEMESH ... -> Disconnected 2024-11-21T17:55:05.605095Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:05.606159Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:05.606789Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:05.617442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:05.624616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:05.640589Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:05.653216Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:05.770606Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792626282222121:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.770627Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.776411Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:05.782274Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:05.791808Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:05.799026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:05.805840Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:05.813168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:05.873840Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792626282222635:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.873864Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.873888Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792626282222640:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.874528Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:05.876014Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439792626282222642:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 21091, MsgBus: 19653 2024-11-21T17:55:06.199338Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792628277972010:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:06.199385Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000e9d/r3tmp/tmpf4jvhj/pdisk_1.dat 2024-11-21T17:55:06.207753Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21091, node 5 2024-11-21T17:55:06.215920Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:06.215931Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:06.215932Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:06.215966Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19653 TClient is connected to server localhost:19653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:06.299895Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:06.299921Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:06.300964Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:06.301654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:06.311374Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:06.318545Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:06.335563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:06.345737Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:06.434212Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792628277973540:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:06.434231Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:06.438712Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:06.444289Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:06.449524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:06.456810Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:06.464077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:06.470787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:06.479064Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792628277974030:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:06.479082Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:06.479081Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792628277974035:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:06.479522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:06.484257Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792628277974037:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test.py::test[select-if-default.txt-Results] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Debug] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> KqpOlapScheme::TwoSimilarColumnFamilies [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] |86.2%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[window-win_func_rank_by_opt_all--Debug] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Plan] [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Results] >> test.py::test[select-one_labeled_column-default.txt-Debug] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Plan] [GOOD] >> test.py::test[select-one_labeled_column-default.txt-Results] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> test.py::test[select-one_labeled_column-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Debug] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] >> test.py::test[window-win_func_rank_by_opt_all--Results] [GOOD] >> test.py::test[window-win_over_joined--Debug] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DisableMetadataObjectsOnServerless [GOOD] Test command err: 2024-11-21T17:55:00.332800Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792604652199969:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.333065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000eac/r3tmp/tmpYxY8cD/pdisk_1.dat 2024-11-21T17:55:00.388067Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15294, node 1 2024-11-21T17:55:00.404392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:00.404408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:00.404410Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:00.404449Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:00.432922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.432950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.434489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.464015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:00.472711Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T17:55:00.579049Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T17:55:00.579074Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792604652200720:2298], Start check tables existence, number paths: 2 2024-11-21T17:55:00.579103Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T17:55:00.579106Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T17:55:00.579586Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDA0MzE3ZjAtZDE2MzMyNmItNzJhNjc2M2QtMmEzZjU0ODg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDA0MzE3ZjAtZDE2MzMyNmItNzJhNjc2M2QtMmEzZjU0ODg= 2024-11-21T17:55:00.579659Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDA0MzE3ZjAtZDE2MzMyNmItNzJhNjc2M2QtMmEzZjU0ODg=, ActorId: [1:7439792604652200736:2299], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:55:00.579677Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T17:55:00.579695Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792604652200720:2298], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T17:55:00.579703Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792604652200720:2298], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T17:55:00.579706Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439792604652200720:2298], Successfully finished 2024-11-21T17:55:00.581024Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T17:55:00.586364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.589562Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439792603976797019:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.589863Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:55:00.591502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.591523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.592412Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:55:00.592663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.602684Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:55:00.603531Z node 3 :STATISTICS WARN: [72075186224037897] TTxInit::Complete. 
EnableColumnStatistics=false 2024-11-21T17:55:00.645721Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.645750Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.647521Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.695976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.699673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792601259764307:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:00.699734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:55:00.701719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.701742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.702869Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:55:00.703051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.714386Z node 2 :STATISTICS WARN: [72075186224037907] TTxInit::Complete. EnableColumnStatistics=false 2024-11-21T17:55:00.714575Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:55:00.755442Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:00.755467Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:00.756890Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:00.803799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:00.808248Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:55:00.808290Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:55:00.808299Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:55:00.808306Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:55:00.808322Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:55:00.808345Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:55:00.816855Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:55:00.840219Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T17:55:00.840273Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T17:55:00.840279Z node 3 
:KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T17:55:00.840318Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439792603976797687:2313], Start check tables existence, number paths: 2 2024-11-21T17:55:00.840430Z node 3 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T17:55:00.840535Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439792603976797687:2313], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T17:55:00.840545Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439792603976797687:2313], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T17:55:00.840548Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439792603976797687:2313], Successfully finished 2024-11-21T17:55:00.840554Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T17:55:00.970115Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T17:55:00.970227Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439792601259765251:2340], Start check tables existence, number paths: 2 2024-11-21T17:55:00.970310Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7439792601259765263:2342], Database: /Root/test-dedicated, Start database fetching 2024-11-21T17:55:00.970332Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T17:55:00.970335Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T17:55:00.970340Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T17:55:00.970480Z node 2 :KQP_WORKLOAD_SERVICE DEBU ... 
ActorState: ExecuteState, TraceId: 01jd7xt4gk4tza7fmresg464cn, EndCleanup, isFinal: 0 2024-11-21T17:55:08.476314Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzkxYzA2ZS0xYzgyODI5My00MjY5MjRiLWU4NjgwZmJk, ActorId: [9:7439792638021630860:2840], ActorState: ExecuteState, TraceId: 01jd7xt4gk4tza7fmresg464cn, Sent query response back to proxy, proxyRequestId: 64, proxyId: [9:7439792625136726311:2172] 2024-11-21T17:55:08.476503Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: ExecuteState, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T17:55:08.476518Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MzkxYzA2ZS0xYzgyODI5My00MjY5MjRiLWU4NjgwZmJk, ActorId: [9:7439792638021630860:2840], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:55:08.476531Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MzkxYzA2ZS0xYzgyODI5My00MjY5MjRiLWU4NjgwZmJk, ActorId: [9:7439792638021630860:2840], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:08.476532Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: ExecuteState, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, txInfo Status: Committed Kind: Pure TotalDuration: 129.298 ServerDuration: 129.286 QueriesCount: 2 2024-11-21T17:55:08.476538Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzkxYzA2ZS0xYzgyODI5My00MjY5MjRiLWU4NjgwZmJk, ActorId: [9:7439792638021630860:2840], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:55:08.476540Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzkxYzA2ZS0xYzgyODI5My00MjY5MjRiLWU4NjgwZmJk, ActorId: [9:7439792638021630860:2840], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:08.476564Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzkxYzA2ZS0xYzgyODI5My00MjY5MjRiLWU4NjgwZmJk, ActorId: [9:7439792638021630860:2840], ActorState: unknown state, Session actor destroyed 2024-11-21T17:55:08.476567Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: ExecuteState, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T17:55:08.476586Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: ExecuteState, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:08.476596Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: ExecuteState, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, EndCleanup, isFinal: 1 2024-11-21T17:55:08.476607Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: ExecuteState, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, Sent query response back to 
proxy, proxyRequestId: 58, proxyId: [9:7439792625136726311:2172] 2024-11-21T17:55:08.476615Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: unknown state, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, Cleanup temp tables: 0 2024-11-21T17:55:08.476625Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MjIyMDFmNGYtNTE5NDE3ODQtMzhlNmQyNjYtNmU4NmQ4ZTA=, ActorId: [9:7439792638021630852:2835], ActorState: unknown state, TraceId: 01jd7xt4dtbt2s19ts0rm1jtsz, Session actor destroyed 2024-11-21T17:55:08.477612Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2024-11-21T17:55:08.477915Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YzIzMTA5NWMtNTg0YjE3NWItZTZmOWM0MzAtYmU2ZTRhNWY=, ActorId: [7:7439792623152580146:2299], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:55:08.477944Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YzIzMTA5NWMtNTg0YjE3NWItZTZmOWM0MzAtYmU2ZTRhNWY=, ActorId: [7:7439792623152580146:2299], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:08.477947Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YzIzMTA5NWMtNTg0YjE3NWItZTZmOWM0MzAtYmU2ZTRhNWY=, ActorId: [7:7439792623152580146:2299], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:55:08.477956Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YzIzMTA5NWMtNTg0YjE3NWItZTZmOWM0MzAtYmU2ZTRhNWY=, ActorId: [7:7439792623152580146:2299], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:08.477994Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YzIzMTA5NWMtNTg0YjE3NWItZTZmOWM0MzAtYmU2ZTRhNWY=, ActorId: [7:7439792623152580146:2299], ActorState: unknown state, Session actor destroyed 2024-11-21T17:55:08.478080Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:55:08.478330Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T17:55:08.478486Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:55:08.994586Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M= 2024-11-21T17:55:08.994645Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: unknown state, session actor bootstrapped 2024-11-21T17:55:08.994760Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ReadyState, TraceId: 01jd7xt5229s7asj33a9zt6b55, received request, proxyRequestId: 66 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/secrets/values`; rpcActor: [8:7439792639242112760:2934] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2024-11-21T17:55:08.994770Z node 8 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ReadyState, TraceId: 01jd7xt5229s7asj33a9zt6b55, request placed into pool from cache: default 2024-11-21T17:55:08.994783Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ReadyState, TraceId: 01jd7xt5229s7asj33a9zt6b55, Sending CompileQuery request 2024-11-21T17:55:08.996475Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: [main][8:7439792630652176362:2982][/Root/test-shared/.metadata/secrets/values] Sync is done: cookie# 38, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:55:08.996503Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: [main][8:7439792630652176362:2982][/Root/test-shared/.metadata/secrets/values] Sync is done: cookie# 39, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T17:55:08.996708Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7439792639242112762:2935], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2024-11-21T17:55:08.996784Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ExecuteState, TraceId: 01jd7xt5229s7asj33a9zt6b55, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2024-11-21T17:55:08.996801Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ExecuteState, TraceId: 01jd7xt5229s7asj33a9zt6b55, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:08.996804Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ExecuteState, TraceId: 01jd7xt5229s7asj33a9zt6b55, EndCleanup, isFinal: 0 2024-11-21T17:55:08.996852Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ExecuteState, TraceId: 01jd7xt5229s7asj33a9zt6b55, Sent query response back to proxy, proxyRequestId: 66, proxyId: [8:7439792626357207450:2181] 2024-11-21T17:55:08.997647Z node 8 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2024-11-21T17:55:08.997714Z node 8 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/secrets/values]
: Error: LookupError, code: 2005 2024-11-21T17:55:08.997726Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T17:55:08.997738Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T17:55:08.997740Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T17:55:08.997747Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T17:55:08.997764Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=OTRmZTMwYTYtZWNjYWJjZDMtMTQ4MjMwNzItODNmMGU5N2M=, ActorId: [8:7439792639242112759:2933], ActorState: unknown state, Session actor destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::TwoSimilarColumnFamilies [GOOD] Test command err: Trying to start YDB, gRPC: 11449, MsgBus: 63239 2024-11-21T17:54:55.099001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583474340572:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.099025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ee2/r3tmp/tmpGOTf1V/pdisk_1.dat 2024-11-21T17:54:55.363432Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11449, node 1 2024-11-21T17:54:55.402875Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402890Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.436181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.436212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.437274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:55.766479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766539Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63239 TClient is connected to server localhost:63239 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.170457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.182271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.245667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.256922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587769309233:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.332054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.341788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.396120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.404766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.411640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.420179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587769309727:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.420206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.420207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792587769309732:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.420749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.424995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792587769309734:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.576695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.583777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.587863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16234, MsgBus: 12008 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ee2/r3tmp/tmppzMVf2/pdisk_1.dat 2024-11-21T17:54:56.744331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:54:56.744526Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16234, node 2 2024-11-21T17:54:56.752365Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.752378Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.752380Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.752415Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12008 TClient is connected to server localhost:12008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.830817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.830844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.831888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.833605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
CREATE TABLE `/Root/ColumnTableTest` (id Uint64 NOT NULL, column0 Int32, column1 Int32, column2 Int32, column3 Int32, column4 Int32, column5 Int32, column6 Int32, column7 Int32, column8 Int32, column9 Int32, column10 Int32, column11 Int32, column12 Int32, column13 Int32, column14 Int32, column15 Int32, column16 Int32, column17 Int32, column18 Int32, column19 Int32, column20 Int32, column21 Int32, column22 Int32, column23 Int32, column24 Int32, column25 Int32, column26 Int32, column27 Int32, column28 Int32, column29 Int32, column30 Int32, column31 Int32, column32 Int32, column33 Int32, column34 Int32, column35 Int32, column36 Int32, column37 Int32, column38 Int32, column39 Int32, column40 Int32, column41 Int32, column42 Int32, column43 Int32, column44 Int32, column45 Int32, column46 Int32, column47 Int32, column48 Int32, column49 Int32, column50 Int32, column51 Int32, column52 Int32, column53 Int32, column54 Int32, column55 Int32, column56 Int32, column57 Int32, column58 Int32, column59 Int32, column60 Int32, column61 Int32, column62 Int32, column63 Int32, column64 Int32, column65 Int32, column66 Int32, column67 Int32, column68 ... LUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:55:06.503311Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:55:06.503334Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:55:06.503349Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:55:06.503364Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:55:06.503395Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:06.503415Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:06.503427Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:06.503445Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:06.503459Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:06.503693Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 
2024-11-21T17:55:06.503733Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:06.503770Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:06.503784Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T17:55:06.503800Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:06.503814Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:06.503844Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:06.503858Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:06.503883Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:06.503896Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; Trying to start YDB, gRPC: 18245, MsgBus: 26929 2024-11-21T17:55:06.729452Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439792627619060684:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:06.729612Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ee2/r3tmp/tmp67ZK5A/pdisk_1.dat 2024-11-21T17:55:06.739121Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18245, node 4 2024-11-21T17:55:06.750070Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:06.750082Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:06.750084Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:06.750123Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26929 TClient is connected to server localhost:26929 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:06.829682Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:06.829743Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:06.830815Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:06.831968Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:55:06.951893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792627619061280:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:06.951933Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 3809, MsgBus: 12334 2024-11-21T17:55:07.205377Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792633925092293:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:07.205403Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000ee2/r3tmp/tmp0vjLHS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3809, node 5 2024-11-21T17:55:07.221449Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:55:07.221684Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:07.221692Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:07.221693Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:07.221719Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12334 TClient is connected to server localhost:12334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:07.305838Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:07.305870Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:07.306919Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:07.308136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/TableWithFamily` (Key Uint64 NOT NULL, Value1 String FAMILY family1, Value2 Uint32 FAMILY family1, PRIMARY KEY (Key), FAMILY default (COMPRESSION="off"), FAMILY family1 (COMPRESSION="lz4"), FAMILY family1 (COMPRESSION="lz4")) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:55:07.436523Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792633925092894:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:07.436548Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> test.py::test[window-win_over_joined--Debug] [GOOD] >> test.py::test[window-win_over_joined--Plan] [GOOD] >> test.py::test[window-win_over_joined--Results] >> KqpScheme::CreateDropTableViaApiMultipleTime [GOOD] >> KqpScheme::CreateDropColumnTableNegative >> test.py::test[select-table_content_from_double_opt-default.txt-Debug] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Plan] [GOOD] >> test.py::test[select-table_content_from_double_opt-default.txt-Results] >> KqpScheme::CreateDropColumnTableNegative [GOOD] >> KqpScheme::CreateExternalDataSource >> test.py::test[window-win_over_joined--Results] [GOOD] >> test.py::test[window-yql-14479-default.txt-Debug] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestGetStorageInfo >> test.py::test[select-table_content_from_double_opt-default.txt-Results] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Debug] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] >> KqpScheme::CreateExternalDataSource [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-no] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 22723, MsgBus: 13783 2024-11-21T17:54:55.098751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583751214049:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f07/r3tmp/tmp7l98lk/pdisk_1.dat 2024-11-21T17:54:55.366629Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.374987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.375021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.392789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22723, node 1 2024-11-21T17:54:55.402554Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2024-11-21T17:54:55.402568Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766601Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13783 TClient is connected to server localhost:13783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.171480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.183989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.244269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.255594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.265114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.285368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588046182703:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.285391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.325330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.331321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.342089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.348744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.356592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.369866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.377809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588046183193:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.377828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.377833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588046183198:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.378239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T17:54:56.383202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439792588046183200:2429], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T17:54:56.573653Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2024-11-21T17:54:56.576993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25192, MsgBus: 61834 2024-11-21T17:54:56.716612Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439792585647800372:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:56.716641Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f07/r3tmp/tmpvO03yn/pdisk_1.dat 2024-11-21T17:54:56.726719Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25192, node 2 2024-11-21T17:54:56.737542Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:56.737553Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:56.737556Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:56.737586Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61834 TClient is connected to server localhost:61834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.771236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.784755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:54:56.816684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:56.816711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:56.817801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:54:56.845236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.862924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:56.875521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:54:57.007777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439792589942769070:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... TTableExistsActor;event=undelivered;self_id=[4:7439792640158015252:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:09.976261Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f07/r3tmp/tmptyT6dC/pdisk_1.dat 2024-11-21T17:55:09.983442Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9166, node 4 2024-11-21T17:55:09.991049Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:09.991059Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:09.991060Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:09.991080Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6285 TClient is connected to server localhost:6285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:10.076551Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:10.076576Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:10.077626Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:10.078255Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:10.203789Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792644452983140:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.203825Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.205605Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792644452983161:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.205636Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.207977Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439792644452983175:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.207989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 23500, MsgBus: 26134 2024-11-21T17:55:10.451287Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439792648141771138:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:10.451478Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f07/r3tmp/tmpoc03Ph/pdisk_1.dat 2024-11-21T17:55:10.463935Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23500, node 5 2024-11-21T17:55:10.470940Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:55:10.470953Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:55:10.470955Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:55:10.470994Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26134 TClient is connected to server localhost:26134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:10.551767Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:10.551809Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:10.552956Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T17:55:10.554031Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:55:10.558418Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:10.567555Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T17:55:10.584642Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:10.595018Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:55:10.718843Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792648141772673:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.718866Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.723154Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T17:55:10.729251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T17:55:10.742028Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T17:55:10.755517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T17:55:10.810983Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T17:55:10.818075Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T17:55:10.834193Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792648141773190:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.834222Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.834227Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792648141773195:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:10.834887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T17:55:10.838057Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439792648141773197:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T17:55:10.985302Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-empty] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Debug] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Plan] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] Test command err: 2024-11-21T17:54:11.527058Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:54:11.528205Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:54:11.528281Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:54:11.528444Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [3:65:2071] ControllerId# 72057594037932033 2024-11-21T17:54:11.528449Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:54:11.528488Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:54:11.528557Z node 3 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:54:11.528680Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:54:11.528686Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:54:11.528951Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:71:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.528972Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:72:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.528988Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:73:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.529014Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:74:2078] 
targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.529027Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:75:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.529042Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:76:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.529078Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:77:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.529085Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:54:11.529094Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:65:2071] 2024-11-21T17:54:11.529098Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:65:2071] 2024-11-21T17:54:11.529103Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:54:11.529108Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:54:11.529163Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:54:11.529212Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:54:11.529608Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:54:11.529649Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:54:11.529761Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:54:11.529945Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:54:11.529953Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:54:11.530046Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:87:2075] ControllerId# 72057594037932033 2024-11-21T17:54:11.530048Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:54:11.530059Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:54:11.530088Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:54:11.530769Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:61:2065] 2024-11-21T17:54:11.530774Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:61:2065] 2024-11-21T17:54:11.530782Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:54:11.533136Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:54:11.533145Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 
2024-11-21T17:54:11.533393Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:94:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.533421Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:95:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.533436Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:96:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.533451Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:97:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.533470Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:98:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.533492Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:99:2085] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.533507Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:100:2086] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.533510Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:54:11.533517Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:87:2075] 2024-11-21T17:54:11.533520Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:87:2075] 2024-11-21T17:54:11.533525Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:54:11.533530Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:54:11.533649Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:54:11.533662Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:54:11.534542Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:54:11.534561Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:54:11.534668Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:108:2072] ControllerId# 72057594037932033 2024-11-21T17:54:11.534672Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:54:11.534679Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:54:11.534704Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:54:11.534791Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:54:11.534812Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:54:11.534815Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:54:11.534987Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:114:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.535002Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:115:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.535017Z node 2 :BS_PROXY DEBUG: 
Group# 0 Actor# [2:107:2071] Create Queue# [2:116:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.535035Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:117:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.535050Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:118:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.535071Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:119:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.535085Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:120:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:54:11.535087Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:54:11.535092Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:108:2072] 2024-11-21T17:54:11.535095Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:108:2072] 2024-11-21T17:54:11.535099Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:54:11.535102Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:54:11.535170Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:54:11.535202Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:65:2071] 2024-11-21T17:54:11.535210Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:54:11.535214Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:54:11.535220Z node 3 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:54:11.535243Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:11.535250Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:61:2065] 2024-11-21T17:54:11.538491Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:87:2075] 2024-11-21T17:54:11.538509Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:54:11.538515Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:54:11.538838Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:54:11.538863Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:54:11.538894Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:108:2072] 2024-11-21T17:54:11.538899Z node 2 :BS_NO ... 
17:55:11.359495Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [45:308:2288] 2024-11-21T17:55:11.359500Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [45:308:2288] 2024-11-21T17:55:11.359516Z node 45 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:55:11.359525Z node 45 :TABLET_RESOLVER DEBUG: SelectForward node 45 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [45:263:2256] 2024-11-21T17:55:11.359533Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [45:308:2288] 2024-11-21T17:55:11.359539Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [45:308:2288] 2024-11-21T17:55:11.359547Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [45:308:2288] 2024-11-21T17:55:11.359552Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [45:308:2288] 2024-11-21T17:55:11.359565Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [45:308:2288] 2024-11-21T17:55:11.359584Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [45:308:2288] 2024-11-21T17:55:11.359589Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [45:308:2288] 2024-11-21T17:55:11.359593Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [45:308:2288] 2024-11-21T17:55:11.359598Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [45:308:2288] 2024-11-21T17:55:11.359602Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [45:308:2288] 2024-11-21T17:55:11.359608Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [45:307:2287] EventType# 268697621 2024-11-21T17:55:11.359626Z node 45 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([45:308:2288]) [45:309:2289] 2024-11-21T17:55:11.359636Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::Handle::TEvGetTabletStorageInfo TabletId=72075186224037888 2024-11-21T17:55:11.359688Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [45:311:2291] 2024-11-21T17:55:11.359692Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [45:311:2291] 2024-11-21T17:55:11.359699Z node 45 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:55:11.359707Z node 45 :TABLET_RESOLVER DEBUG: SelectForward node 45 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [45:263:2256] 2024-11-21T17:55:11.359714Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [45:311:2291] 2024-11-21T17:55:11.359719Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [45:311:2291] 2024-11-21T17:55:11.359725Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [45:311:2291] 2024-11-21T17:55:11.359729Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [45:311:2291] 2024-11-21T17:55:11.359740Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [45:311:2291] 2024-11-21T17:55:11.359753Z node 45 :PIPE_CLIENT DEBUG: 
TClient[72057594037927937] connected with status OK role: Leader [45:311:2291] 2024-11-21T17:55:11.359757Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [45:311:2291] 2024-11-21T17:55:11.359760Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [45:311:2291] 2024-11-21T17:55:11.359765Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [45:311:2291] 2024-11-21T17:55:11.359768Z node 45 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [45:311:2291] 2024-11-21T17:55:11.359774Z node 45 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [45:310:2290] EventType# 268697615 2024-11-21T17:55:11.359782Z node 45 :HIVE TRACE: HIVE#72057594037927937 Handle TEvTabletPipe::TEvServerConnected([45:311:2291]) [45:312:2292] 2024-11-21T17:55:11.359798Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} queued, type NKikimr::NHive::TTxDeleteTablet 2024-11-21T17:55:11.359805Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:55:11.359823Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594037927937 ShardLocalIdx: 0 TxId_Deprecated: 0 2024-11-21T17:55:11.359830Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute Tablet 72075186224037888 2024-11-21T17:55:11.359867Z node 45 :HIVE DEBUG: HIVE#72057594037927937 Tablet(Dummy.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2024-11-21T17:55:11.359882Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037927937 TxId_Deprecated: 0 ShardOwnerId: 72057594037927937 ShardLocalIdx: 0 2024-11-21T17:55:11.359900Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} hope 1 -> done Change{5, redo 102b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2024-11-21T17:55:11.359909Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxDeleteTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:55:11.370273Z node 45 :BS_PROXY_PUT INFO: [9521640286a8eda0] bootstrap ActorId# [45:314:2294] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:104:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:55:11.370329Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] Id# [72057594037927937:2:5:0:0:104:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:55:11.370337Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] restore Id# [72057594037927937:2:5:0:0:104:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:55:11.370348Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:104:1] Marker# BPG33 2024-11-21T17:55:11.370356Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:104:1] Marker# BPG32 2024-11-21T17:55:11.370389Z node 45 :BS_PROXY DEBUG: Send to queueActorId# [45:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:104:1] FDS# 104 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:55:11.370844Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] received 
{EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:104:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 80818 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:55:11.370879Z node 45 :BS_PROXY_PUT DEBUG: [9521640286a8eda0] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T17:55:11.370888Z node 45 :BS_PROXY_PUT INFO: [9521640286a8eda0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:55:11.370925Z node 45 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T17:55:11.370956Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2024-11-21T17:55:11.370972Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10050004 [45:270:2260],0x10040206 [45:310:2290]} 2024-11-21T17:55:11.371028Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} queued, type NKikimr::NHive::TTxDeleteTabletResult 2024-11-21T17:55:11.371051Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:55:11.371060Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2024-11-21T17:55:11.371128Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} hope 1 -> done Change{6, redo 106b alter 0b annex 0, ~{ 16, 1 } -{ }, 0 gb} 2024-11-21T17:55:11.371136Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} Tx{6, NKikimr::NHive::TTxDeleteTabletResult} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:55:11.381522Z node 45 :BS_PROXY_PUT INFO: [758a346c7e0f5aa1] bootstrap ActorId# [45:316:2296] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:6:0:0:104:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:55:11.381574Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Id# [72057594037927937:2:6:0:0:104:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:55:11.381581Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] restore Id# [72057594037927937:2:6:0:0:104:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:55:11.381592Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:6:0:0:104:1] Marker# BPG33 2024-11-21T17:55:11.381597Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:6:0:0:104:1] Marker# BPG32 2024-11-21T17:55:11.381629Z node 45 :BS_PROXY DEBUG: Send to queueActorId# [45:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:6:0:0:104:1] FDS# 104 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:55:11.382082Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] received 
{EvVPutResult Status# OK ID# [72057594037927937:2:6:0:0:104:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80818 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:55:11.382111Z node 45 :BS_PROXY_PUT DEBUG: [758a346c7e0f5aa1] Result# TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T17:55:11.382119Z node 45 :BS_PROXY_PUT INFO: [758a346c7e0f5aa1] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:55:11.382152Z node 45 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:6:0:0:104:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T17:55:11.382180Z node 45 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} commited cookie 1 for step 6 2024-11-21T17:55:11.382195Z node 45 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {Notifications: 0x1004020B [45:307:2287]} >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] >> test_queue_counters.py::TestSqsGettingCounters::test_purge_queue_counters [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] >> test.py::test[select-two_selects_with_diff_fields-default.txt-Results] [GOOD] >> test.py::test[select-values-default.txt-Debug] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_attempts_are_counted_separately_for_messages_in_one_batch [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] >> test.py::test[window-yql-14479-default.txt-Debug] [GOOD] >> test.py::test[window-yql-14479-default.txt-Plan] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] >> test.py::test[select-values-default.txt-Debug] [GOOD] >> 
test.py::test[select-values-default.txt-Plan] [GOOD] >> test.py::test[select-values-default.txt-Results] >> BasicStatistics::TwoServerlessDbs [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] >> SubDomainWithReboots::RootWithStoragePools [GOOD] >> test.py::test[select-values-default.txt-Results] [GOOD] >> test.py::test[select-where_not_null--Debug] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2024-11-21T17:52:15.574723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:15.574776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:15.574788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00166d/r3tmp/tmpkH5Ymw/pdisk_1.dat 2024-11-21T17:52:15.687815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15444, node 1 2024-11-21T17:52:15.789989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:15.790007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:15.790009Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:15.790070Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:15.794406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:15.870704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:15.870744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:15.882407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32475 2024-11-21T17:52:16.284029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.045906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.045932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.079487Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:17.080491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.132000Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:17.141895Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:17.141916Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:17.147685Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:17.147804Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:17.147824Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:17.147829Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:17.147835Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:17.147841Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:17.147846Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:17.147852Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:17.147944Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:17.323152Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:17.323189Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:17.324543Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:52:17.326157Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:52:17.326230Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:52:17.326843Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T17:52:17.331221Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:17.331236Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:17.331244Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T17:52:17.332660Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.332680Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.333706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:17.334989Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:17.335016Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:17.337221Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:17.349209Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.371151Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:17.486327Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:17.642056Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T17:52:18.376224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:18.978995Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:19.092100Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T17:52:19.092126Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:19.092141Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2489:2902], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:19.092413Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2490:2903] 2024-11-21T17:52:19.092464Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2490:2903], schemeshard id = 72075186224037899 2024-11-21T17:52:19.801602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.208010Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:20.396990Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2024-11-21T17:52:20.397015Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2024-11-21T17:52:20.397029Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2974:3106], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2024-11-21T17:52:20.397295Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2975:3107] 2024-11-21T17:52:20.397403Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2975:3107], schemeshard id = 72075186224037905 2024-11-21T17:52:21.193687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3099:3357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.193719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.196382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2024-11-21T17:52:21.250426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3247:3393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.250456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:21.251858Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3252:3397]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:52:21.251899Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:52:21.251922Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T17:52:21.251927Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3255:3400] 2024-11-21T17:52:21.251936Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3255:3400] 2024-11-21T17:52:21.252068Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3256:3234] 2024-11-21T17:52:21.252124Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3255:3400], server id = [2:3256:3234], tablet id = 72075186224037897, status = OK 2024-11-21T17:52:21.252149Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:3256:3234], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:52:21.252162Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T17:52:21.252204Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:52:21.252215Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3252:3397], StatRequests.size() = 1 2024-11-21T17:52:21.254099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3260:3404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Erro ... -11-21T17:55:05.374016Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:8799:6297], StatRequests.size() = 1 2024-11-21T17:55:05.821626Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:05.821648Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:05.821656Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T17:55:05.821660Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:55:05.821749Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T17:55:05.824285Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:05.825307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8822:6316], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.825325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8833:6321], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.825349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:05.828210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T17:55:05.839031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8836:6324], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T17:55:06.107859Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:8957:6391]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:06.107928Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T17:55:06.108000Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2024-11-21T17:55:06.108004Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T17:55:06.108028Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:55:06.108036Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:8957:6391], StatRequests.size() = 1 2024-11-21T17:55:06.120990Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2JjYzk4OGQtZjIwOWQ2NDgtMTAwZTEwMTYtZDM5ZWU4OTk=, TxId: 2024-11-21T17:55:06.121010Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2JjYzk4OGQtZjIwOWQ2NDgtMTAwZTEwMTYtZDM5ZWU4OTk=, TxId: 2024-11-21T17:55:06.121119Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:06.142974Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:55:06.142991Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:06.236238Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:55:06.236259Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:55:06.298151Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3600:3348], schemeshard count = 1 2024-11-21T17:55:06.756213Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T17:55:06.756257Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 234.000000s, at schemeshard: 72075186224037899 2024-11-21T17:55:06.756345Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2024-11-21T17:55:06.778174Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:55:06.989548Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9007:6419]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:06.989648Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T17:55:06.989656Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9007:6419], StatRequests.size() = 1 2024-11-21T17:55:08.454344Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9054:6449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:08.454459Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T17:55:08.454467Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9054:6449], StatRequests.size() = 1 2024-11-21T17:55:08.948383Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:55:08.969550Z node 2 :STATISTICS DEBUG: [72075186224037897] 
ScheduleNextTraversal 2024-11-21T17:55:08.969570Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:08.969577Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2024-11-21T17:55:08.969581Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:08.969675Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T17:55:08.970345Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:08.974210Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWZhMjhlYWItZTYxMjBlZjMtYmU3OWMxMzEtYWRkOGQ3NDI=, TxId: 2024-11-21T17:55:08.974223Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWZhMjhlYWItZTYxMjBlZjMtYmU3OWMxMzEtYWRkOGQ3NDI=, TxId: 2024-11-21T17:55:08.974340Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:08.985845Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:08.985863Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:09.113717Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2024-11-21T17:55:09.113735Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 215.000000s, at schemeshard: 72075186224037905 2024-11-21T17:55:09.113782Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2024-11-21T17:55:09.125367Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:55:10.026020Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9136:6503]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:10.026119Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T17:55:10.026124Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9136:6503], StatRequests.size() = 1 2024-11-21T17:55:11.583490Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9192:6535]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:11.583558Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T17:55:11.583564Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9192:6535], StatRequests.size() = 1 2024-11-21T17:55:12.022833Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 3 2024-11-21T17:55:12.022992Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:55:12.023095Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:55:12.054566Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:12.054587Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T17:55:12.054596Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2024-11-21T17:55:12.054612Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T17:55:12.054721Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T17:55:12.055279Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:12.058641Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjEyYzk5YWEtNGYwNGE4NWItY2ZlMzFmMWEtNTE1MjMxNTA=, TxId: 2024-11-21T17:55:12.058655Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjEyYzk5YWEtNGYwNGE4NWItY2ZlMzFmMWEtNTE1MjMxNTA=, TxId: 2024-11-21T17:55:12.058843Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:12.070455Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T17:55:12.070478Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:13.099370Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9265:6582]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:13.099466Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T17:55:13.099471Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9265:6582], StatRequests.size() = 1 2024-11-21T17:55:13.099583Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9267:6584]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:13.100299Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T17:55:13.100306Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9267:6584], StatRequests.size() = 1 >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-no] [GOOD] >> test.py::test[window-yql-14479-default.txt-Results] [GOOD] >> test.py::test[window-yql-14738-default.txt-Debug] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::RootWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: 
[1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:04.668317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:04.668334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.668337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:04.668340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:04.668352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:04.668355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:04.668361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.668415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:04.675400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:04.675416Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.676881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:04.676936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:04.676952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:04.678566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:04.678623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:04.678704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.678888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.679366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.679557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.679563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.679571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:04.679575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.679579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:04.679607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] 
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:04.680447Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.691279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:04.691353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.691393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:04.691437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:04.691443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.691941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.691970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:04.692015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.692028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:04.692032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:04.692036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:04.692711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.692724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:04.692727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:04.693020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.693026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.693030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.693034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.693448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:04.693750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:04.693781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:04.693894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.693909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:04.693921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.693957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:04.693961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.693978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:04.693987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.694294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.694300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.694321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.694326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:04.694369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.694374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:04.694382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:04.694385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.694389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:04.694392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.694395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 1:0 2024-11-21T17:55:04.694398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:04.694405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:04.694408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:04.694411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 38 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T17:55:13.599787Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } } } TxId: 1002 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:13.599811Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:13.599848Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:13.599884Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1002:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:13.599890Z node 38 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:13.599992Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:55:13.599998Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T17:55:13.600003Z node 38 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T17:55:13.600325Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1002, response: Status: StatusAccepted TxId: 1002 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:13.600343Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1002, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:13.600370Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:13.600379Z node 38 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:13.600383Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 ProgressState no shards to create, do next state 2024-11-21T17:55:13.600387Z node 38 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 2024-11-21T17:55:13.600696Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:13.600705Z node 38 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1002:0 ProgressState, at schemeshard: 72057594046678944 
2024-11-21T17:55:13.600709Z node 38 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 3 -> 128 2024-11-21T17:55:13.601057Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:13.601069Z node 38 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:13.601075Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:13.601081Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T17:55:13.601105Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:13.601463Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T17:55:13.601489Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 2024-11-21T17:55:13.601551Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:13.601573Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 163208759400 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:13.601579Z node 38 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:13.601632Z node 38 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 240 2024-11-21T17:55:13.601640Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:13.601663Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:13.601675Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:55:13.602109Z node 38 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:13.602117Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:13.602148Z node 38 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:13.602152Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [38:202:2205], at schemeshard: 72057594046678944, txId: 
1002, path id: 1 2024-11-21T17:55:13.602206Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:13.602212Z node 38 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:55:13.602224Z node 38 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:55:13.602227Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:55:13.602232Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:55:13.602237Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:55:13.602241Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:55:13.602244Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:55:13.602255Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T17:55:13.602259Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 1, subscribers: 1 2024-11-21T17:55:13.602262Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2024-11-21T17:55:13.602337Z node 38 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:13.602348Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:13.602353Z node 38 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:55:13.602358Z node 38 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T17:55:13.602362Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:13.602374Z node 38 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T17:55:13.602379Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [38:298:2290] 2024-11-21T17:55:13.602930Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:55:13.602950Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:55:13.602955Z node 38 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [38:299:2291] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:55:13.603040Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2024-11-21T17:55:13.603066Z node 38 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 33us result status StatusSuccess 2024-11-21T17:55:13.603159Z node 38 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] >> test_counters.py::TestSqsCountersFeatures::test_updates_status_code_counters_when_parsing_errors_occur[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> SubDomainWithReboots::DeleteWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-no] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] >> test_acl.py::TestSqsACLWithTenant::test_apply_permissions[tables_format_v1] [GOOD] >> 
test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] >> test.py::test[select-where_not_null--Debug] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::DeleteWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:02.435326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:02.435353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:02.435364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:02.435382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:02.435386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:02.435397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:02.448498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:02.448518Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.450568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:02.450670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:02.450694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:02.453643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:02.453710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:02.455681Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.456914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.458436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:02.464441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.464446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:02.464481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:02.465804Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.485406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:02.485487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.485533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:02.485602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:02.485609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:02.486377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:02.486389Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:02.486413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:02.486805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:02.487131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.487139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.487144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.487150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.487745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:02.488523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:02.488566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:02.488704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.488723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:02.488734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.488798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:02.488804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.488824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:02.488833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.489156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T17:55:02.489163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.489192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.489197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:02.489267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.489272Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:02.489280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:02.489283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.489287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:02.489290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.489293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:02.489295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:02.489303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:02.489306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:02.489309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
024-11-21T17:55:14.334482Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:14.334489Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T17:55:14.334495Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:55:14.334499Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:14.334504Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:55:14.334508Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:14.334511Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:55:14.334515Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:55:14.334542Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:14.334547Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2024-11-21T17:55:14.334550Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T17:55:14.334554Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:55:14.334675Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:14.334686Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:14.334690Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:14.334694Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T17:55:14.334698Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:55:14.334800Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:14.334810Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:14.334813Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:14.334817Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:55:14.334821Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:14.334828Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:55:14.335659Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:14.335672Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:14.335770Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:14.335785Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:14.335812Z node 50 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:55:14.336030Z node 50 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:55:14.336115Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:14.336169Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409547 2024-11-21T17:55:14.336367Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:14.336390Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:14.336453Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:14.336457Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:14.336474Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:55:14.336565Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:14.336571Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:14.336579Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:14.336942Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:55:14.336957Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:55:14.337392Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:14.337405Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:55:14.337443Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:14.337454Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:55:14.337495Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:55:14.337500Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:55:14.337550Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:55:14.337564Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:55:14.337568Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [50:462:2429] TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:55:14.337610Z node 50 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:14.337620Z node 50 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T17:55:14.337685Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:14.337714Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 50us result status StatusPathDoesNotExist 2024-11-21T17:55:14.337744Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:55:14.337789Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:14.337806Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 18us result status StatusSuccess 2024-11-21T17:55:14.337860Z node 50 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 >> BasicStatistics::Serverless [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2024-11-21T17:52:20.285070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:20.285115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:20.285126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00163a/r3tmp/tmpqZ5Ea4/pdisk_1.dat 2024-11-21T17:52:20.359700Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63899, node 1 2024-11-21T17:52:20.452057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:20.452091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:20.452096Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:20.452174Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:20.456746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:20.532857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:20.532893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:20.544882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5353 2024-11-21T17:52:20.944998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:21.700788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:21.700814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:21.733657Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:21.734709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:21.782216Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:21.790399Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:21.790419Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:21.796296Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:21.796398Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:21.796414Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:21.796418Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:21.796424Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:21.796445Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:21.796450Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:21.796457Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:21.796554Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:21.971261Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:21.971307Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:21.972269Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T17:52:21.974426Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T17:52:21.974524Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T17:52:21.975247Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T17:52:21.980066Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:21.980079Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:21.980087Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T17:52:21.981490Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:21.981517Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:21.982481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:21.983633Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:21.983656Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:21.986481Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:21.998502Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:22.020325Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:22.134261Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:22.289940Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T17:52:23.014983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:23.605217Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:23.710253Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T17:52:23.710271Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:23.710284Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2489:2902], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:23.710453Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2490:2903] 2024-11-21T17:52:23.710491Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2490:2903], schemeshard id = 72075186224037899 2024-11-21T17:52:24.405496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2619:3193], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.405533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.408259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T17:52:24.470670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2772:3229], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.470703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.472501Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2777:3233]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:52:24.472530Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:52:24.472563Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T17:52:24.472570Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2780:3236] 2024-11-21T17:52:24.472579Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2780:3236] 2024-11-21T17:52:24.472747Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2781:3034] 2024-11-21T17:52:24.472813Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2780:3236], server id = [2:2781:3034], tablet id = 72075186224037897, status = OK 2024-11-21T17:52:24.472853Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2781:3034], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:52:24.472866Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T17:52:24.472913Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:52:24.472921Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2777:3233], StatRequests.size() = 1 2024-11-21T17:52:24.475021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2785:3240], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.475053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.475113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.476317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T17:52:24.584635Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:52:24.584659Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:52:24.615427Z node 1 :STATISTICS DEB ... 040788Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 116 ], ReplyToActorId[ [2:7460:5371]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:04.040875Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2024-11-21T17:55:04.040883Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:7460:5371], StatRequests.size() = 1 2024-11-21T17:55:05.365383Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T17:55:05.365411Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:55:05.365414Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T17:55:05.365417Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T17:55:05.647919Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7506:5392]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:05.648004Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2024-11-21T17:55:05.648010Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7506:5392], StatRequests.size() = 1 2024-11-21T17:55:06.792850Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T17:55:06.793038Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:55:06.793123Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:55:06.856196Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037889 2024-11-21T17:55:06.856249Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 225.000000s, at schemeshard: 72075186224037889 2024-11-21T17:55:06.856383Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 25 2024-11-21T17:55:06.867891Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:55:07.161468Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7545:5413]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:07.161556Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T17:55:07.161567Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7545:5413], StatRequests.size() = 1 2024-11-21T17:55:08.221352Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:08.221378Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:08.221388Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. 
Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T17:55:08.221392Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:55:08.221501Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T17:55:08.224038Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:08.225192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7577:5435], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:08.225218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587:5440], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:08.225260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:08.228316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T17:55:08.239362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591:5443], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T17:55:08.496961Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7712:5509]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:08.497008Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T17:55:08.497056Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2024-11-21T17:55:08.497061Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T17:55:08.497090Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:55:08.497105Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7712:5509], StatRequests.size() = 1 2024-11-21T17:55:08.509996Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzBkODVjNDktYzY4MDkyNTYtYjliY2I5OC1jY2Q4MTY4MQ==, TxId: 2024-11-21T17:55:08.510018Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzBkODVjNDktYzY4MDkyNTYtYjliY2I5OC1jY2Q4MTY4MQ==, TxId: 2024-11-21T17:55:08.510122Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:08.521415Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:55:08.521437Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:08.605001Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7742:5523]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:08.605094Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T17:55:08.605104Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7742:5523], StatRequests.size() = 1 2024-11-21T17:55:08.615519Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:55:08.615543Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:55:08.677689Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:2954:3067], schemeshard count = 1 2024-11-21T17:55:09.137154Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T17:55:09.137180Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 194.000000s, at schemeshard: 72075186224037899 2024-11-21T17:55:09.137251Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2024-11-21T17:55:09.158984Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:55:10.169102Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7794:5558]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:10.169195Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T17:55:10.169201Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7794:5558], StatRequests.size() = 1 2024-11-21T17:55:11.298441Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:55:11.298526Z node 2 :STATISTICS DEBUG: [72075186224037897] 
ScheduleNextTraversal 2024-11-21T17:55:11.298533Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:11.298546Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2024-11-21T17:55:11.298552Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:11.298742Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T17:55:11.299805Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:11.304670Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTI4MzU2NzgtZWVlN2Q0YmItOWU4NmI1OGEtYzA1MmI3NTA=, TxId: 2024-11-21T17:55:11.304691Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTI4MzU2NzgtZWVlN2Q0YmItOWU4NmI1OGEtYzA1MmI3NTA=, TxId: 2024-11-21T17:55:11.304829Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:11.316596Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:11.316619Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:11.687783Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7865:5599]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:11.687879Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T17:55:11.687888Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7865:5599], StatRequests.size() = 1 2024-11-21T17:55:13.157495Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7917:5631]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:13.157572Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T17:55:13.157581Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7917:5631], StatRequests.size() = 1 2024-11-21T17:55:14.175530Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 2 2024-11-21T17:55:14.175625Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:14.175632Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T17:55:14.175682Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:55:14.175854Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:55:14.513085Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7953:5650]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:14.513174Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T17:55:14.513180Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7953:5650], StatRequests.size() = 1 >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] [GOOD] >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateExternalTablet >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[select-where_not_null--Debug] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] >> THiveTest::TestCreateExternalTablet [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_empty_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD] >> SubDomainWithReboots::Delete [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] >> KqpOlapScheme::DropThenAddColumnCompaction [GOOD] >> KqpOlapScheme::DropColumnTableStoreErrors >> test.py::test[window-yql-14738-default.txt-Debug] [GOOD] >> test.py::test[window-yql-14738-default.txt-Plan] [GOOD] >> test.py::test[window-yql-14738-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::Delete [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is 
[0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:02.435320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:02.435349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:02.435359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:02.435386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:02.435406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:02.435422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:02.449040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:02.449059Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.451063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:02.451150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:02.451172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:02.453688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:02.453755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:02.455660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.456903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.458377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:02.464417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.464422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:02.464453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:02.465691Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.486404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:02.486506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:02.486670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:02.486678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.487479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.487515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:02.487566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.487578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:02.487583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:02.487588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:02.488122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.488134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:02.488139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:02.488552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.488563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T17:55:02.488571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.488578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.489209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:02.489746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:02.489794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:02.489986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.490012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:02.490028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.490100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:02.490108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.490136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:02.490149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.490634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.490645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.490684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.490689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:02.490767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.490775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:02.490786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2024-11-21T17:55:02.490791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.490796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:02.490801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.490806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:02.490813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:02.490824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:02.490830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:02.490833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... AT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 2 2024-11-21T17:55:17.454984Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [63:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:55:17.455014Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:17.455020Z node 63 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T17:55:17.455028Z node 63 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:55:17.455031Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:17.455037Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:55:17.455040Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:17.455044Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:55:17.455048Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:55:17.455072Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:17.455077Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T17:55:17.455080Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2024-11-21T17:55:17.455084Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:55:17.455214Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:17.455244Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:17.455248Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:17.455253Z node 63 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2024-11-21T17:55:17.455257Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:55:17.455512Z node 63 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:17.455523Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:17.455530Z node 63 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:17.455534Z node 63 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:55:17.455538Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:17.455547Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:55:17.455552Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [63:424:2391] 2024-11-21T17:55:17.455883Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:17.455894Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:17.456035Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:17.456072Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T17:55:17.456371Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:17.456429Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:55:17.456517Z node 63 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2024-11-21T17:55:17.456875Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:17.456917Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:17.457021Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046678944 2024-11-21T17:55:17.457027Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:17.457049Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:55:17.457170Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:17.457177Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:17.457187Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:17.457270Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:17.457315Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:55:17.457325Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [63:429:2396] 2024-11-21T17:55:17.457555Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:55:17.457565Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:55:17.457923Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:17.457933Z node 63 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:55:17.457967Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:17.457977Z node 63 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:55:17.458034Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:17.458045Z node 63 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T17:55:17.458104Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:17.458136Z node 63 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 41us result status StatusPathDoesNotExist 2024-11-21T17:55:17.458169Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" 
PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:55:17.458211Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:17.458225Z node 63 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 15us result status StatusSuccess 2024-11-21T17:55:17.458274Z node 63 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 >> KqpOlapScheme::DropThenAddColumnIndexation [GOOD] >> KqpOlapScheme::DropTtlColumn >> KqpOlapScheme::DropColumnTableStoreErrors [GOOD] >> KqpOlapScheme::DropTableAfterInsert >> KqpOlapScheme::DropTtlColumn [GOOD] >> KqpOlapScheme::InsertAddInsertDrop >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [GOOD] >> BasicStatistics::TwoDatabases [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters >> ForceDropWithReboots::ForceDeleteCreateSubdomainInfly [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2024-11-21T17:52:22.805362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:453:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:22.805386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:22.805390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00161e/r3tmp/tmplbvVbN/pdisk_1.dat 2024-11-21T17:52:22.884286Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20506, node 1 2024-11-21T17:52:22.979123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:22.979140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:22.979142Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:22.979214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:22.984691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:23.060255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:23.060283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:23.071637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3443 2024-11-21T17:52:23.487628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:24.233870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:24.233900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:24.267454Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:52:24.268123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:24.318367Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:24.324926Z node 3 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:24.324946Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:24.329336Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:24.329400Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:24.329413Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:24.329417Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:24.329421Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:24.329425Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:24.329429Z node 3 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:24.329434Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:24.329554Z node 3 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:24.492287Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:24.492310Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1816:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:24.493461Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1826:2561] 2024-11-21T17:52:24.494603Z node 3 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [3:1861:2578] 2024-11-21T17:52:24.494636Z node 3 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [3:1861:2578], schemeshard id = 72075186224037889 2024-11-21T17:52:24.495202Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database1 2024-11-21T17:52:24.498905Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:24.498917Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:24.498925Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2024-11-21T17:52:24.499774Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:24.499793Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:24.501650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:24.502951Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:24.502971Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:24.504954Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:24.517036Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:24.560574Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:24.648029Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:24.804164Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T17:52:25.468832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:26.171204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:26.171246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:26.194604Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:26.195630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:26.276758Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:26.287141Z node 2 :STATISTICS INFO: [72075186224038907] OnActivateExecutor 2024-11-21T17:52:26.287172Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Execute 2024-11-21T17:52:26.294392Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInitSchema::Complete 2024-11-21T17:52:26.294649Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Execute 2024-11-21T17:52:26.294676Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:26.294682Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:26.294689Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:26.294696Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalOperations: table count# 0 2024-11-21T17:52:26.294702Z node 2 :STATISTICS DEBUG: [72075186224038907] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:26.294710Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxInit::Complete 2024-11-21T17:52:26.294893Z node 2 :STATISTICS INFO: [72075186224038907] Subscribed for config changes 2024-11-21T17:52:26.397165Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3031:2537] 2024-11-21T17:52:26.397321Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Execute: database# /Root/Database2 2024-11-21T17:52:26.398545Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:26.398554Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:26.398562Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database2/.metadata/_statistics 2024-11-21T17:52:26.399640Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038907, at schemeshard: 72075186224038899 2024-11-21T17:52:26.399656Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3075:2560], at schemeshard: 72075186224038899, StatisticsAggregatorId: 72075186224038907, at schemeshard: 72075186224038899 2024-11-21T17:52:26.402409Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:3104:2578] 2024-11-21T17:52:26.402462Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectSchemeShard, pipe server id = [2:3104:2578], schemeshard id = 72075186224038899 2024-11-21T17:52:26.408073Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:26.408104Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:26.409472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730657:1, at schemeshard: 72075186224038899 2024-11-21T17:52:26.410353Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976730657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224038899 PathId: 3 } 2024-11-21T17:52:26.410372Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976730657 2024-11-21T17:52:26.412955Z node 2 :HIVE WARN: HIVE#72075186224038898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:26.424500Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxConfigure::Complete 2024-11-21T17:52:26.424891Z node 2 :HIVE WARN: HIVE#72075186224038898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:26.554379Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976730657. Doublechecking... 2024-11-21T17:52:26.722363Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T17:52:27.610791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3457:3324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:27.610843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:27.614242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Ope ... 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:14.274137Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T17:55:14.274147Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:10086:4704], StatRequests.size() = 1 2024-11-21T17:55:14.741325Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224038899 2024-11-21T17:55:14.741374Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 189.000000s, at schemeshard: 72075186224038899 2024-11-21T17:55:14.741480Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038899, stats size# 49 2024-11-21T17:55:14.763742Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxSchemeShardStats::Complete 2024-11-21T17:55:15.521252Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10147:4730]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:15.521330Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T17:55:15.521336Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10147:4730], StatRequests.size() = 1 2024-11-21T17:55:15.826511Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T17:55:15.826542Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:15.826553Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 3] is data table. 2024-11-21T17:55:15.826558Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T17:55:15.826670Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2024-11-21T17:55:15.827443Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:15.828461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10163:4375], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:15.828488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10174:4380], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:15.828510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:15.831427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038899 2024-11-21T17:55:15.843368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10177:4383], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2024-11-21T17:55:16.062081Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10303:4449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:16.062141Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:55:16.062153Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10305:4451] 2024-11-21T17:55:16.062165Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10305:4451] 2024-11-21T17:55:16.062335Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10305:4451], server id = [2:10306:4452], tablet id = 72075186224038907, status = OK 2024-11-21T17:55:16.062348Z node 2 :STATISTICS DEBUG: [72075186224038907] EvServerConnected, pipe server id = [2:10306:4452] 2024-11-21T17:55:16.062369Z node 2 :STATISTICS DEBUG: [72075186224038907] EvConnectNode, pipe server id = [2:10306:4452], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T17:55:16.062378Z node 2 :STATISTICS DEBUG: [72075186224038907] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T17:55:16.062397Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:55:16.062407Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10303:4449], StatRequests.size() = 1 2024-11-21T17:55:16.076039Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWM3MWUxZi1hY2VhNmU4YS1hMWQxY2JlMS03MzgyYjc3Zg==, TxId: 2024-11-21T17:55:16.076066Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWM3MWUxZi1hY2VhNmU4YS1hMWQxY2JlMS03MzgyYjc3Zg==, TxId: 2024-11-21T17:55:16.076267Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2024-11-21T17:55:16.087708Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 3] 2024-11-21T17:55:16.087726Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:16.129495Z node 2 :STATISTICS DEBUG: [72075186224038907] EvFastPropagateCheck 2024-11-21T17:55:16.129535Z node 2 :STATISTICS DEBUG: [72075186224038907] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:55:16.181818Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10305:4451], schemeshard count = 1 2024-11-21T17:55:16.451029Z node 3 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T17:55:16.461552Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:16.461582Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:16.461594Z node 3 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T17:55:16.461599Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:55:16.461705Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database1 2024-11-21T17:55:16.462416Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:16.467992Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YzcyZmM2ZDEtNTk0OWJjNTgtMmY3ZTQwYzAtYTdjMzRlMGU=, TxId: 2024-11-21T17:55:16.468023Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YzcyZmM2ZDEtNTk0OWJjNTgtMmY3ZTQwYzAtYTdjMzRlMGU=, TxId: 2024-11-21T17:55:16.468184Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:16.480785Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:55:16.480807Z node 3 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:16.793240Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10401:4766]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:16.793326Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T17:55:16.793332Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10401:4766], StatRequests.size() = 1 2024-11-21T17:55:18.012172Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10470:4794]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:18.012268Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T17:55:18.012274Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10470:4794], StatRequests.size() = 1 2024-11-21T17:55:18.285032Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal 2024-11-21T17:55:18.285058Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:18.285070Z node 2 :STATISTICS DEBUG: [72075186224038907] IsColumnTable. Path [OwnerId: 72075186224038899, LocalPathId: 4] is data table. 2024-11-21T17:55:18.285076Z node 2 :STATISTICS DEBUG: [72075186224038907] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038899, LocalPathId: 4] 2024-11-21T17:55:18.285179Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2024-11-21T17:55:18.285895Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T17:55:18.290656Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDE3ZDU5NzItNjNiNzhjNzAtODIyMTQ4NGItNGVlMzQ3Ng==, TxId: 2024-11-21T17:55:18.290680Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDE3ZDU5NzItNjNiNzhjNzAtODIyMTQ4NGItNGVlMzQ3Ng==, TxId: 2024-11-21T17:55:18.290825Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Execute 2024-11-21T17:55:18.302716Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038899, LocalPathId: 4] 2024-11-21T17:55:18.302735Z node 2 :STATISTICS DEBUG: [72075186224038907] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:18.827669Z node 3 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 2 2024-11-21T17:55:18.827830Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T17:55:18.827941Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T17:55:18.848953Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:18.848978Z node 3 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:19.105653Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10556:4808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:19.105764Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T17:55:19.105773Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10556:4808], StatRequests.size() = 1 2024-11-21T17:55:19.105896Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10558:4537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:19.106829Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:55:19.106838Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10558:4537], StatRequests.size() = 1 >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v0] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteCreateSubdomainInfly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for 
TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:04.345798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:04.345814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.345817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:04.345820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:04.345831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:04.345834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:04.345840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.345890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:04.352937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:04.352951Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.354488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:04.354580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:04.354614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:04.356715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:04.356787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:04.356897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.357107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.357689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.357890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.357896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.357904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:04.357908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.357913Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:04.357940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:04.358994Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.371095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:04.371158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.371191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:04.371228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:04.371233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.371717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.371730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:04.371765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.371770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:04.371773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:04.371775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:04.372083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.372090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:04.372093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:04.372368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.372375Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.372378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.372381Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.372722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:04.373022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:04.373051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:04.373154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.373171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:04.373182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.373213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:04.373217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.373231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:04.373238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.373540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.373546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.373565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.373568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:04.373608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.373611Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:04.373618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:04.373620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.373624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:04.373626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.373629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:04.373631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:04.373638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:04.373640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:04.373643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:19.804073Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:19.804165Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:55:19.804439Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:55:19.804463Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:55:19.804467Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [65:296:2288] 2024-11-21T17:55:19.804821Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:19.804836Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:55:19.804890Z node 65 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:19.804893Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:55:19.804909Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:55:19.804924Z node 65 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:19.804927Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [65:197:2200], at schemeshard: 72057594046678944, txId: 1003, path id: 2 2024-11-21T17:55:19.804930Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [65:197:2200], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:55:19.804957Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:19.804962Z node 65 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 1003:0 ProgressState 2024-11-21T17:55:19.804968Z node 65 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:55:19.804970Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 
2024-11-21T17:55:19.804974Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:55:19.804978Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:19.804981Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:55:19.804983Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:55:19.805002Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:19.805005Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 2, subscribers: 1 2024-11-21T17:55:19.805007Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2024-11-21T17:55:19.805009Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T17:55:19.805118Z node 65 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:19.805128Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:19.805131Z node 65 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:19.805134Z node 65 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2024-11-21T17:55:19.805136Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:55:19.805222Z node 65 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:19.805228Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:19.805230Z node 65 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:19.805232Z node 65 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:55:19.805234Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:19.805239Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 1 2024-11-21T17:55:19.805242Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [65:295:2287] 2024-11-21T17:55:19.805797Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: 
Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:19.805805Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:19.806002Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 Leader for TabletID 72057594037968897 is [65:209:2209] sender: [65:331:2058] recipient: [65:15:2062] 2024-11-21T17:55:19.806085Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:19.806114Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:55:19.806119Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [65:296:2288] 2024-11-21T17:55:19.806161Z node 65 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:55:19.806196Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:19.806237Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:55:19.806279Z node 65 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T17:55:19.806296Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:19.806310Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:19.806329Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:19.806333Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:19.806353Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:55:19.806392Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:19.806396Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:19.806404Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:19.806743Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:55:19.807010Z node 65 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:19.807039Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:19.807047Z node 65 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T17:55:19.807089Z node 65 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:19.807096Z node 65 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 2024-11-21T17:55:19.807142Z node 65 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:19.807169Z node 65 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 33us result status StatusPathDoesNotExist 2024-11-21T17:55:19.807196Z node 65 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_duplicates [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting >> test_acl.py::TestSqsACLWithPath::test_apply_permissions[tables_format_v1] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_ya_count_queues[tables_format_v1] [GOOD] >> SubDomainWithReboots::CreateTabletInsideWithStoragePools [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format [GOOD] >> test.py::test[window-yql-14738-default.txt-Results] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-Debug] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::CreateTabletInsideWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 
is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:02.874081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:02.874105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.874110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:02.874115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:02.874130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:02.874134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:02.874142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.874238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:02.886811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:02.886835Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.889290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:02.889389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:02.889421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:02.892133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:02.892217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:02.892356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.892578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.893400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.893680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.893693Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.893705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:02.893712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.893717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:02.893760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:02.894988Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.913342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:02.913428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.913477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:02.913539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:02.913548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.914166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.914200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:02.914240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.914248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:02.914253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:02.914257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:02.914650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.914660Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:02.914665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 
2024-11-21T17:55:02.914976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.914986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.914991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.914997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.915589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:02.915973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:02.916014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:02.916166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.916188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:02.916202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.916268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:02.916275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.916297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:02.916307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.916715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.916725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.916750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.916755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:02.916812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.916818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:02.916827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:02.916830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.916835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:02.916840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.916844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:02.916847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:02.916857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:02.916862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:02.916865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... AT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T17:55:20.571176Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:20.571187Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:20.571190Z node 73 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:20.571193Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T17:55:20.571197Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:20.571275Z node 73 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:20.571282Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:20.571284Z node 73 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:20.571286Z node 73 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T17:55:20.571288Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T17:55:20.571296Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T17:55:20.571493Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Handle 
TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 1003 Step: 150 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 202 } } 2024-11-21T17:55:20.571500Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409548, partId: 0 2024-11-21T17:55:20.571511Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 1003 Step: 150 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 202 } } 2024-11-21T17:55:20.571518Z node 73 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 1003 Step: 150 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 202 } } 2024-11-21T17:55:20.571570Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 453 RawX2: 313532615027 } Origin: 72075186233409548 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:55:20.571573Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409548, partId: 0 2024-11-21T17:55:20.571580Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 453 RawX2: 313532615027 } Origin: 72075186233409548 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:55:20.571583Z node 73 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:55:20.571587Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 453 RawX2: 313532615027 } Origin: 72075186233409548 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T17:55:20.571593Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:20.571596Z node 73 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:20.571598Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:55:20.571601Z node 73 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T17:55:20.572046Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:20.572114Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T17:55:20.572388Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:20.572428Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:20.572492Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:20.572499Z node 73 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:55:20.572511Z node 73 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:55:20.572515Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:20.572520Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T17:55:20.572531Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [73:423:2392] message: TxId: 1003 2024-11-21T17:55:20.572537Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:20.572542Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:55:20.572546Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:55:20.572567Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T17:55:20.572967Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:55:20.572976Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [73:496:2449] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:55:20.573083Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:20.573113Z node 73 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 38us result status StatusSuccess 2024-11-21T17:55:20.573187Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 
0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:20.573237Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:20.573257Z node 73 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 21us result status StatusSuccess 2024-11-21T17:55:20.573305Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "storage-pool-number-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v0] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] >> test.py::test[ypath-limit_with_range-default.txt-Debug] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-Plan] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-Results] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-no] [GOOD] >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-empty] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] 
[GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_receive_message_immediate_duration_counter [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2024-11-21T17:51:10.850256Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:51:10.851105Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:51:10.851211Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:51:10.851351Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:51:10.851564Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:51:10.851574Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:51:10.851695Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T17:51:10.851698Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:51:10.851732Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:51:10.851790Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:51:10.853760Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T17:51:10.853785Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:51:10.854082Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:51:10.854114Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:51:10.854141Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:51:10.854173Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:51:10.854204Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:51:10.854226Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 
2024-11-21T17:51:10.854248Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:51:10.854251Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:51:10.854262Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T17:51:10.854265Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T17:51:10.854271Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:51:10.854278Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:51:10.854393Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:51:10.857664Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:51:10.857690Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:51:10.857696Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:51:10.858026Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:51:10.858085Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:51:10.858141Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:51:10.858149Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:51:10.858156Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:51:10.858734Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:51:10.858831Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:51:10.858838Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:51:10.859329Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:51:10.859349Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:51:10.859354Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:51:10.859397Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T17:51:10.859413Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:51:10.859447Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T17:51:10.859451Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T17:51:10.859577Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:51:10.859630Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T17:51:10.859638Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T17:51:10.859641Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T17:51:10.859647Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:51:10.859679Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:51:10.859688Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:51:10.859715Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T17:51:10.859718Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T17:51:10.859722Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:51:10.859778Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:51:10.859811Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:51:10.859826Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:51:10.860350Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T17:51:10.860492Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:51:10.860517Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:51:10.860522Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T17:51:10.861514Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T17:51:10.861525Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T17:51:10.861548Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:51:10.861747Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:51:10.861835Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:51:10.861860Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T17:51:10.861866Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T17:51:10.861870Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:51:10.861928Z 
node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T17:51:10.861939Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T17:51:10.861943Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T17:51:10.861954Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T17:51:10.861959Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:51:10.861969Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:51:10.861981Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:51:10.861994Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:51:10.862000Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T17:51:10.862009Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T17:51:10.862013Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T17:51:10.862024Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T17:51:10.862064Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 
4-11-21T17:55:16.062043Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T17:55:16.062077Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "def1" } } GroupParameters { StoragePoolSpecifier { Name: "def2" } } GroupParameters { StoragePoolSpecifier { Name: "def3" } } ReturnAllMatchingGroups: true 2024-11-21T17:55:16.062111Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1145b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2024-11-21T17:55:16.062120Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:55:16.062161Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [147:303:2283] 2024-11-21T17:55:16.062164Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [147:303:2283] 2024-11-21T17:55:16.062174Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:55:16.062178Z node 147 :TABLET_RESOLVER DEBUG: SelectForward node 147 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [147:92:2120] 2024-11-21T17:55:16.062183Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [147:303:2283] 2024-11-21T17:55:16.062188Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [147:303:2283] 2024-11-21T17:55:16.062191Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [147:303:2283] 2024-11-21T17:55:16.062203Z node 147 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [147:303:2283] 2024-11-21T17:55:16.062214Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [147:303:2283] 2024-11-21T17:55:16.062217Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [147:303:2283] 2024-11-21T17:55:16.062219Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [147:303:2283] 2024-11-21T17:55:16.062222Z node 147 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [147:270:2260] EventType# 268637702 2024-11-21T17:55:16.062239Z node 147 :HIVE DEBUG: HIVE#72057594037927937 Connected to tablet 72057594037932033 from tablet 72057594037927937 2024-11-21T17:55:16.062253Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2024-11-21T17:55:16.062257Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:55:16.062279Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T17:55:16.062283Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{26, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 
2024-11-21T17:55:16.062314Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483648 StoragePoolName: "def1" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483649 StoragePoolName: "def2" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2147483650 StoragePoolName: "def3" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T17:55:16.062331Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2024-11-21T17:55:16.062334Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:55:16.062340Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{22871770384416}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T17:55:16.062348Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{22871770384416}: tablet 72075186224037888 channel 0 assigned to group 2147483648 2024-11-21T17:55:16.062367Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{22871770384416}: tablet 72075186224037888 channel 1 assigned to group 2147483649 2024-11-21T17:55:16.062372Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{22871770384416}: tablet 72075186224037888 channel 2 assigned to group 2147483650 2024-11-21T17:55:16.062386Z node 147 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{22871770384416}(72075186224037888)::Execute - TryToBoot was not successfull 2024-11-21T17:55:16.062393Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2024-11-21T17:55:16.062397Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:55:16.072730Z node 147 :BS_PROXY_PUT INFO: [ef85ab71b8475fcf] bootstrap ActorId# [147:306:2286] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:703:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:55:16.072773Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Id# [72057594037927937:2:4:0:0:703:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:55:16.072779Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] restore Id# [72057594037927937:2:4:0:0:703:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:55:16.072786Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:703:1] Marker# BPG33 2024-11-21T17:55:16.072790Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:703:1] Marker# BPG32 2024-11-21T17:55:16.072815Z node 147 :BS_PROXY DEBUG: Send to queueActorId# [147:33:2077] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:703:1] FDS# 703 HandleClass# 
TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:55:16.073320Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:703:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85535 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:55:16.073366Z node 147 :BS_PROXY_PUT DEBUG: [ef85ab71b8475fcf] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T17:55:16.073378Z node 147 :BS_PROXY_PUT INFO: [ef85ab71b8475fcf] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:55:16.073430Z node 147 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:703:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T17:55:16.073460Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2024-11-21T17:55:16.073476Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxCreateTablet::Complete (72057594037927937,0) TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040201 [147:259:2254]} 2024-11-21T17:55:16.073503Z node 147 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{22871770384416}(72075186224037888)::Complete SideEffects: {Notifications: 0x10040207 [147:259:2254] Callbacks: 1 Actions: NKikimr::TTabletKillRequest} 2024-11-21T17:55:16.073553Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:55:16.073574Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:55:16.073581Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:55:16.073586Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:55:16.073594Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:55:16.073601Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:55:16.073607Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:55:16.073656Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [147:310:2289] 2024-11-21T17:55:16.073661Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [147:310:2289] 2024-11-21T17:55:16.073677Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:55:16.073692Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:55:16.073701Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T17:55:16.073705Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T17:55:16.073709Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T17:55:16.073713Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:55:16.073720Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:55:16.073725Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2024-11-21T17:55:16.073737Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:55:16.073742Z node 147 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T17:55:16.073756Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [147:310:2289] 2024-11-21T17:55:16.073761Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [147:310:2289] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v1] [GOOD] >> test_queue_counters.py::TestSqsGettingCounters::test_sqs_action_counters [GOOD] >> ForceDropWithReboots::ForceDropDeleteInFly [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDropDeleteInFly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:04.531283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:04.531304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.531309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:04.531314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:04.531329Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:04.531334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:04.531343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.531405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:04.538854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:04.538870Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.540597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:04.540661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:04.540684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:04.542667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:04.542722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:04.542811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.542961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.543418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.543620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.543626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.543636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:04.543640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.543645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:04.543674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:04.544796Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.557959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:04.558038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:55:04.558085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:04.558140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:04.558145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.558753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.558783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:04.558823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.558831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:04.558835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:04.558839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:04.559204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.559217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:04.559222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:04.559540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.559551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.559557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.559564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.560121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:04.560531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:04.560580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:04.560738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.560759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:04.560774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.560817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:04.560821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.560843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:04.560853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.561264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.561276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.561312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.561316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:04.561391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.561398Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:04.561409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:04.561413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.561418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:04.561423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.561427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:04.561431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:04.561442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:04.561448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:04.561451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
aths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:23.182287Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:23.182298Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:23.182319Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:23.182325Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:55:23.182366Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:23.182662Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T17:55:23.182739Z node 76 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:23.182750Z node 76 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:55:23.182758Z node 76 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T17:55:23.182824Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:23.182853Z node 76 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 37us result status StatusPathDoesNotExist 2024-11-21T17:55:23.182885Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:55:23.182935Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:23.182951Z node 76 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 18us result status StatusSuccess 
2024-11-21T17:55:23.183001Z node 76 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:23.183175Z node 76 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [76:122:2148] sender: [76:609:2058] recipient: [76:99:2134] Leader for TabletID 72057594046678944 is [76:122:2148] sender: [76:612:2058] recipient: [76:611:2559] Leader for TabletID 72057594046678944 is [76:122:2148] sender: [76:613:2058] recipient: [76:15:2062] Leader for TabletID 72057594046678944 is [76:614:2560] sender: [76:615:2058] recipient: [76:611:2559] 2024-11-21T17:55:23.190157Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:23.190180Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:23.190186Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:23.190190Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:23.190195Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:23.190198Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:23.190207Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:23.190258Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:23.191365Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:23.191684Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:23.191718Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:23.191739Z node 76 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:23.191744Z node 76 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:55:23.191765Z node 76 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2024-11-21T17:55:23.191846Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:23.191861Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: DirA, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T17:55:23.191871Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.191878Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.191928Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.191953Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.191966Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.191976Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.191990Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192001Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192021Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192053Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192065Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192108Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192117Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192144Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192156Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192168Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192191Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192200Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192222Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192264Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192279Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.192285Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 
72057594046678944 2024-11-21T17:55:23.192290Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T17:55:23.193654Z node 76 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:23.193666Z node 76 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:23.193761Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:23.193769Z node 76 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:23.193775Z node 76 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:23.194008Z node 76 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Leader for TabletID 72057594046678944 is [76:614:2560] sender: [76:669:2058] recipient: [76:15:2062] Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 >> ForceDropWithReboots::ForceDeleteCreateTableInFlyWithRebootAtCommit [GOOD] |86.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> KqpOlapScheme::DropTableAfterInsert [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[std] >> HttpRequest::ProbeServerless [GOOD] |86.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_counters.py::TestSqsGettingCounters::test_counters_when_sending_reading_deleting [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteCreateTableInFlyWithRebootAtCommit [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:02.435323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:02.435349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:02.435374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:02.435388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:02.435392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:02.435400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:02.447552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:02.447574Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.449527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:02.449586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:02.449606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:02.453914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:02.453982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:02.455634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.456893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.458258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:02.464417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.464422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:02.464457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:02.465690Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.480862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:02.480937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.480977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:02.481020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:02.481025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.481609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.481638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:02.481679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.481685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:02.481693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:02.481696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:02.482023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:02.482250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.482264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.482689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:02.483008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:02.483826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:02.483990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.484072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:02.484077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.484096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:02.484105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.484491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.484525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:02.484581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484586Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:02.484594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:02.484597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.484600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:02.484604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.484607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:02.484609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:02.484617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:02.484621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:02.484623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
tion: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:55:23.632378Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:55:23.632381Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:55:23.632384Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:23.632575Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:55:23.632581Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:55:23.632584Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:55:23.632586Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:55:23.632588Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:55:23.632592Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 1 2024-11-21T17:55:23.632595Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [89:300:2292] 2024-11-21T17:55:23.632714Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:23.632719Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:23.632721Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:23.632748Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 Leader for TabletID 72057594037968897 is [89:212:2212] sender: [89:348:2058] recipient: [89:15:2062] 2024-11-21T17:55:23.632972Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T17:55:23.633016Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:23.633052Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:23.633076Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2024-11-21T17:55:23.633104Z node 89 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 
ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T17:55:23.633116Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:23.633131Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:55:23.633144Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:55:23.633176Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:23.633188Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:55:23.633302Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:23.633328Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:23.633331Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:55:23.633337Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:55:23.633340Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:55:23.633343Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:23.633355Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:23.633357Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:23.633373Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:55:23.633442Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:23.633456Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:55:23.633460Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [89:301:2293] 2024-11-21T17:55:23.633937Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:55:23.633958Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:55:23.633964Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:23.633976Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:23.634002Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:23.634007Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:23.634018Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:23.634060Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:23.634296Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 Ok notification wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T17:55:23.634352Z node 89 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:23.634361Z node 89 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:55:23.634366Z node 89 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T17:55:23.634411Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:23.634430Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 25us result status StatusPathDoesNotExist 2024-11-21T17:55:23.634452Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:55:23.634481Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:23.634490Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 10us result status StatusSuccess 2024-11-21T17:55:23.634523Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropTableAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 3730, MsgBus: 3620 2024-11-21T17:54:55.098850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439792583799373296:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:54:55.098873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/000f1b/r3tmp/tmpg5mROw/pdisk_1.dat 2024-11-21T17:54:55.359582Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:54:55.369857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:54:55.369886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:54:55.392522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3730, node 1 2024-11-21T17:54:55.402373Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.402382Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:54:55.766343Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:54:55.766357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:54:55.766358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:54:55.766401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3620 TClient is connected to server localhost:3620 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:54:56.174452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16); 2024-11-21T17:54:56.198123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439792588094341019:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.198141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:54:56.326582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T17:54:56.343445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.343494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.343509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:54:56.343533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.343545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:54:56.343557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.343577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.343590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:54:56.343596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.343606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.343624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.343628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:54:56.343640Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.343654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:54:56.343656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.343676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.343678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:54:56.343695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439792588094341214:2304];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:54:56.343699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:54:56.343723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:54:56.343752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:54:56.343777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:54:56.343799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:54:56.343824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439792588094341237:2316];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:54:56.344051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:54:56.344067Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:54:56.344078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:54:56.344091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:54:56.344108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:54:56.344117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:54:56.344126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUp ... scription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:55:18.494859Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:55:18.494887Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:55:18.495911Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:55:18.497111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... CREATE TABLE `/Root/ColumnTableTest` (id Uint64 NOT NULL, int_column Int32, PRIMARY KEY (id)) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2024-11-21T17:55:18.671095Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792680460964656:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:18.671115Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:18.674016Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T17:55:18.679459Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:55:18.679486Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:55:18.679515Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:55:18.679534Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:55:18.679552Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:55:18.679572Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:55:18.679587Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:55:18.679607Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:55:18.679627Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:55:18.679646Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:55:18.679666Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:55:18.679683Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[5:7439792680460964702:2303];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T17:55:18.680104Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:55:18.680117Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:55:18.680126Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:55:18.680135Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:55:18.680146Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:55:18.680153Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T17:55:18.680161Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T17:55:18.680173Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T17:55:18.680183Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T17:55:18.680194Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T17:55:18.680200Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T17:55:18.680211Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T17:55:18.680280Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T17:55:18.680298Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T17:55:18.680322Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T17:55:18.680336Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 
2024-11-21T17:55:18.680356Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T17:55:18.680360Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T17:55:18.680377Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T17:55:18.680381Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T17:55:18.680392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T17:55:18.680402Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T17:55:18.681519Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:55:18.728267Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=1392;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=1392;columns=2; 2024-11-21T17:55:18.740826Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439792680460964792:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:18.740850Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:55:18.742681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:55:18.745645Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T17:55:18.746839Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037888 not found WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T17:55:23.394521Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439792680460964058:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:55:23.394565Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2024-11-21T17:52:21.002865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:21.002905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:21.002913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001635/r3tmp/tmpCieKOx/pdisk_1.dat 2024-11-21T17:52:21.073898Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15011, node 1 2024-11-21T17:52:21.165126Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:21.165142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:21.165145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:21.165211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:21.170391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:21.246276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:21.246306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:21.257970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2290 2024-11-21T17:52:21.662024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:22.416882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:22.416908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:22.450095Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:22.451051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:22.503722Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:22.515214Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:22.515247Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:22.522461Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:22.522654Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:22.522677Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:22.522683Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:22.522689Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:22.522695Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T17:52:22.522700Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:22.522707Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:22.522828Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:22.696830Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:22.696863Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:22.697757Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T17:52:22.698990Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T17:52:22.699213Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T17:52:22.699610Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T17:52:22.703800Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:22.703816Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:22.703825Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T17:52:22.704584Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:22.704604Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:22.706543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:22.707782Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:22.707807Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:22.710194Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:22.721928Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:22.743878Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:22.850242Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:23.027449Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T17:52:23.772984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:24.225828Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:24.309992Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T17:52:24.310010Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:24.310018Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2494:2903], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:24.310151Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2495:2904] 2024-11-21T17:52:24.310178Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2495:2904], schemeshard id = 72075186224037899 2024-11-21T17:52:24.993451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2622:3196], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.993486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:24.996064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T17:52:25.040462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:25.040511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:52:25.040537Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:52:25.040552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T17:52:25.040566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:52:25.040577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:52:25.040589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:52:25.040600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:52:25.040613Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:52:25.040624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:52:25.040636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:52:25.040663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2770:3039];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:52:25.046286Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;self_id=[2:2779:3042];tablet_id=72075186224037906;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:25.046309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[2:2779: ... 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:21.711544Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T17:55:21.711567Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:55:22.370964Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze 2024-11-21T17:55:22.370988Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=OڦVX*gG 2024-11-21T17:55:22.370993Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2024-11-21T17:55:23.737910Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T17:55:23.737973Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T17:55:23.769024Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:23.769071Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2024-11-21T17:55:23.769078Z node 2 :STATISTICS DEBUG: [72075186224037897] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:23.769264Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T17:55:23.780444Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T17:55:23.780558Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T17:55:23.780568Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T17:55:23.780691Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T17:55:23.791810Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T17:55:23.791872Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2024-11-21T17:55:23.792055Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9697:7242], server id = [2:9702:7247], tablet id = 72075186224037905, status = OK 2024-11-21T17:55:23.792076Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9697:7242], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792092Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9698:7243], server id = [2:9703:7248], tablet id = 72075186224037906, status = OK 2024-11-21T17:55:23.792096Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9698:7243], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792226Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9699:7244], server id = [2:9704:7249], tablet id = 72075186224037907, status = OK 2024-11-21T17:55:23.792253Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9699:7244], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792275Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9700:7245], server id = [2:9705:7250], tablet id = 72075186224037908, status = OK 2024-11-21T17:55:23.792282Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9700:7245], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792357Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T17:55:23.792393Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T17:55:23.792430Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9701:7246], server id = [2:9706:7251], tablet id = 72075186224037909, status = OK 2024-11-21T17:55:23.792435Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9701:7246], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792456Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T17:55:23.792519Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T17:55:23.792531Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9697:7242], server id = [2:9702:7247], tablet id = 72075186224037905 2024-11-21T17:55:23.792534Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.792572Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9698:7243], server id = [2:9703:7248], tablet id = 72075186224037906 2024-11-21T17:55:23.792574Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.792591Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9700:7245], server id = [2:9705:7250], tablet id = 72075186224037908 2024-11-21T17:55:23.792595Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.792619Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9707:7252], server id = [2:9709:7254], tablet id = 72075186224037910, status = OK 2024-11-21T17:55:23.792623Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9707:7252], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792655Z node 2 :STATISTICS DEBUG: Received 
TEvStatisticsResponse TabletId: 72075186224037909 2024-11-21T17:55:23.792685Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9699:7244], server id = [2:9704:7249], tablet id = 72075186224037907 2024-11-21T17:55:23.792687Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.792711Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9708:7253], server id = [2:9711:7256], tablet id = 72075186224037911, status = OK 2024-11-21T17:55:23.792714Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9708:7253], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792750Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9710:7255], server id = [2:9713:7258], tablet id = 72075186224037912, status = OK 2024-11-21T17:55:23.792754Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9710:7255], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792819Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9712:7257], server id = [2:9714:7259], tablet id = 72075186224037913, status = OK 2024-11-21T17:55:23.792824Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9712:7257], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792838Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2024-11-21T17:55:23.792871Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9701:7246], server id = [2:9706:7251], tablet id = 72075186224037909 2024-11-21T17:55:23.792873Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.792900Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2024-11-21T17:55:23.792930Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9715:7260], server id = [2:9716:7261], tablet id = 72075186224037914, status = OK 2024-11-21T17:55:23.792934Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9715:7260], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:23.792963Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2024-11-21T17:55:23.792978Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9707:7252], server id = [2:9709:7254], tablet id = 72075186224037910 2024-11-21T17:55:23.792979Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.793007Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2024-11-21T17:55:23.793024Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9708:7253], server id = [2:9711:7256], tablet id = 72075186224037911 2024-11-21T17:55:23.793026Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.793047Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2024-11-21T17:55:23.793051Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T17:55:23.793060Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9710:7255], server id = [2:9713:7258], tablet id = 72075186224037912 2024-11-21T17:55:23.793061Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.793074Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T17:55:23.793090Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 
2024-11-21T17:55:23.793127Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T17:55:23.793163Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9712:7257], server id = [2:9714:7259], tablet id = 72075186224037913 2024-11-21T17:55:23.793165Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.793505Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T17:55:23.793558Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9715:7260], server id = [2:9716:7261], tablet id = 72075186224037914 2024-11-21T17:55:23.793560Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:23.806129Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjNjNmU5ZjMtZmNmYWQ5NDYtM2JiYjM2OGYtN2I4MDM3MzI=, TxId: 2024-11-21T17:55:23.806148Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjNjNmU5ZjMtZmNmYWQ5NDYtM2JiYjM2OGYtN2I4MDM3MzI=, TxId: 2024-11-21T17:55:23.806268Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:23.817678Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:23.817701Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=OڦVX*gG, ActorId=[1:3928:3443] 2024-11-21T17:55:23.817951Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9734:5738]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:23.817996Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:55:23.818000Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T17:55:23.818045Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:55:23.818052Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T17:55:23.818060Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2024-11-21T17:55:23.820289Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> SubDomainWithReboots::DeclareAndDefine [GOOD] >> SubDomainWithReboots::CreateWithStoragePools [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::DeclareAndDefine [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:03.513765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:03.513783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:03.513787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:03.513790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:03.513799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2024-11-21T17:55:03.513802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:03.513808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:03.513872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:03.521496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:03.521518Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:03.523240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:03.523304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:03.523325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:03.525202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:03.525249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:03.525362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.525564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:03.526188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.526413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:03.526421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.526430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:03.526434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:03.526438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:03.526465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:03.527510Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:03.538823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:03.538901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.538952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:03.538998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:03.539004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.539661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.539679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:03.539722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.539731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:03.539733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:03.539737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:03.540141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.540152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:03.540156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:03.540492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.540499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.540502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.540506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.540890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:03.541240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:03.541282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:03.541431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T17:55:03.541454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:03.541470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.541520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:03.541524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.541545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:03.541553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:03.541924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:03.541932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:03.541967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.541974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:03.542049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.542055Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:03.542065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:03.542069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.542074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:03.542079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.542083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:03.542088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:03.542097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:03.542102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:03.542106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409548, partId: 0 2024-11-21T17:55:24.743710Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2024-11-21T17:55:24.743715Z node 89 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1003:0 HandleReply TEvConfigureStatus operationId:1003:0 at schemeshard:72057594046678944 2024-11-21T17:55:24.743721Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#1003:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2024-11-21T17:55:24.743725Z node 89 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 3 -> 128 2024-11-21T17:55:24.744194Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:24.744223Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:24.744242Z node 89 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:24.744247Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:55:24.744252Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1003 ready parts: 1/1 2024-11-21T17:55:24.744275Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1003 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:24.744599Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1003 msg type: 269090816 2024-11-21T17:55:24.744618Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1003 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2024-11-21T17:55:24.744671Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:24.744690Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 382252091497 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:24.744697Z node 89 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:55:24.744750Z node 89 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 240 2024-11-21T17:55:24.744758Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1003:0, at tablet 72057594046678944 2024-11-21T17:55:24.744777Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] 
was 5 2024-11-21T17:55:24.744787Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T17:55:24.745182Z node 89 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:24.745190Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:55:24.745218Z node 89 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:24.745222Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [89:200:2203], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T17:55:24.745274Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T17:55:24.745280Z node 89 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T17:55:24.745290Z node 89 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T17:55:24.745294Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:24.745298Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2024-11-21T17:55:24.745303Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T17:55:24.745307Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T17:55:24.745310Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T17:55:24.745336Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T17:55:24.745340Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1003, publications: 1, subscribers: 0 2024-11-21T17:55:24.745344Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2024-11-21T17:55:24.745427Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:24.745436Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T17:55:24.745440Z node 89 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T17:55:24.745444Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T17:55:24.745451Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:24.745463Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2024-11-21T17:55:24.746129Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T17:55:24.746168Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T17:55:24.746173Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T17:55:24.746221Z node 89 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T17:55:24.746233Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T17:55:24.746236Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [89:497:2452] TestWaitNotification: OK eventTxId 1003 2024-11-21T17:55:24.746282Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:24.746319Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 47us result status StatusSuccess 2024-11-21T17:55:24.746377Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:24.746428Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:24.746441Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 15us result status StatusSuccess 2024-11-21T17:55:24.746481Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: 
"DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::CreateWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:02.449014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:02.449035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.449040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:02.449045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:02.449066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:02.449070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:02.449079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.449160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:02.456916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:02.456936Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.458899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:02.458968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:02.458991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:02.460821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:02.460871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:02.460961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.461164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.461713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:02.464413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.464417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:02.464444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:02.465628Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.482826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:02.482891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:02.482989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2024-11-21T17:55:02.482996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.483641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.483672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:02.483712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.483720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:02.483724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:02.483728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:02.484135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:02.484568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.484589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.485153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:02.485597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:02.485635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:02.485771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.485808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2024-11-21T17:55:02.485820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.485882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:02.485890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.485910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:02.485921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.486733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.486811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:02.486937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.486948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:02.486964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:02.486969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.486975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:02.486980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.486986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:02.486990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:02.487008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:02.487014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:02.487018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
72057594046678944 2024-11-21T17:55:24.883264Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T17:55:24.883290Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:24.883632Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T17:55:24.883651Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 2024-11-21T17:55:24.883697Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:24.883710Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 403726927976 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:24.883714Z node 94 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:24.883770Z node 94 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 240 2024-11-21T17:55:24.883775Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:24.883797Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:24.883803Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:24.883809Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:55:24.884146Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:24.884152Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:55:24.884180Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:55:24.884193Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:24.884196Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2024-11-21T17:55:24.884199Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at schemeshard: 72057594046678944, 
txId: 1002, path id: 3 2024-11-21T17:55:24.884271Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:24.884280Z node 94 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:55:24.884290Z node 94 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:55:24.884294Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:55:24.884298Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:55:24.884303Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:55:24.884307Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:55:24.884310Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:55:24.884335Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:24.884339Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2024-11-21T17:55:24.884343Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:55:24.884346Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:55:24.884461Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:24.884474Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:24.884478Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:55:24.884482Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:55:24.884486Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:55:24.884609Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:24.884618Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:24.884621Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:55:24.884624Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T17:55:24.884631Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 4 2024-11-21T17:55:24.884639Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T17:55:24.884643Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [94:297:2289] 2024-11-21T17:55:24.885370Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:55:24.885473Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:55:24.885504Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:55:24.885509Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [94:298:2290] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:55:24.885607Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:24.885664Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 63us result status StatusSuccess 2024-11-21T17:55:24.885741Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:24.885807Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:24.885825Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 19us result status StatusSuccess 2024-11-21T17:55:24.885867Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v1] [GOOD] >> SubDomainWithReboots::Create [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:03.567849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T17:55:03.567871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:03.567876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:03.567881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:03.567896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:03.567900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:03.567909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:03.567983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:03.578880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:03.578905Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:03.581335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:03.581444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:03.581478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:03.584315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:03.584395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:03.584526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.584718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:03.585437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.585719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:03.585733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.585748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:03.585756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:03.585763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:03.585806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:03.587193Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 
2024-11-21T17:55:03.605178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:03.605272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.605323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:03.605385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:03.605394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.605998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.606033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:03.606083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.606093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:03.606097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:03.606101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:03.606495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.606509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:03.606513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:03.606894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.606906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.606911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.606916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.607455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:03.607876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:03.607925Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:03.608091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.608116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:03.608133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.608178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:03.608184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.608206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:03.608218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:03.608663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:03.608675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:03.608704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.608712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:03.608779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.608786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:03.608796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:03.608800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.608805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:03.608810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.608814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:03.608817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:03.608827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:03.608831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:03.608835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... perationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:25.996019Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2024-11-21T17:55:25.996038Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:25.996349Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2024-11-21T17:55:25.996365Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 2024-11-21T17:55:25.996407Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:25.996420Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 403726927976 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:25.996426Z node 94 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:25.996470Z node 94 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 240 2024-11-21T17:55:25.996475Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1002:0, at tablet 72057594046678944 2024-11-21T17:55:25.996492Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:25.996499Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:25.996504Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T17:55:25.996831Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:25.996836Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T17:55:25.996855Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T17:55:25.996867Z node 94 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:25.996870Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at schemeshard: 72057594046678944, txId: 1002, path id: 2 
2024-11-21T17:55:25.996873Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [94:200:2203], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T17:55:25.996914Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T17:55:25.996918Z node 94 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T17:55:25.996925Z node 94 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T17:55:25.996928Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:55:25.996931Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2024-11-21T17:55:25.996935Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T17:55:25.996938Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T17:55:25.996940Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T17:55:25.996957Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:25.996960Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1002, publications: 2, subscribers: 1 2024-11-21T17:55:25.996963Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T17:55:25.996965Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T17:55:25.997059Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:25.997068Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:25.997071Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:55:25.997074Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T17:55:25.997076Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T17:55:25.997159Z node 94 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:25.997165Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T17:55:25.997167Z node 94 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T17:55:25.997170Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 
2024-11-21T17:55:25.997172Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:25.997177Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 1 2024-11-21T17:55:25.997180Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [94:297:2289] 2024-11-21T17:55:25.997964Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:55:25.998021Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T17:55:25.998034Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T17:55:25.998038Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [94:298:2290] TestWaitNotification: OK eventTxId 1002 2024-11-21T17:55:25.998115Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:25.998139Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 32us result status StatusSuccess 2024-11-21T17:55:25.998197Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:25.998243Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:25.998251Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 9us result 
status StatusSuccess 2024-11-21T17:55:25.998277Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] >> ForceDropWithReboots::ForceDeleteSplitInFly [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] >> ForceDropWithReboots::PathsAndShardsCountersSimultaneousAlterSubDomain [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteSplitInFly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 
2024-11-21T17:55:04.446753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:04.446770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.446773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:04.446776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:04.446786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:04.446788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:04.446795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:04.446846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:04.454221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:04.454235Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.455656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:04.455712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:04.455728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:04.457758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:04.457825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:04.457926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.458085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.458587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.458791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.458801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.458810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:04.458817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.458822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:04.458853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for 
TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:04.459807Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:04.471137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:04.471205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.471243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:04.471287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:04.471291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.471795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.471812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:04.471849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.471857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:04.471859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:04.471862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:04.472174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.472183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:04.472187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:04.472523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.472533Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.472537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.472541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.472910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true 
} CoordinatorID: 72057594046316545 2024-11-21T17:55:04.473199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:04.473230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:04.473341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:04.473358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:04.473369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.473404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:04.473408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:04.473424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:04.473433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:04.473734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:04.473741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:04.473762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:04.473766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:04.473814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:04.473818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:04.473825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:04.473828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.473832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:04.473835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:04.473838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:04.473840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2024-11-21T17:55:04.473846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:04.473850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:04.473852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 4-11-21T17:55:27.267113Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:27.267178Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:55:27.267387Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:55:27.267450Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2024-11-21T17:55:27.267500Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:55:27.267551Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T17:55:27.267596Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:55:27.267692Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:27.267727Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186233409546 2024-11-21T17:55:27.267951Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:55:27.268507Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:55:27.268542Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T17:55:27.268642Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2024-11-21T17:55:27.268693Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2024-11-21T17:55:27.268751Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:27.268774Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:55:27.269047Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 
72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:55:27.269072Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409547 2024-11-21T17:55:27.269145Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:27.269151Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:55:27.269161Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:55:27.269166Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:55:27.269171Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:27.269255Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:27.269260Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:27.269281Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:55:27.269396Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:55:27.269415Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:55:27.269433Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:55:27.269437Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [94:573:2521] 2024-11-21T17:55:27.270001Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:55:27.270043Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:55:27.270049Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:55:27.270059Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:55:27.270062Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:55:27.270132Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:27.270137Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:55:27.270147Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:55:27.270316Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:27.270366Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2024-11-21T17:55:27.270372Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:27.270383Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:27.270428Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:27.270733Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted 2024-11-21T17:55:27.270808Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:27.270819Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:55:27.270826Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:55:27.270833Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:55:27.270840Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 2024-11-21T17:55:27.270920Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:27.270953Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 41us result status StatusPathDoesNotExist 2024-11-21T17:55:27.270983Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T17:55:27.271022Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:27.271037Z node 94 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 
describe path "/MyRoot/DirA" took 15us result status StatusSuccess 2024-11-21T17:55:27.271078Z node 94 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Waiting until shard idx 72057594046678944:4 is deleted Waiting until shard idx 72057594046678944:5 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 Deleted shard idx 72057594046678944:4 Deleted shard idx 72057594046678944:5 >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::PathsAndShardsCountersSimultaneousAlterSubDomain [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:02.435320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:02.435343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:02.435352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:02.435420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:02.435425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:02.435450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:02.435524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:02.448630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:02.448648Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.450804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:02.450879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:02.450895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:02.453544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:02.453603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:02.455665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.456837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.458346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.464452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:02.464456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.464459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:02.464482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:02.465555Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:02.481018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } 
} } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:02.481080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.481116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:02.481154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:02.481158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.481706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.481732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:02.481772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.481781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:02.481789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:02.481793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:02.482174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:02.482544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482554Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.482558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.482564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.483105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:02.483507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:02.483839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:02.484037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.484127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:02.484134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:02.484156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:02.484167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:02.484639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:02.484677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:02.484746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:02.484753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:02.484762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:02.484767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.484772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:02.484777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:02.484782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:02.484785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:02.484795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:02.484801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:02.484805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
ckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:55:27.695913Z node 103 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:55:27.695917Z node 103 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T17:55:27.695921Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T17:55:27.695933Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2024-11-21T17:55:27.696030Z node 103 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:55:27.696040Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 9 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:55:27.696048Z node 103 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:55:27.696052Z node 103 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 9 2024-11-21T17:55:27.696056Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 7 2024-11-21T17:55:27.696144Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 1005 Step: 200 OrderId: 1005 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 239 } } 2024-11-21T17:55:27.696151Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409549, partId: 0 2024-11-21T17:55:27.696168Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 1005 Step: 200 OrderId: 1005 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 239 } } 2024-11-21T17:55:27.696180Z node 103 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 1005 Step: 200 OrderId: 1005 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 239 } } 2024-11-21T17:55:27.696245Z node 103 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:55:27.696256Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:55:27.696260Z node 103 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:55:27.696264Z node 103 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2024-11-21T17:55:27.696268Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:55:27.696277Z node 103 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2024-11-21T17:55:27.696282Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [103:426:2393] 2024-11-21T17:55:27.696356Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 572 RawX2: 442381634007 } Origin: 72075186233409549 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T17:55:27.696361Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409549, partId: 0 2024-11-21T17:55:27.696373Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Source { RawX1: 572 RawX2: 442381634007 } Origin: 72075186233409549 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T17:55:27.696380Z node 103 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1005:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T17:55:27.696388Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1005:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 572 RawX2: 442381634007 } Origin: 72075186233409549 State: 2 TxId: 1005 Step: 0 Generation: 2 2024-11-21T17:55:27.696398Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1005:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:27.696402Z node 103 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:55:27.696406Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1005:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T17:55:27.696411Z node 103 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 240 2024-11-21T17:55:27.697009Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:27.697090Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:27.697428Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:55:27.697458Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:55:27.697474Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:55:27.697485Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:55:27.697490Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [103:620:2560] 2024-11-21T17:55:27.697514Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:55:27.697577Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2024-11-21T17:55:27.697583Z node 103 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2024-11-21T17:55:27.697592Z node 103 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2024-11-21T17:55:27.697597Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:55:27.697602Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2024-11-21T17:55:27.697612Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [103:426:2393] message: TxId: 1005 2024-11-21T17:55:27.697618Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2024-11-21T17:55:27.697623Z node 103 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2024-11-21T17:55:27.697626Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2024-11-21T17:55:27.697646Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T17:55:27.698050Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:55:27.698060Z node 103 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [103:620:2560] TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 2024-11-21T17:55:27.698161Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:27.698188Z node 103 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 34us result status StatusSuccess 2024-11-21T17:55:27.698276Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "Table2" PathId: 6 SchemeshardId: 72057594046678944 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 1005 CreateStep: 200 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "dir" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 200 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[queue] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v1-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/grpcio/py3/grpc/_channel.py:1407: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/typing.py:1601: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_action_which_does_not_requere_existing_queue [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> 
test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_that_queue_can_be_created_despite_lack_of_throttling_budget [GOOD] >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_and_create_queue[fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v1] [SKIPPED] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v0] [GOOD] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_disables_user_counters [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_incorrect_tables_format [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue[tables_format_v1-std] [GOOD] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] >> ForceDropWithReboots::ForceDeleteCreateTableInFly [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-30] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::ForceDeleteCreateTableInFly [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: 
[1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:03.723458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:03.723475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:03.723479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:03.723483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:03.723494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:03.723497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:03.723504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:03.723562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:03.731044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:03.731064Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:03.732783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:03.732859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:03.732884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:03.734710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:03.734761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:03.734849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.735011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:03.735538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.735764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:03.735771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.735781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:03.735786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:03.735790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:03.735821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:03.736930Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:03.748529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:03.748613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.748668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:03.748716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:03.748725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.749343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.749363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:03.749414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.749428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:03.749432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:03.749436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:03.749787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.749797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:03.749802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:03.750036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.750043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.750047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.750052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.750409Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:03.750713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:03.750760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:03.750936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:03.750954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:03.750967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.751006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:03.751011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:03.751033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:03.751042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:03.751569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:03.751576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:03.751610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:03.751615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:03.751685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:03.751690Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:03.751699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:03.751702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.751706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:03.751709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:03.751713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:03.751716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:03.751724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:03.751728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:03.751730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... d: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:55:33.697324Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:33.697500Z node 125 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:55:33.697510Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2024-11-21T17:55:33.697512Z node 125 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2024-11-21T17:55:33.697515Z node 125 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T17:55:33.697517Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T17:55:33.697523Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 1 2024-11-21T17:55:33.697526Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [125:457:2424] 2024-11-21T17:55:33.697595Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:33.697599Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:33.697602Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:33.697750Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:33.697815Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:33.697830Z node 125 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:55:33.697869Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:33.697906Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard 
deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T17:55:33.697950Z node 125 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 Forgetting tablet 72075186233409546 2024-11-21T17:55:33.705870Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:55:33.705943Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T17:55:33.706033Z node 125 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:55:33.706193Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:33.706198Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:55:33.706210Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:55:33.706214Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:55:33.706218Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409547 2024-11-21T17:55:33.706307Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:33.706325Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:33.706766Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:33.706781Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:33.706822Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:55:33.707104Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:33.707134Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2024-11-21T17:55:33.707158Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2024-11-21T17:55:33.707163Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [125:458:2425] 2024-11-21T17:55:33.707567Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:55:33.707579Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 
2024-11-21T17:55:33.707593Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:55:33.707605Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:33.707616Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:33.707620Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:33.707630Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T17:55:33.707652Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:33.707656Z node 125 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:55:33.707675Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:33.707903Z node 125 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 Ok notification wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T17:55:33.707976Z node 125 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:33.707984Z node 125 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:55:33.707989Z node 125 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T17:55:33.708044Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:33.708097Z node 125 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 60us result status StatusPathDoesNotExist 2024-11-21T17:55:33.708130Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1135" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, 
at schemeshard: 72057594046678944 2024-11-21T17:55:33.708167Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:33.708183Z node 125 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 17us result status StatusSuccess 2024-11-21T17:55:33.708277Z node 125 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Waiting until shard idx 72057594046678944:1 is deleted Waiting until shard idx 72057594046678944:2 is deleted Waiting until shard idx 72057594046678944:3 is deleted Deleted shard idx 72057594046678944:1 Deleted shard idx 72057594046678944:2 Deleted shard idx 72057594046678944:3 >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-fifo] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithTenant::test_modify_permissions[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v0] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] >> 
test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-empty] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue_with_unsupported_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[fifo] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_throttling.py::TestSqsThrottlingOnNonexistentQueue::test_throttling_on_nonexistent_queue [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] >> SubDomainWithReboots::SplitTabletInsideWithStoragePools [GOOD] >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v0] [GOOD] >> 
test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] >> test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> SubDomainWithReboots::SplitTabletInsideWithStoragePools [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:05.606941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:05.606959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:05.606962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:05.606966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:05.606977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:05.606980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:05.606986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:05.607045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:05.614242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:05.614258Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:05.615819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:05.615875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:05.615895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
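The SQS py3test stderr blocks above keep repeating the same DeprecationWarning from ydb/tests/library/sqs/requests_client.py:140 ("The 'warn' method is deprecated, use 'warning' instead"). A minimal sketch of the fix; only the format string comes from the warning itself, the surrounding function and variable names are assumptions:

import logging

logger = logging.getLogger(__name__)

# logging.Logger.warn() is a deprecated alias of warning(); the call quoted in the
# warning also builds the message eagerly with str.format(). Calling warning() with
# %-style arguments removes the deprecation and defers formatting to the logger.
def log_failed_request(code, reason, text):
    # hypothetical helper standing in for the real call site at requests_client.py:140
    logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
                   code, reason, text)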
2024-11-21T17:55:05.618125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:05.618213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:05.618320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:05.618471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:05.619017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:05.619217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:05.619225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:05.619233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:05.619238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:05.619241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:05.619263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:05.620100Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:05.630161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:05.630223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:05.630258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:05.630297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:05.630301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:05.630778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:05.630810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:05.630843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T17:55:05.630850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:05.630852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:05.630855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:05.631144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:05.631154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:05.631158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:05.631431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:05.631439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:05.631443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:05.631448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:05.631848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:05.632118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:05.632146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:05.632265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:05.632279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:05.632289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:05.632319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:05.632323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:05.632336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:05.632343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, 
pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:05.632588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:05.632594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:05.632614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:05.632618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:05.632661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:05.632665Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:05.632673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:05.632675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:05.632678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:05.632681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:05.632683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:05.632685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:05.632691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:05.632694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:05.632696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
TabletId: 72075186233409548 2024-11-21T17:55:38.716822Z node 135 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId#1004:0 HandleReply TEvSplitAck, at schemeshard: 72057594046678944, message: OperationCookie: 1004 TabletId: 72075186233409548 2024-11-21T17:55:38.716931Z node 135 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 131 -> 132 2024-11-21T17:55:38.716975Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T17:55:38.717533Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:55:38.717593Z node 135 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:38.717599Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T17:55:38.717692Z node 135 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:38.717697Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [135:201:2204], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2024-11-21T17:55:38.717784Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:55:38.717791Z node 135 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 1004:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:38.717797Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186233409548 on partitioning changed splitOp# 1004 at tablet 72057594046678944 2024-11-21T17:55:38.717903Z node 135 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:55:38.717912Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1004 2024-11-21T17:55:38.717915Z node 135 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2024-11-21T17:55:38.717919Z node 135 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2024-11-21T17:55:38.717923Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 7 2024-11-21T17:55:38.717939Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2024-11-21T17:55:38.718548Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269553158 2024-11-21T17:55:38.718960Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2024-11-21T17:55:38.719495Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: OperationCookie: 1004 TabletId: 72075186233409548 2024-11-21T17:55:38.719511Z node 135 
:FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 1004:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T17:55:38.719527Z node 135 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2024-11-21T17:55:38.719530Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:55:38.719536Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2024-11-21T17:55:38.719545Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2024-11-21T17:55:38.719550Z node 135 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2024-11-21T17:55:38.719553Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2024-11-21T17:55:38.719592Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 6 2024-11-21T17:55:38.720293Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2024-11-21T17:55:38.720303Z node 135 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 1004:0 2024-11-21T17:55:38.720485Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 459 RawX2: 579820587383 } TabletId: 72075186233409548 State: 4 2024-11-21T17:55:38.720506Z node 135 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409548, state: Offline, at schemeshard: 72057594046678944 2024-11-21T17:55:38.721089Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:38.721254Z node 135 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:55:38.721346Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:55:38.721412Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 Forgetting tablet 72075186233409548 2024-11-21T17:55:38.722663Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:55:38.722679Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2024-11-21T17:55:38.722749Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2024-11-21T17:55:38.722754Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2024-11-21T17:55:38.722799Z node 135 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2024-11-21T17:55:38.722814Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2024-11-21T17:55:38.722817Z node 135 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter 
[135:755:2667] TestWaitNotification: OK eventTxId 1004 2024-11-21T17:55:38.722885Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:38.722931Z node 135 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 59us result status StatusSuccess 2024-11-21T17:55:38.723015Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "storage-pool-number-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409548 is deleted 2024-11-21T17:55:38.723061Z node 135 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409548 2024-11-21T17:55:38.723093Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T17:55:38.723101Z node 135 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 10us result status StatusSuccess 2024-11-21T17:55:38.723127Z node 135 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 
SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 150 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "storage-pool-number-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsACLWithPath::test_modify_permissions[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_incorrect_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v0] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] >> 
test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[with_queues-tables_format_v1] [GOOD] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v0] [GOOD] >> test_ping.py::TestPing::test_ping >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[queue] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v1-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/tools/python3/Lib/threading.py:135: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> 
test_queue_counters.py::TestSqsGettingCounters::test_action_duration_being_not_immediate [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_visibility_change_cleanups_proper_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=976351) is multi-threaded, use of fork() may lead to deadlocks in the child. ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ... contrib/tools/python3/Lib/multiprocessing/pool.py:268: ResourceWarning: unclosed running multiprocessing pool ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2024-11-21T17:52:15.879849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:509:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:15.879928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:15.879945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001668/r3tmp/tmplOOnqe/pdisk_1.dat 2024-11-21T17:52:15.970322Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19684, node 1 2024-11-21T17:52:16.067745Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:16.067768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:16.067772Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:16.073213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:16.073445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:16.151267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:16.151309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:16.163888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31805 2024-11-21T17:52:16.614142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:17.393171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.393198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.436565Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T17:52:17.437523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.491168Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:17.505190Z node 4 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T17:52:17.505221Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T17:52:17.517555Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T17:52:17.517601Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T17:52:17.517617Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T17:52:17.517621Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T17:52:17.517625Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T17:52:17.517629Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
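The ResourceWarning stream collapsed above comes from sockets opened inside the SQS test helper's send_message call and never closed; Python only reports them when the objects are garbage-collected, which is why the log suggests enabling tracemalloc to see the allocation site. Below is a minimal, hedged sketch of the two stdlib knobs involved. It is not the ydb test-suite code: the helper send_message_checked is a hypothetical stand-in, and only the tracemalloc, warnings, contextlib, and socket calls are standard-library facts.

```python
# Hedged sketch: not the ydb test-suite code, just the stdlib knobs the
# warning text refers to (tracemalloc + closing the socket deterministically).
import contextlib
import socket
import tracemalloc
import warnings

# Record allocation tracebacks so a later ResourceWarning can point at the
# line that created the leaked object (equivalent to PYTHONTRACEMALLOC=5).
tracemalloc.start(5)
warnings.simplefilter("always", ResourceWarning)

def send_message_checked(host: str, port: int, payload: bytes) -> None:
    """Hypothetical stand-in for a test helper that would otherwise leak its socket."""
    # contextlib.closing guarantees close() even if sendall raises, which is
    # exactly what removes the "unclosed <socket ...>" ResourceWarning.
    with contextlib.closing(socket.create_connection((host, port), timeout=5)) as sock:
        sock.sendall(payload)

if __name__ == "__main__":
    try:
        send_message_checked("localhost", 12345, b"ping")
    except OSError as exc:
        # Expected when nothing is listening; the socket is still closed.
        print(f"connection failed as expected: {exc}")
```

Running the suite with PYTHONTRACEMALLOC=5 (or `python -X tracemalloc=5`) gives the same allocation tracebacks without touching test code.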
2024-11-21T17:52:17.517632Z node 4 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T17:52:17.517637Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T17:52:17.517754Z node 4 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T17:52:17.703969Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:17.704002Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:1875:2553], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T17:52:17.704963Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:1879:2556] 2024-11-21T17:52:17.707744Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T17:52:17.707945Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:1927:2582] 2024-11-21T17:52:17.708032Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:1927:2582], schemeshard id = 72075186224037889 2024-11-21T17:52:17.712157Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T17:52:17.712172Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T17:52:17.712183Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T17:52:17.714397Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:17.714426Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:17.715730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T17:52:17.717340Z node 4 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T17:52:17.717371Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T17:52:17.720282Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T17:52:17.732508Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:17.755259Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T17:52:17.901996Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T17:52:18.080585Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T17:52:18.827598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T17:52:19.656592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:19.656631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:19.656738Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:19.656746Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:19.678856Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:52:19.679473Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:19.691544Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:19.717043Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:52:19.717219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:19.816731Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T17:52:19.816754Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:19.816768Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2938:2908], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T17:52:19.817352Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:2940:2909] 2024-11-21T17:52:19.817406Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:2940:2909], schemeshard id = 72075186224037899 2024-11-21T17:52:20.725218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T17:52:21.371770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:21.371815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:21.371891Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:21.371913Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:21.394516Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:21.395021Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:21.407395Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:21.438218Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:21.438457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2024-11-21T17:52:21.653469Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2024-11-21T17:52:21.653494Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2024-11-21T17:52:21.653503Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3765:3112], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2024-11-21T17:52:21.653734Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:3768:3115] 2024-11-21T17:52:21.653769Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [4:3768:3115], schemeshard id = 72075186224037905 2024-11-21T17:52:22.737842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3903:3380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:22.737892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:22.741557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2024-11-21T17:52:22.776481Z node 4 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[4:3988:3214];tablet_id=72075186224037911;process=TTxInitSchema::Execute; ... :55:35.706756Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T17:55:35.706784Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T17:55:35.791054Z node 4 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T17:55:35.791084Z node 4 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T17:55:35.843215Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11931:7504], schemeshard count = 1 2024-11-21T17:55:36.301821Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T17:55:36.301856Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037899 2024-11-21T17:55:36.302017Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2024-11-21T17:55:36.324441Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:55:38.761441Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:38.761468Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:38.761480Z node 4 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2024-11-21T17:55:38.761485Z node 4 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:38.762851Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T17:55:38.774271Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2024-11-21T17:55:38.774304Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 224.000000s, at schemeshard: 72075186224037905 2024-11-21T17:55:38.774420Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2024-11-21T17:55:38.796896Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T17:55:38.796968Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T17:55:38.797133Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T17:55:38.797153Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T17:55:38.797566Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T17:55:38.809658Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T17:55:38.809774Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2024-11-21T17:55:38.810076Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:12104:7602], server id = [4:12105:7603], tablet id = 72075186224037911, status = OK 2024-11-21T17:55:38.810225Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:12104:7602], path = { OwnerId: 72075186224037899 LocalId: 2 } 2024-11-21T17:55:38.810499Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2024-11-21T17:55:38.810512Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2024-11-21T17:55:38.810600Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:12104:7602], server id = [4:12105:7603], tablet id = 72075186224037911 2024-11-21T17:55:38.810604Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:38.810627Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T17:55:38.810677Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T17:55:38.810763Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T17:55:38.811491Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T17:55:38.815953Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:12122:7620]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:38.816037Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:55:38.816044Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:12122:7620], StatRequests.size() = 1 2024-11-21T17:55:38.864460Z node 4 :SYSTEM_VIEWS WARN: [72075186224037896] TEvIntervalQuerySummary, wrong stage: node id# 4 2024-11-21T17:55:38.864649Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YTlmY2JmZWItMzk1MzQzYWItZDE0NjA0OGYtMjE3NDU0NmI=, TxId: 2024-11-21T17:55:38.864659Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YTlmY2JmZWItMzk1MzQzYWItZDE0NjA0OGYtMjE3NDU0NmI=, TxId: 2024-11-21T17:55:38.864833Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:38.888877Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T17:55:38.888901Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 
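The RunDataQuery entries above show the statistics aggregator executing a parameterized YQL UPSERT: the parameters are DECLAREd first, then written with UPSERT INTO `.metadata/_statistics`. For readers unfamiliar with the pattern, here is a hedged sketch of issuing a similar parameterized UPSERT through the YDB Python SDK. The endpoint, database, table, and column names are illustrative placeholders rather than the internal statistics schema, and the SDK surface may vary by version; the calls shown (ydb.Driver, ydb.SessionPool, session.prepare, transaction().execute, retry_operation_sync) follow the SDK's documented table-service examples.

```python
# Hedged sketch of a parameterized YQL UPSERT via the YDB Python SDK.
# Connection settings, table and column names are illustrative placeholders.
import ydb

QUERY = """
DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $data AS String;

UPSERT INTO `statistics_demo` (owner_id, local_path_id, data)
VALUES ($owner_id, $local_path_id, $data);
"""

def upsert_row(pool):
    def callee(session):
        # prepare() lets the server derive parameter types from the DECLAREs.
        prepared = session.prepare(QUERY)
        session.transaction(ydb.SerializableReadWrite()).execute(
            prepared,
            {"$owner_id": 72075186224037899, "$local_path_id": 2, "$data": b"sketch"},
            commit_tx=True,
        )
    # retry_operation_sync re-runs the callee on retryable YDB errors.
    pool.retry_operation_sync(callee)

if __name__ == "__main__":
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5, fail_fast=True)
    pool = ydb.SessionPool(driver)
    upsert_row(pool)
    driver.stop()
```

The DECLARE block is what lets the server type-check $owner_id and the other parameters before data arrives, which is the same mechanism visible in the aggregator's query above.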
2024-11-21T17:55:39.609062Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2024-11-21T17:55:39.609095Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T17:55:41.890725Z node 4 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count = 1 2024-11-21T17:55:41.890819Z node 4 :STATISTICS DEBUG: EvPropagateStatistics, node id = 4 2024-11-21T17:55:41.955060Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T17:55:41.955093Z node 4 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T17:55:41.955101Z node 4 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2024-11-21T17:55:41.955104Z node 4 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T17:55:41.956299Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T17:55:41.969753Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T17:55:41.969966Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T17:55:41.969989Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T17:55:41.970205Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 1 2024-11-21T17:55:41.982058Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T17:55:41.982141Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2024-11-21T17:55:41.982365Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:12269:7695], server id = [4:12270:7696], tablet id = 72075186224037912, status = OK 2024-11-21T17:55:41.982400Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:12269:7695], path = { OwnerId: 72075186224037905 LocalId: 2 } 2024-11-21T17:55:41.982598Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2024-11-21T17:55:41.982636Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2024-11-21T17:55:41.982707Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T17:55:41.982739Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T17:55:41.982833Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2024-11-21T17:55:41.982931Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:12269:7695], server id = [4:12270:7696], tablet id = 72075186224037912 2024-11-21T17:55:41.982934Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T17:55:41.983592Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T17:55:41.997943Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=YTkyMDFkMTYtYzlhMGQyZDItM2RmY2U1ZmQtYjcwMjVlZjQ=, TxId: 2024-11-21T17:55:41.997971Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=YTkyMDFkMTYtYzlhMGQyZDItM2RmY2U1ZmQtYjcwMjVlZjQ=, TxId: 2024-11-21T17:55:41.998219Z node 4 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T17:55:41.998556Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:12285:6043]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:41.998622Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:55:41.998628Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T17:55:42.000792Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T17:55:42.000812Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T17:55:42.000823Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T17:55:42.004016Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2024-11-21T17:55:42.004286Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:12285:6043]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T17:55:42.004326Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:55:42.004334Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T17:55:42.004383Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T17:55:42.004389Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T17:55:42.004395Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T17:55:42.005003Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v0-std] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v0] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-200] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_unsupported_tables_format [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] >> test_ping.py::TestPing::test_ping [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] |86.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-empty] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[fifo] [GOOD] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_and_create_queue[std] [GOOD] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] >> test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_ya_count_queues[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create_old[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_detailed_counters[user] [GOOD] >> ForceDropWithReboots::DoNotLostDeletedTablets [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithPath::test_manage_account[without_queues-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain_reboots/unittest >> ForceDropWithReboots::DoNotLostDeletedTablets [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for 
TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T17:55:06.755073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T17:55:06.755091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:06.755096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T17:55:06.755100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T17:55:06.755113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T17:55:06.755117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T17:55:06.755125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T17:55:06.755187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T17:55:06.765802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T17:55:06.765826Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T17:55:06.768038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T17:55:06.768118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T17:55:06.768142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T17:55:06.770307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T17:55:06.770369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T17:55:06.770466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:06.770675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:06.771283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:06.771483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:06.771491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:06.771503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T17:55:06.771509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:06.771514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T17:55:06.771545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for 
TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T17:55:06.772587Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T17:55:06.783678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T17:55:06.783740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:06.783773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T17:55:06.783813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T17:55:06.783818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:06.784302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:06.784321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T17:55:06.784357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:06.784365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T17:55:06.784368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T17:55:06.784371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T17:55:06.784789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:06.784803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T17:55:06.784808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T17:55:06.785228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:06.785238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:06.785243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:06.785249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T17:55:06.785831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose 
to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T17:55:06.786223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T17:55:06.786262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T17:55:06.786416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:06.786438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T17:55:06.786454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:06.786499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T17:55:06.786506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T17:55:06.786526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T17:55:06.786537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T17:55:06.786931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T17:55:06.786939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T17:55:06.786964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T17:55:06.786971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T17:55:06.787032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T17:55:06.787038Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T17:55:06.787047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T17:55:06.787051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T17:55:06.787056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T17:55:06.787061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T17:55:06.787066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T17:55:06.787070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T17:55:06.787080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:06.787084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T17:55:06.787096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... ublication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2024-11-21T17:55:48.358371Z node 94 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T17:55:48.358374Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T17:55:48.358382Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2024-11-21T17:55:48.358637Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:48.358647Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:48.358650Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:48.358665Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:48.358669Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:48.358672Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T17:55:48.358804Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:55:48.359096Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2024-11-21T17:55:48.359600Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T17:55:48.359643Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T17:55:48.359689Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 Forgetting tablet 72075186233409546 2024-11-21T17:55:48.359834Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T17:55:48.359856Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T17:55:48.359904Z node 94 :HIVE INFO: [72057594037968897] 
TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T17:55:48.360103Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2024-11-21T17:55:48.360503Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T17:55:48.360539Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 Forgetting tablet 72075186233409548 2024-11-21T17:55:48.361008Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T17:55:48.361048Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T17:55:48.361075Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409551 2024-11-21T17:55:48.361268Z node 94 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2024-11-21T17:55:48.361390Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T17:55:48.361418Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186233409549 2024-11-21T17:55:48.361987Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:48.361996Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T17:55:48.362006Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T17:55:48.362057Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:55:48.362130Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T17:55:48.362156Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T17:55:48.362191Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2024-11-21T17:55:48.362223Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T17:55:48.362229Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2024-11-21T17:55:48.362910Z node 94 :FLAT_TX_SCHEMESHARD 
DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T17:55:48.362929Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T17:55:48.363027Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T17:55:48.363037Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T17:55:48.363288Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T17:55:48.363298Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T17:55:48.363326Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T17:55:48.363330Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T17:55:48.363370Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:48.363385Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:48.363390Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T17:55:48.363403Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T17:55:48.363450Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:48.363455Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:48.363473Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T17:55:48.363503Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T17:55:48.363509Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T17:55:48.364048Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:48.364070Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T17:55:48.364078Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T17:55:48.364083Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T17:55:48.364093Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T17:55:48.364439Z node 94 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1006, 
wait until txId: 1006 TestWaitNotification wait txId: 1006 2024-11-21T17:55:48.364509Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2024-11-21T17:55:48.364516Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2024-11-21T17:55:48.364593Z node 94 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2024-11-21T17:55:48.364610Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2024-11-21T17:55:48.364614Z node 94 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [94:917:2833] TestWaitNotification: OK eventTxId 1006 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted 2024-11-21T17:55:48.364681Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T17:55:48.364695Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T17:55:48.364699Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T17:55:48.364709Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2024-11-21T17:55:48.364716Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 2024-11-21T17:55:48.364721Z node 94 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409551 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_with_invalid_name[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_queue_generates_event[tables_format_v0] [SKIPPED] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_fifo_queue_wo_postfix[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead 
logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] |86.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_format_without_version.py::TestQueueWithoutVersionWithPath::test_common[fifo] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ping.py::TestPing::test_error_on_cgi_parameters |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_default_attributes[tables_format_v1-fifo] [GOOD] |86.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersExportDelay::test_export_delay[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/clients/kikimr_monitoring.py:75: ResourceWarning: unclosed (key, value) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_counters.py::TestSqsCountersFeatures::test_aggregates_transaction_counters[user] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_create_queue[fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] [GOOD] >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v1-std] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[fifo] [GOOD] >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_double_create[std] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-fifo] [GOOD] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_removes_user_counters_after_user_deletion[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v1] [GOOD] >> 
test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] >> test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v0] [SKIPPED] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ping.py::TestPing::test_error_on_cgi_parameters [GOOD] |86.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_ping.py::TestPing::test_error_on_non_ping_path >> test_ping.py::TestPing::test_error_on_non_ping_path [GOOD] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithTenant::test_invalid_token[tables_format_v0-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( contrib/python/PyYAML/py3/yaml/error.py:6: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/public/api/grpc/ydb_table_v1__intpy3___pb2_grpc.py:11: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue_batch[tables_format_v1] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_set_queue_attributes[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_create_fifo_queue_wo_postfix[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.5%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue_batch[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-std] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create_old[std] [GOOD] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_double_create[fifo] [GOOD] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_send_message[fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_request_to_deleted_queue[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] 
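The DeprecationWarning captured above originates in ydb/tests/library/sqs/requests_client.py:140, which still calls the deprecated Logger.warn alias. A minimal sketch of the fix, assuming only the file, line and message reported by the warning; the surrounding helper shown here is illustrative, not the actual requests_client code:

    import logging

    logger = logging.getLogger(__name__)

    def log_failed_request(code, reason, text):
        # Logger.warn is a deprecated alias; Logger.warning is the supported name.
        logger.warning("Last request failed with code {}, reason '{}' and text '{}'".format(
            code, reason, text))

The message and format string are copied from the log; switching the call silences the warning without changing behavior.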
|86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_queues_count_over_limit[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_purge_queue[tables_format_v1-std] [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestSetClientOffset >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-fifo] [GOOD] >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v0] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_account_actions.py::TestAccountActionsWithTenant::test_manage_account[without_queues-tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> 
test_format_without_version.py::TestQueueWithoutVersionWithTenant::test_common[std] [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_create_queue_generates_event[tables_format_v1] [SKIPPED] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_removes_messages_by_retention_time[tables_format_v1-fifo] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1001187) is multi-threaded, use of fork() may lead to deadlocks in the child. 
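The popen_fork.py DeprecationWarning above is the CPython 3.12+ warning that a multi-threaded process is calling fork(), which is what the default multiprocessing start method does on Linux. A hedged sketch of how a test helper could run one child under the spawn start method instead; the helper name and call shape are assumptions, not the actual harness code:

    import multiprocessing as mp

    def run_isolated(target, *args):
        # "spawn" starts a fresh interpreter, so the child does not inherit the
        # threads and locks of this multi-threaded test process and the
        # fork()-related DeprecationWarning is not triggered.
        ctx = mp.get_context("spawn")
        proc = ctx.Process(target=target, args=args)
        proc.start()
        proc.join()
        return proc.exitcode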
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_acl.py::TestSqsWithForceAuthorizationWithPath::test_invalid_token[tables_format_v0-invalid] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD]
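The repeated ResourceWarning above points at ydb/tests/functional/sqs/common/test_garbage_collection.py:37, where send_message(...) apparently leaves a socket to be reclaimed by the garbage collector while still open. A sketch of the usual remedy, closing the connection deterministically; the signature comes from the warning text, while the body and the build_sqs_request helper are assumptions for illustration only:

    import socket
    from contextlib import closing

    def send_message(server, username, queue_url, sqs_port, body, seq_no, group_id):
        # closing() guarantees the socket is closed when the call returns,
        # which is exactly what the ResourceWarning is asking for.
        with closing(socket.create_connection((server, sqs_port))) as sock:
            sock.sendall(build_sqs_request(username, queue_url, body, seq_no, group_id))  # assumed helper
            return sock.recv(65536)

As the warnings themselves suggest, rerunning with python -X tracemalloc (or PYTHONTRACEMALLOC=1 in the environment) attaches the allocation traceback that pinpoints where each leaked socket was created.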
ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_ping.py::TestPing::test_error_on_non_ping_path [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join0.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-select_distinct.test] >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant 
instead |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_read_message[fifo] [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithTenant::test_create_queue_with_empty_tables_format [GOOD] |86.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> ExternalIndex::Simple [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_purge_queue[tables_format_v0-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_deduplication_table[tables_format_v1] [GOOD] >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[plan-window.test] >> 
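The nemesis test's stderr above records two Python 3.12 deprecations inside the bundled Flask/Werkzeug code: pkgutil.find_loader (slated for removal in Python 3.14, replaced by importlib.util.find_spec) and ast.Str with its s attribute (replaced by ast.Constant and value). A small sketch of the modern spellings, independent of the Flask/Werkzeug internals that actually emit the warnings:

    import ast
    import importlib.util

    # importlib.util.find_spec replaces pkgutil.find_loader for "is this importable?" checks.
    has_flask = importlib.util.find_spec("flask") is not None

    # ast.Constant replaces ast.Str/ast.Num; the literal lives in .value instead of .s.
    tree = ast.parse("greeting = 'hello'")
    literals = [node.value for node in ast.walk(tree) if isinstance(node, ast.Constant)]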
test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join3.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: 2024-11-21T17:52:42.902141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:91:2137], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T17:52:42.902218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T17:52:42.902281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2024-11-21T17:52:42.902312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/0012d2/r3tmp/tmpqw9M2o/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12476, node 1 TClient is connected to server localhost:1730 2024-11-21T17:52:44.473293Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvGetProxyServicesRequest 2024-11-21T17:52:44.473371Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvGetProxyServicesRequest 2024-11-21T17:52:44.534275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.611882Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:52:44.612576Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:52:44.620906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:52:44.620940Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:52:44.621135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:44.681046Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T17:52:44.681302Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T17:52:44.681364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:44.681394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:44.691972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:44.823332Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvProposeTransaction 2024-11-21T17:52:44.823358Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T17:52:44.823423Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:643:2537] 2024-11-21T17:52:44.831648Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" 
} Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T17:52:44.831873Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T17:52:44.831886Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T17:52:44.831940Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T17:52:44.840576Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T17:52:44.840632Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T17:52:44.860086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:44.860356Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T17:52:44.860559Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T17:52:44.860571Z node 1 :TX_PROXY DEBUG: Actor# [1:643:2537] txid# 281474976715657 SEND to# [1:642:2536] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T17:52:44.951161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:711:2597];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T17:52:44.954474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:711:2597];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T17:52:44.954552Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T17:52:44.959721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T17:52:44.959787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T17:52:44.959860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T17:52:44.959880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2024-11-21T17:52:44.959899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T17:52:44.959918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T17:52:44.959941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T17:52:44.959963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T17:52:44.959984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T17:52:44.960003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T17:52:44.960024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T17:52:44.960043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:711:2597];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T17:52:44.963412Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2024-11-21T17:52:44.963456Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T17:52:44.963462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T17:52:44.963486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:52:44.963509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T17:52:44.963520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T17:52:44.963523Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T17:52:44.963529Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T17:52:44.963535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T17:52:44.963542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T17:52:44.963546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T17:52:44.963562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T17:52:44.963570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T17:52:44.963577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888; ... oFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '335) '('"_id" '"31cffd9b-fb9008f3-b33b427e-739a44e9")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) ) 2024-11-21T17:56:06.322625Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.322 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '322) '('"_id" '"2835c0f6-667a51d3-5d79ecba-2b4d5673") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '335) '('"_id" '"31cffd9b-fb9008f3-b33b427e-739a44e9")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) 2024-11-21T17:56:06.324261Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:466: Register async execution for node #260 2024-11-21T17:56:06.324305Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: 
CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {3}, callable #269 2024-11-21T17:56:06.324327Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:577: Node #269 finished execution 2024-11-21T17:56:06.324334Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:594: Node #269 created 0 trackable nodes: 2024-11-21T17:56:06.324345Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:87: Finish, output #272, status: Async 2024-11-21T17:56:06.324570Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:133: Completed async execution for node #260 2024-11-21T17:56:06.324577Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #260 2024-11-21T17:56:06.324582Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:59: Begin, root #272 2024-11-21T17:56:06.324587Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #272, status: Ok 2024-11-21T17:56:06.324590Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {0}, callable #272 2024-11-21T17:56:06.324593Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {1}, callable #271 2024-11-21T17:56:06.324597Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {2}, callable #270 2024-11-21T17:56:06.324611Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {3}, callable #267 2024-11-21T17:56:06.324614Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {4}, callable #260 2024-11-21T17:56:06.324639Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, 
tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:577: Node #260 finished execution 2024-11-21T17:56:06.324645Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:594: Node #260 created 0 trackable nodes: 2024-11-21T17:56:06.324648Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {3}, callable #267 2024-11-21T17:56:06.324651Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:577: Node #267 finished execution 2024-11-21T17:56:06.324655Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {2}, callable #270 2024-11-21T17:56:06.324684Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:577: Node #270 finished execution 2024-11-21T17:56:06.324688Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:594: Node #270 created 0 trackable nodes: 2024-11-21T17:56:06.324691Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {1}, callable #271 2024-11-21T17:56:06.324699Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:577: Node #271 finished execution 2024-11-21T17:56:06.324703Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:594: Node #271 created 0 trackable nodes: 2024-11-21T17:56:06.324706Z node 1 :KQP_YQL TRACE: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 TRACE ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:387: {0}, callable #272 2024-11-21T17:56:06.324710Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:577: Node #272 finished execution 2024-11-21T17:56:06.324713Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:594: Node #272 created 0 trackable nodes: 2024-11-21T17:56:06.324717Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:87: Finish, output #272, status: Ok 
2024-11-21T17:56:06.324720Z node 1 :KQP_YQL INFO: TraceId: 01jd7xvx0wew5tc4bw8cs2gjgb, SessionId: CompileActor 2024-11-21 17:56:06.324 INFO ydb-services-ext_index-ut(pid=827441, tid=0x00007FD020822BC0) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #272 2024-11-21T17:56:06.336792Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvExecuteKqpTransaction 2024-11-21T17:56:06.336827Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976716246 ProcessProposeKqpTransaction 2024-11-21T17:56:06.339380Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] Handle TEvExecuteKqpTransaction 2024-11-21T17:56:06.339409Z node 1 :TX_PROXY DEBUG: actor# [1:53:2100] TxId# 281474976716247 ProcessProposeKqpTransaction 2024-11-21T17:56:06.521792Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:711:2597];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:56:06.521891Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:719:2599];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:56:06.521901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:722:2601];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:56:06.521909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:725:2604];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T17:56:06.532737Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:711:2597];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T17:56:06.532786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:719:2599];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2024-11-21T17:56:06.532796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:722:2601];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T17:56:06.532804Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:725:2604];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/coalesce-and-join.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join-group-by-with-null.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_request_to_deleted_queue[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] |86.8%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_remove_queue_generates_event[tables_format_v1] [SKIPPED] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v1-30] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=995964) is multi-threaded, use of fork() may lead to deadlocks in the child. 
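The two DeprecationWarnings captured in the test output above (logger.warn at requests_client.py:140 and fork() in a multi-threaded process from popen_fork.py:66) each name their own remedy. Below is a minimal illustrative sketch, not code from the repository: the logger name and the harness wiring are assumptions, and only the two API substitutions (Logger.warning over the deprecated Logger.warn, a "spawn" multiprocessing context instead of the default fork) come from the warnings themselves.

import logging
import multiprocessing

logger = logging.getLogger("sqs.requests_client")  # assumed logger name, for illustration only

# requests_client.py:140: Logger.warn() is a deprecated alias; warning() is the
# supported spelling, here with lazy %-style formatting instead of str.format().
logger.warning("Last request failed with code %s, reason '%s' and text '%s'",
               500, "InternalFailure", "...")

# popen_fork.py:66: forking a multi-threaded process can deadlock the child.
# A "spawn" (or "forkserver") context starts a fresh interpreter instead of
# inheriting the parent's threads; whether the test harness can switch start
# methods is an assumption of this sketch.
if __name__ == "__main__":
    ctx = multiprocessing.get_context("spawn")
    child = ctx.Process(target=print, args=("child started",))
    child.start()
    child.join()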
ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/co ... et the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback
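The ResourceWarnings above are Python reporting sockets opened inside the test helpers at test_garbage_collection.py:37 and :53 and never closed, and they suggest enabling tracemalloc to see where each leaked object was allocated. A minimal sketch of both suggestions follows; send_message() here is a hypothetical stand-in for the helper named in the warning (only its argument list is known from the log), and the connection handling is an assumption of the sketch, not the repository code.

import contextlib
import socket
import tracemalloc
import warnings

# "Enable tracemalloc to get the object allocation traceback": can also be done
# from outside the process with PYTHONTRACEMALLOC=25 or `python -X tracemalloc=25`.
tracemalloc.start(25)
warnings.simplefilter("always", ResourceWarning)

def send_message(server, username, queue_url, sqs_port, body, seq_no, group_id):
    # Hypothetical stand-in for the helper at test_garbage_collection.py:37.
    # Closing the connection deterministically (context manager) is what makes
    # the "unclosed <socket>" ResourceWarning go away.
    with contextlib.closing(socket.create_connection((server, sqs_port))) as conn:
        conn.sendall(body)
        return conn.recv(4096)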
|86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_counters.py::TestSqsCountersFeatures::test_creates_counter[tables_format_v0] [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_multiplexing_tables_format.py::TestMultiplexingTablesFormatWithPath::test_send_message[std] [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >>
test_queue_attributes_validation.py::TestQueueAttributesInCompatibilityMode::test_set_queue_attributes_no_validation[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_queues_count_over_limit[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join4.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] >> TPQTest::TestSetClientOffset [GOOD] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-insert.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select_distinct.test] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/unittest >> TPQTest::TestSetClientOffset [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T17:53:28.311969Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:28.312007Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.325795Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:28.329372Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions 
{ PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T17:53:28.329754Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T17:53:28.330464Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T17:53:28.330908Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T17:53:28.331203Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.338103Z node 1 :PERSQUEUE INFO: new Cookie owner1|da78e9ee-15d41fe7-2cc90cd0-24682c13_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2024-11-21T17:53:28.338326Z node 1 :PERSQUEUE INFO: new Cookie owner2|4dbb7e16-f4b01c6-46560a00-ccae7c35_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.348798Z node 1 :PERSQUEUE INFO: new Cookie owner1|38cc2813-1cad5492-c309311a-50e027a8_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T17:53:28.548333Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:28.548360Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.553269Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:28.553463Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T17:53:28.553578Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T17:53:28.554017Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T17:53:28.554298Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T17:53:28.554646Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.556101Z node 2 :PERSQUEUE INFO: new Cookie owner1|2f893db7-cb0576f5-3d7d0986-be89642d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2024-11-21T17:53:28.556192Z node 2 :PERSQUEUE INFO: new Cookie owner2|4498dd5e-7161e75e-f54ac469-6d8e9b41_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.558127Z node 2 :PERSQUEUE INFO: new Cookie owner1|c6ecd4d9-29e22d1c-c0f87829-e9f00f59_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] 2024-11-21T17:53:28.788449Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:28.788474Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:147:2057] recipient: [3:145:2168] Leader for TabletID 72057594037927938 is [3:151:2172] sender: [3:152:2057] recipient: [3:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:177:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.791995Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2024-11-21T17:53:28.792193Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2024-11-21T17:53:28.792337Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:184:2197] 2024-11-21T17:53:28.792888Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:53:28.793163Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:185:2198] 2024-11-21T17:53:28.793442Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.794884Z node 3 :PERSQUEUE INFO: new Cookie owner1|11a95846-a5489860-e6a7aa99-7a8d40d2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2024-11-21T17:53:28.794967Z node 3 :PERSQUEUE INFO: new Cookie owner2|f155ea8b-76735498-28c24e8e-715bdb04_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:53:28.797074Z node 3 :PERSQUEUE INFO: new Cookie owner1|3cb8e3db-28fa8ae2-ece22aef-f62fbdb7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] 2024-11-21T17:53:29.047241Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:53:29.047268Z node 4 :PERSQUEUE ... 
RSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [157:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:56:12.556276Z node 157 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [157:105:2137]) on event NKikimr::TEvPersQueue::TEvRequest ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:105:2137] sender: [157:235:2057] recipient: [157:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:105:2137] sender: [157:238:2057] recipient: [157:14:2061] Leader for TabletID 72057594037927937 is [157:105:2137] sender: [157:239:2057] recipient: [157:237:2239] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:240:2240] sender: [157:241:2057] recipient: [157:237:2239] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:56:12.566552Z node 157 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:56:12.566572Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T17:56:12.566722Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [157:291:2283] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:56:12.567406Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [157:292:2284] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:56:12.569097Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [157:291:2283] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T17:56:12.569681Z node 157 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [157:292:2284] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR !Reboot 72057594037927937 (actor [157:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [157:105:2137]) tablet resolver refreshed! 
new actor is[157:240:2240] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [157:240:2240] sender: [157:324:2057] recipient: [157:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [158:101:2057] recipient: [158:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [158:101:2057] recipient: [158:99:2133] Leader for TabletID 72057594037927937 is [158:105:2137] sender: [158:106:2057] recipient: [158:99:2133] 2024-11-21T17:56:13.224044Z node 158 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:56:13.224071Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [158:147:2057] recipient: [158:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [158:147:2057] recipient: [158:145:2168] Leader for TabletID 72057594037927938 is [158:151:2172] sender: [158:152:2057] recipient: [158:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [158:105:2137] sender: [158:177:2057] recipient: [158:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:56:13.229113Z node 158 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:56:13.229352Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 158 actor [158:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 158 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 158 ReadRuleGenerations: 158 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 158 Important: false } Consumers { Name: "user1" Generation: 158 Important: false } 2024-11-21T17:56:13.229519Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [158:184:2197] 2024-11-21T17:56:13.230161Z node 158 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [158:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:56:13.230880Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [158:185:2198] 2024-11-21T17:56:13.231367Z node 158 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [158:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:56:13.234215Z node 158 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [159:101:2057] recipient: [159:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [159:101:2057] recipient: [159:99:2133] Leader for TabletID 72057594037927937 is [159:105:2137] sender: [159:106:2057] recipient: [159:99:2133] 2024-11-21T17:56:13.548143Z node 159 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:56:13.548173Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [159:147:2057] recipient: [159:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [159:147:2057] recipient: [159:145:2168] Leader for TabletID 72057594037927938 is [159:151:2172] sender: [159:152:2057] recipient: [159:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [159:105:2137] sender: [159:177:2057] recipient: [159:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:56:13.554332Z node 159 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T17:56:13.554572Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 159 actor [159:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 159 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 159 ReadRuleGenerations: 159 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 159 Important: false } Consumers { Name: "user1" Generation: 159 Important: false } 2024-11-21T17:56:13.554759Z 
node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [159:184:2197] 2024-11-21T17:56:13.555384Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [159:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T17:56:13.556010Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [159:185:2198] 2024-11-21T17:56:13.556507Z node 159 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [159:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T17:56:13.561869Z node 159 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic rt3.dc1--asdfgs--topic partition 0 client user1 EndOffset 0 offset 100 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> RetryPolicy::RetryWithBatching [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_stream_query.py::TestStreamQuery::test_sql_suite[results-window.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.9%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2024-11-21T17:52:15.205948Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.205959Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.205963Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T17:52:15.206135Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:52:15.206152Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.206155Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.206712Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007466s 2024-11-21T17:52:15.206849Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T17:52:15.206860Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.206863Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.206880Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005045s 2024-11-21T17:52:15.206976Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T17:52:15.206987Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.206989Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T17:52:15.207004Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007426s 2024-11-21T17:52:15.230117Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1732211535230108 2024-11-21T17:52:15.330762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791895057546818:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:15.330838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:52:15.333652Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791896205751610:2233];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:52:15.353391Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:52:15.356360Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T17:52:15.357631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/00182e/r3tmp/tmpwDbTOt/pdisk_1.dat 2024-11-21T17:52:15.379557Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3503, node 1 2024-11-21T17:52:15.395111Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/6fwh/00182e/r3tmp/yandex0v3xFH.tmp 2024-11-21T17:52:15.395125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/6fwh/00182e/r3tmp/yandex0v3xFH.tmp 2024-11-21T17:52:15.395183Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/6fwh/00182e/r3tmp/yandex0v3xFH.tmp 2024-11-21T17:52:15.395225Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T17:52:15.399153Z INFO: TTestServer started on Port 31595 GrpcPort 3503 TClient is connected to server localhost:31595 PQClient connected to localhost:3503 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:52:15.430791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:15.430828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:15.432214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:52:15.459322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:52:15.459353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:52:15.460506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2024-11-21T17:52:15.460643Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:52:15.460940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... waiting... waiting... 2024-11-21T17:52:15.664099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791895057547700:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.664123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.664244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791895057547729:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.664860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2024-11-21T17:52:15.666756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439791895057547763:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.666772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T17:52:15.670976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439791895057547731:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2024-11-21T17:52:15.694753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T17:52:15.723253Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439791896205751770:2285], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:52:15.723392Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmViMDczMTMtYTQzZjI3NGEtNzk5MWEwNjAtODIwZTk4Njc=, ActorId: [2:7439791896205751717:2278], ActorState: ExecuteState, TraceId: 01jd7xmvv29q3dhb716k6hyds4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:52:15.724036Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:52:15.756138Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439791895057547892:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T17:52:15.756205Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjRkMGFjNjUtMzU3Yjc0YmQtM2Y4MTA4MDUtODYwNTVlNw==, ActorId: [1:7439791895057547697:2299], ActorState: ExecuteState, TraceId: 01jd7xmvsfa537knnbf1dk4xhh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T17:52:15.756342Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T17:52:15.763840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T17:52:15.829140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:3503", true, true, 1000); 2024-11-21T17:52:15.862666Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jd7xmvz3d0dpvf4yat4hchg7, Database: , Data ... 17:56:14.636152Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2024-11-21T17:56:14.636160Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2024-11-21T17:56:14.636164Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2024-11-21T17:56:14.636215Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1732211774633 2024-11-21T17:56:14.636297Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T17:56:14.648467Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 2024-11-21T17:56:14.648503Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:56:14.648524Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T17:56:14.649098Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 14 } 2024-11-21T17:56:14.648535Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:56:14.648541Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T17:56:14.648544Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:56:14.649113Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 1 2024-11-21T17:56:14.649119Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 2 2024-11-21T17:56:14.648550Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T17:56:14.649122Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 3 2024-11-21T17:56:14.648552Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:56:14.649124Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 4 2024-11-21T17:56:14.648557Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T17:56:14.649127Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 5 2024-11-21T17:56:14.648559Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:56:14.649130Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 6 2024-11-21T17:56:14.648570Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T17:56:14.648572Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:56:14.648577Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T17:56:14.648580Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:56:14.648585Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2024-11-21T17:56:14.648589Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:56:14.648604Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2024-11-21T17:56:14.648606Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T17:56:14.648612Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2024-11-21T17:56:14.648614Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T17:56:14.648619Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2024-11-21T17:56:14.648658Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T17:56:14.648663Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T17:56:14.648708Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T17:56:14.648747Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T17:56:14.648946Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2024-11-21T17:56:14.648951Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T17:56:14.648992Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T17:56:14.648997Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T17:56:14.649015Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732211774633 queuesize 0 startOffset 0 2024-11-21T17:56:14.649132Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 7 2024-11-21T17:56:14.649200Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 8 2024-11-21T17:56:14.649205Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 9 2024-11-21T17:56:14.649212Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: acknoledged message 10 2024-11-21T17:56:14.649325Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: close. 
Timeout = 0 ms 2024-11-21T17:56:14.649334Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session will now close 2024-11-21T17:56:14.649340Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: aborting 2024-11-21T17:56:14.649503Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: gracefully shut down, all writes complete 2024-11-21T17:56:14.649509Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0] Write session: destroy 2024-11-21T17:56:14.654060Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0 grpc read done: success: 0 data: 2024-11-21T17:56:14.654082Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0 grpc read failed 2024-11-21T17:56:14.654089Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0 grpc closed 2024-11-21T17:56:14.654096Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|47802aea-3478fc84-4a3385f9-82632cae_0 is DEAD 2024-11-21T17:56:14.654347Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T17:56:14.656986Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T17:56:14.657013Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7439792921388803451:2626] destroyed 2024-11-21T17:56:14.657033Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
|86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-create_table.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join0.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/coalesce-and-join.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |86.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD] |87.0%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-case.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_postgres.py::TestPGSQL::test_sql_suite[results-abstime.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] >> test_postgres.py::TestPGSQL::test_sql_suite[results-boolean.test] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join3.test] [FAIL] >> test_postgres.py::TestPGSQL::test_sql_suite[results-jointest/join4.test] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/sqs/multinode/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-jointest/join-group-by-with-null.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-15.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] |87.0%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |87.0%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> TStorageBalanceTest::TestScenario3 [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queue_attributes_validation.py::TestQueueAttributesValidation::test_create_queue_with_custom_attributes[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[results-case.test] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2024-11-21T17:53:47.145201Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T17:53:47.145850Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:47.145912Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T17:53:47.146032Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T17:53:47.146234Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T17:53:47.146243Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T17:53:47.146364Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T17:53:47.146367Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T17:53:47.146396Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T17:53:47.146448Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T17:53:47.148130Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false 
Marker# DSP02 2024-11-21T17:53:47.148147Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T17:53:47.148403Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.148428Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.148444Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.148461Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.148475Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.148490Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.148508Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T17:53:47.148511Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T17:53:47.148519Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T17:53:47.148523Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T17:53:47.148528Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T17:53:47.148533Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T17:53:47.148633Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T17:53:47.151959Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:47.151974Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:47.151978Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T17:53:47.152202Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2024-11-21T17:53:47.152262Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:47.152304Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:47.152309Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:47.152314Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T17:53:47.152729Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T17:53:47.152810Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T17:53:47.152815Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2024-11-21T17:53:47.153153Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594037927937 at domain dc-1 (allocated resources: ) 2024-11-21T17:53:47.153166Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2024-11-21T17:53:47.153168Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2024-11-21T17:53:47.153197Z node 1 :LOCAL DEBUG: 
TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:50:2090] 2024-11-21T17:53:47.153207Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:47.153230Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T17:53:47.153232Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T17:53:47.153347Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T17:53:47.153392Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T17:53:47.153398Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T17:53:47.153401Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T17:53:47.153405Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:47.153430Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:47.153437Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:47.153459Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T17:53:47.153462Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T17:53:47.153467Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:47.153507Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T17:53:47.153530Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T17:53:47.153539Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T17:53:47.153866Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T17:53:47.153974Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T17:53:47.153988Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:47.153992Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T17:53:47.154667Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result 
error, check reconnect [1:25:2072] 2024-11-21T17:53:47.154676Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T17:53:47.154694Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T17:53:47.154874Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T17:53:47.154928Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:47.154941Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T17:53:47.154944Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T17:53:47.154947Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T17:53:47.154986Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T17:53:47.154994Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T17:53:47.154997Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T17:53:47.155005Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T17:53:47.155008Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:47.155014Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:47.155022Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T17:53:47.155029Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T17:53:47.155033Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T17:53:47.155038Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T17:53:47.155041Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T17:53:47.155048Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T17:53:47.155072Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtaine ... 
.044 } AllocatedSize: 2200000000 } Groups { GroupID: 2147483650 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } Groups { GroupID: 2147483651 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3000000000 Occupancy: 0.06 } AllocatedSize: 3000000000 } Groups { GroupID: 2147483652 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3100000000 Occupancy: 0.062 } AllocatedSize: 3100000000 } Groups { GroupID: 2147483653 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2500000000 Occupancy: 0.05 } AllocatedSize: 2500000000 } Groups { GroupID: 2147483654 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3000000000 Occupancy: 0.06 } AllocatedSize: 3000000000 } Groups { GroupID: 2147483655 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2600000000 Occupancy: 0.052 } AllocatedSize: 2600000000 } Groups { GroupID: 2147483656 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2400000000 Occupancy: 0.048 } AllocatedSize: 2400000000 } Groups { GroupID: 2147483657 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3300000000 Occupancy: 0.066 } AllocatedSize: 3300000000 } Groups { GroupID: 2147483658 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483659 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } } 2024-11-21T17:56:33.672828Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1510, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2024-11-21T17:56:33.672839Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1510, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T17:56:33.672857Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}(72075186224037954,HIVE_REASSIGN_REASON_BALANCE,[]) 2024-11-21T17:56:33.672885Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 channel 0 assigned to group 2147483659 2024-11-21T17:56:33.672890Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 skipped reassign of channel 0 2024-11-21T17:56:33.672896Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 channel 1 assigned to group 2147483654 2024-11-21T17:56:33.672899Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 skipped reassign of channel 1 2024-11-21T17:56:33.672904Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 channel 2 assigned to group 2147483649 2024-11-21T17:56:33.672908Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: 
tablet 72075186224037954 skipped reassign of channel 2 2024-11-21T17:56:33.672911Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 wasn't changed 2024-11-21T17:56:33.672916Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 skipped channel 0 2024-11-21T17:56:33.672958Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 skipped channel 1 2024-11-21T17:56:33.672964Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{4728586254720}: tablet 72075186224037954 skipped channel 2 2024-11-21T17:56:33.672984Z node 25 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{4728586254720}(72075186224037954)::Execute - TryToBoot was not successfull 2024-11-21T17:56:33.673006Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1510, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{1003, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2024-11-21T17:56:33.673017Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1510, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T17:56:33.689307Z node 25 :BS_PROXY_PUT INFO: [8fcb48bc03120098] bootstrap ActorId# [25:11753:6244] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:496:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T17:56:33.689385Z node 25 :BS_PROXY_PUT DEBUG: [8fcb48bc03120098] Id# [72057594037927937:2:496:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T17:56:33.689394Z node 25 :BS_PROXY_PUT DEBUG: [8fcb48bc03120098] restore Id# [72057594037927937:2:496:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T17:56:33.689407Z node 25 :BS_PROXY_PUT DEBUG: [8fcb48bc03120098] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:496:0:0:246:1] Marker# BPG33 2024-11-21T17:56:33.689412Z node 25 :BS_PROXY_PUT DEBUG: [8fcb48bc03120098] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:496:0:0:246:1] Marker# BPG32 2024-11-21T17:56:33.689454Z node 25 :BS_PROXY DEBUG: Send to queueActorId# [25:352:2087] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:496:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T17:56:33.697084Z node 25 :BS_PROXY_PUT DEBUG: [8fcb48bc03120098] received {EvVPutResult Status# OK ID# [72057594037927937:2:496:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 511 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 512 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T17:56:33.697158Z node 25 :BS_PROXY_PUT DEBUG: [8fcb48bc03120098] Result# TEvPutResult {Id# [72057594037927937:2:496:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T17:56:33.697177Z node 25 :BS_PROXY_PUT INFO: [8fcb48bc03120098] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:496:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T17:56:33.697316Z node 25 :TABLET_MAIN DEBUG: Put Result: 
TEvPutResult {Id# [72057594037927937:2:496:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T17:56:33.697387Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} commited cookie 1 for step 496 2024-11-21T17:56:33.697404Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxReassignGroups(72075186224037954)::Complete 2024-11-21T17:56:33.697419Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{4728586254720}(72075186224037954)::Complete SideEffects: {Notifications: 0x7FF0000F [25:5463:2599]} 2024-11-21T17:56:33.697483Z node 25 :HIVE DEBUG: HIVE#72057594037927937 StorageBalancer received RestartCancelled for tablet (72075186224037954,0) 2024-11-21T17:56:33.697489Z node 25 :HIVE INFO: HIVE#72057594037927937 StorageBalancer finished 2024-11-21T17:56:33.697620Z node 25 :HIVE WARN: HIVE#72057594037927937 THive::StateWork unhandled event type: 2146435089 event: NKikimr::NHive::TEvPrivate::TEvStorageBalancerOut 2024-11-21T17:56:33.745032Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::RequestPoolsInformation() 2024-11-21T17:56:33.745097Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [25:1267:2637] 2024-11-21T17:56:33.745104Z node 25 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [25:1267:2637] 2024-11-21T17:56:33.745117Z node 25 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [25:1210:2599] EventType# 268637702 c[def1] ****------------------------------------------------------------------------------------------------ (0.044) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.048) *******--------------------------------------------------------------------------------------------- (0.066) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.056) 2024-11-21T17:56:33.848452Z node 25 :HIVE DEBUG: HIVE#72057594037927937 THive::Handle TEvControllerSelectGroupsResult: success Status: OK MatchingGroups { Groups { GroupID: 2147483649 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2200000000 Occupancy: 0.044 } AllocatedSize: 2200000000 } Groups { GroupID: 2147483650 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } Groups { GroupID: 2147483651 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3000000000 Occupancy: 0.06 } AllocatedSize: 3000000000 } Groups { GroupID: 2147483652 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3100000000 Occupancy: 0.062 } 
AllocatedSize: 3100000000 } Groups { GroupID: 2147483653 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2500000000 Occupancy: 0.05 } AllocatedSize: 2500000000 } Groups { GroupID: 2147483654 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3000000000 Occupancy: 0.06 } AllocatedSize: 3000000000 } Groups { GroupID: 2147483655 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2600000000 Occupancy: 0.052 } AllocatedSize: 2600000000 } Groups { GroupID: 2147483656 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2400000000 Occupancy: 0.048 } AllocatedSize: 2400000000 } Groups { GroupID: 2147483657 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 3300000000 Occupancy: 0.066 } AllocatedSize: 3300000000 } Groups { GroupID: 2147483658 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2300000000 Occupancy: 0.046 } AllocatedSize: 2300000000 } Groups { GroupID: 2147483659 StoragePoolName: "def1" AssuredResources { Space: 50000000000 Occupancy: 0 } CurrentResources { Space: 2800000000 Occupancy: 0.056 } AllocatedSize: 2800000000 } } >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-10.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithTenant::test_delete_queue[tables_format_v1-std] [GOOD] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_queues_managing.py::TestQueuesManagingWithPathTestQueuesManagingWithPath::test_delete_queue[tables_format_v0-std] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-6.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown |87.0%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/py3test >> test_garbage_collection.py::TestSqsGarbageCollection::test_cleanups_reads_table[tables_format_v0-200] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=1034048) is multi-threaded, use of fork() may lead to deadlocks in the child. ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:37: ResourceWarning: unclosed send_message(server, username, queue_url, sqs_port, body, seq_no, group_id) ResourceWarning: Enable tracemalloc to get the object allocation traceback 
...
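The unclosed-resource warnings in this block are Python's standard ResourceWarning: an object that owns an OS-level resource (whatever connection the test helpers behind send_message and delete_message open) was garbage-collected before being closed, and the "Enable tracemalloc" hint refers to running Python with tracemalloc enabled so the warning also reports where the leaked object was allocated. A minimal sketch of both the diagnosis and the usual fix, using a plain socket as a stand-in for the real helper (hypothetical code, not the YDB test utilities):

# Run with:  python -X tracemalloc=10 sketch.py
# so each ResourceWarning also prints the allocation traceback of the leaked object.
import socket
import warnings

warnings.simplefilter("always", ResourceWarning)

def send_message_leaky(host: str, port: int, body: bytes) -> None:
    # The socket is never closed; when the object is collected, CPython emits
    # "ResourceWarning: unclosed <socket.socket ...>".
    s = socket.create_connection((host, port))
    s.sendall(body)

def send_message_clean(host: str, port: int, body: bytes) -> None:
    # Closing deterministically (context manager) avoids the warning entirely.
    with socket.create_connection((host, port)) as s:
        s.sendall(body)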
ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback ydb/tests/functional/sqs/common/test_garbage_collection.py:53: ResourceWarning: unclosed delete_message(server, username, queue_url, sqs_port, receipt_handle) ResourceWarning: Enable tracemalloc to get the object allocation traceback >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill >> LabeledDbCounters::OneTabletRestart [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut_kqp/unittest >> LabeledDbCounters::OneTabletRestart [GOOD] Test command err: 2024-11-21T17:51:41.229790Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439791747754168893:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:41.229941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/6fwh/001947/r3tmp/tmpAvjpVT/pdisk_1.dat 2024-11-21T17:51:41.290700Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19321, node 1 2024-11-21T17:51:41.296058Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:51:41.296189Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T17:51:41.305954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T17:51:41.305968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T17:51:41.305971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T17:51:41.306009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23259 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T17:51:41.329049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:41.329084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:41.330891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T17:51:41.362088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:41.368671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:41.386383Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439791749618226959:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T17:51:41.390934Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2024-11-21T17:51:41.401602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:41.401627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:41.403243Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T17:51:41.403593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:41.415141Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2024-11-21T17:51:41.415159Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2024-11-21T17:51:41.419399Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7439791749618226809:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T17:51:41.419576Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T17:51:41.419594Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:41.422077Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7439791749618226809:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1 2024-11-21T17:51:41.422154Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7439791749618226809:2055], database# /Root/Database1, no sysview processor 2024-11-21T17:51:41.423131Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2024-11-21T17:51:41.423152Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2024-11-21T17:51:41.423249Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T17:51:41.423260Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2024-11-21T17:51:41.423265Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2024-11-21T17:51:41.423269Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T17:51:41.423277Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2024-11-21T17:51:41.423280Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2024-11-21T17:51:41.423283Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2024-11-21T17:51:41.423286Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2024-11-21T17:51:41.423291Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2024-11-21T17:51:41.423303Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2024-11-21T17:51:41.423311Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2024-11-21T17:51:41.423314Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2024-11-21T17:51:41.423331Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2024-11-21T17:51:41.423338Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2024-11-21T17:51:41.423341Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 
2024-11-21T17:51:41.423344Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2024-11-21T17:51:41.423348Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2024-11-21T17:51:41.423350Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2024-11-21T17:51:41.423370Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2024-11-21T17:51:41.000000Z 2024-11-21T17:51:41.426894Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2024-11-21T17:51:41.443472Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1 2024-11-21T17:51:41.444857Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2024-11-21T17:51:41.445443Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2024-11-21T17:51:41.448976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T17:51:41.457563Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439791746810971787:2191];send_to=[0:7307199536658146131:7762515]; waiting... 2024-11-21T17:51:41.462094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T17:51:41.462117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T17:51:41.462136Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T17:51:41.465646Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T17:51:41.466352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T17:51:41.479405Z node 2 :SYSTEM_VIEWS INFO: [72075186224037899] OnActivateExecutor 2024-11-21T17:51:41.479423Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Execute 2024-11-21T17:51:41.483216Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T17:51:41.483304Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T17:51:41.488408Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [2:7439791746810971651:2057], path id# [OwnerId: 72057594046644480, LocalPathId: 3], service# 2 2024-11-21T17:51:41.490359Z node 2 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [2:7439791746810971651:2057], path id# [OwnerId: 72057594046644480, LocalPathId: 3], database# /Root/Database2 2024-11-21T17:51:41.492311Z node 2 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [2:7439791746810971651:2057], database# /Root/Database2, no sysview processor 2024-11-21T17:51:41.492964Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Complete 2024-11-21T17:51:41.492991Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInit::Execute 2024-11-21T17:51:41.493079Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 
2024-11-21T17:51:41.493084Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval metrics: query count# 0 2024-11-21T17:51:41.493090Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval query tops: total query count# 0 2024-11-21T17:51:41.493095Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T17:51:41.493100Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 6, result count# 0 2024-11-21T17:51:41.493104Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 7, result count# 0 2024-11-21T17:51:41.493107Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 8, result count# 0 2024-11-21T17:51:41.493114Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 9, result count# 0 2024-11-21T17:51:41.493118Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 10, result count# 0 2024-11-21T17:51:41.493123Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: table# 11, result count# 0 2024-11-21T17:51:41.493126Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading results: tabl ... 21T17:57:02.579226Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T17:57:02.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:02.579240Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T17:57:02.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:02.579243Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T17:57:02.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:02.579246Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 9, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:02.579248Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 11, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:02.579251Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 13, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:02.579253Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 15, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:02.585365Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Complete 2024-11-21T17:57:03.052801Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Execute 2024-11-21T17:57:03.052841Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 17, partition interval end# 2024-11-21T17:57:03.000000Z, partition count# 0 2024-11-21T17:57:03.052846Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 18, partition interval end# 2024-11-21T18:00:00.000000Z, partition count# 0 2024-11-21T17:57:03.052882Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] Reset: interval end# 2024-11-21T17:57:03.000000Z 2024-11-21T17:57:03.054530Z node 11 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2024-11-21T17:57:03.059517Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Complete 2024-11-21T17:57:03.509032Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Execute 2024-11-21T17:57:03.509065Z node 
11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryResults: interval end# 2024-11-21T17:57:03.000000Z, query count# 0 2024-11-21T17:57:03.509072Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T17:57:03.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.509075Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T17:57:03.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.509078Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T17:57:03.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.509080Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T17:57:03.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.509084Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 9, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.509086Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 11, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.509089Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 13, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.509092Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 15, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:03.531374Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Complete 2024-11-21T17:57:04.019933Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Execute 2024-11-21T17:57:04.019971Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 17, partition interval end# 2024-11-21T17:57:04.000000Z, partition count# 0 2024-11-21T17:57:04.019975Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 18, partition interval end# 2024-11-21T18:00:00.000000Z, partition count# 0 2024-11-21T17:57:04.020009Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] Reset: interval end# 2024-11-21T17:57:04.000000Z 2024-11-21T17:57:04.060990Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Complete 2024-11-21T17:57:04.061067Z node 11 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2024-11-21T17:57:04.507761Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Execute 2024-11-21T17:57:04.507826Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryResults: interval end# 2024-11-21T17:57:04.000000Z, query count# 0 2024-11-21T17:57:04.507833Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T17:57:04.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:04.507837Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T17:57:04.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:04.507844Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T17:57:04.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:04.507848Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T17:57:04.000000Z, query count# 0, persisted# 0 
2024-11-21T17:57:04.507857Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 9, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:04.507860Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 11, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:04.507863Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 13, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:04.507868Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 15, interval end# 2024-11-21T18:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T17:57:04.514101Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Complete 2024-11-21T17:57:04.855578Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvApplyLabeledCounters: services count# 1 2024-11-21T17:57:04.903130Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvApplyCounters: services count# 1 iteration 6
name=topic.partition.alive_count: 32
name=topic.partition.init_duration_milliseconds_max: 4
name=topic.partition.producers_count_max: 0
name=topic.partition.read.inflight_throttled_microseconds_max: 0
name=topic.partition.read.speed_limit_bytes_per_second: 0
name=topic.partition.read.throttled_microseconds_max: 0
name=topic.partition.read_without_consumer.speed_limit_bytes_per_second: 0
name=topic.partition.read_without_consumer.throttled_microseconds_max: 0
name=topic.partition.storage_bytes_max: 0
name=topic.partition.total_count: 32
name=topic.partition.uptime_milliseconds_min: 59925
name=topic.partition.write.bytes_per_day_max: 0
name=topic.partition.write.bytes_per_hour_max: 0
name=topic.partition.write.bytes_per_minute_max: 0
name=topic.partition.write.idle_milliseconds_max: 59927
name=topic.partition.write.lag_milliseconds_max: 0
name=topic.partition.write.speed_limit_bytes_per_second: 50000000
name=topic.partition.write.throttled_microseconds_max: 0
name=topic.producers_count: 0
name=topic.reserve.limit_bytes: 0
name=topic.reserve.used_bytes: 0
name=topic.storage_bytes: 0
consumer=user:
    name=topic.partition.alive_count: 32
    name=topic.partition.committed_end_to_end_lag_milliseconds_max: 9855
    name=topic.partition.committed_lag_messages_max: 0
    name=topic.partition.committed_read_lag_milliseconds_max: 9855
    name=topic.partition.end_to_end_lag_milliseconds_max: 50077
    name=topic.partition.read.idle_milliseconds_max: 59924
    name=topic.partition.read.lag_messages_max: 0
    name=topic.partition.read.lag_milliseconds_max: 0
    name=topic.partition.read.speed_limit_bytes_per_second: 0
    name=topic.partition.read.throttled_microseconds_max: 0
    name=topic.partition.write.lag_milliseconds_max: 0
    name=topic.read.lag_messages: 0
CHECK COUNTER topic.partition.uptime_milliseconds_min wait less than 60200 got 59925 CHECK COUNTER topic.partition.alive_count wait 32 got 32 2024-11-21T17:57:04.926174Z node 12 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [12:7439792363453151056:2057], processor id# 72075186224037894, database# /Root/PQ 2024-11-21T17:57:04.930187Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7439792363433716007:2057], processor id# 72075186224037894, database# /Root/PQ 2024-11-21T17:57:04.931648Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7439792363433716007:2057], database# /Root/PQ, processor id# 72075186224037894 2024-11-21T17:57:04.934903Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T17:57:04.935143Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:57:04.935189Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T17:57:04.935222Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T17:57:04.935367Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439792363300701170:2058], processor id# 72075186224037894, database# /Root/PQ 2024-11-21T17:57:04.938723Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439792363300701170:2058], database# /Root/PQ, processor id# 72075186224037894 2024-11-21T17:57:04.935672Z node 12 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [12:7439792363453151056:2057], database# /Root/PQ, processor id# 72075186224037894 2024-11-21T17:57:05.116462Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [12:7439792363453150998:2061] 2024-11-21T17:57:05.152770Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [12:7439792363453150998:2061] 2024-11-21T17:57:05.277083Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [11:7439792363433716007:2057] 2024-11-21T17:57:05.452516Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [11:7439792363433715950:2061] 2024-11-21T17:57:05.525657Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7439792363433715950:2061] 2024-11-21T17:57:05.907542Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7439792363433716007:2057] 2024-11-21T17:57:05.907949Z node 11 :SYSTEM_VIEWS DEBUG: Send counters: service id# [11:7439792363433716007:2057], processor id# 72075186224037894, database# /Root/PQ, generation# 9790283252835103117, node id# 11, is retrying# 0, is labeled# 0 2024-11-21T17:57:05.945377Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7439792363433716007:2057], processor id# 72075186224037894, database# /Root/PQ 2024-11-21T17:57:05.945710Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7439792363433716007:2057], database# /Root/PQ, processor id# 72075186224037894 >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-5.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.1%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.1%| [TA] $(B)/ydb/core/sys_view/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] |87.1%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/py3test >> test_postgres.py::TestPGSQL::test_sql_suite[plan-select.test] [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-11.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-14.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-2.test] [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-fifo] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-fifo] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select1-2.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str 
is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-7.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-2.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-fifo] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[kick_tablets] [GOOD] |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-1.test] [GOOD] >> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-fifo] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[stop_node-std] [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test 
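The nemesis py3test blocks above and below all repeat the same third-party deprecation warnings: 'pkgutil.find_loader' should become importlib.util.find_spec(), and ast.Str with its s attribute should become ast.Constant with value. A minimal sketch of what that migration looks like in ordinary user code (illustrative only, not taken from the Werkzeug or YDB sources):

import ast
import importlib.util

src = "greeting = 'hello'"
tree = ast.parse(src)

# Old pattern (emits the DeprecationWarning seen in the log):
#   strings = [n.s for n in ast.walk(tree) if isinstance(n, ast.Str)]

# Replacement: string literals are ast.Constant nodes whose .value is a str.
strings = [
    n.value
    for n in ast.walk(tree)
    if isinstance(n, ast.Constant) and isinstance(n.value, str)
]
print(strings)  # ['hello']

# Similarly, pkgutil.find_loader("json") becomes:
spec = importlib.util.find_spec("json")  # None if the module cannot be found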
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |87.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_has_messages_counters[kick_tablets-std] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed 
in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead >> test_disk.py::TestSafeDiskBreak::test_erase_method ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test Test command err: contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead |87.2%| [TA] $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... 
results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-15.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-2.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-1.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select2-2.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-3.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-12.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test]
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v0-std] [GOOD]
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_sqs_writes_through_proxy_on_each_node[tables_format_v1-std] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-3.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test]
|87.2%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/common/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-8.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-3.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
|87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v1-fifo] [GOOD]
|87.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_recompiles_requests.py::TestSqsRecompilesRequestsForOtherQueue::test_recompiles_queries[tables_format_v0-std] [GOOD]
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/py3test
Test command err:
contrib/python/Flask/py3/flask/scaffold.py:103: DeprecationWarning: 'pkgutil.find_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec() instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1010: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1007: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/tools/python3/Lib/ast.py:587: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1014: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead
contrib/python/Werkzeug/py3/werkzeug/routing.py:1015: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead
>> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills
>> test_multinode_cluster.py::TestSqsMultinodeCluster::test_ends_request_after_kill [GOOD]
>> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-4.test] [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-13.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-6.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-7.test]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint32 [GOOD]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint32 [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_uint64_and_string [GOOD]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_full_stats [GOOD]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_list
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_uint64_and_string [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_list [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_list [GOOD]
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_profile_stats [GOOD]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats
>> test_ydb_sql.py::TestExecuteSqlWithParams::test_struct [GOOD]
>> test_ydb_scripting.py::TestExecuteScriptWithParams::test_struct [GOOD]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_with_basic_stats [GOOD]
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir
>> test_ydb_flame_graph.py::TestExecuteWithFlameGraph::test_fg_to_dir [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-4.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-4.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select2-5.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-1.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-9.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-5.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-10.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-insert.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[results-select3-11.test]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select3-14.test] [GOOD]
>> test_sql_logic.py::TestSQLLogic::test_sql_suite[plan-select1-5.test] [GOOD]
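Editor's note: the DeprecationWarnings repeated in the nemesis test chunks above all request the same three migrations ahead of Python 3.14. The following is a minimal illustrative sketch of those replacements; the helper functions are hypothetical and are not taken from the Flask or Werkzeug sources.

# Illustrative only: hypothetical helpers showing the migrations named in the
# DeprecationWarnings above (pkgutil.find_loader -> importlib.util.find_spec,
# ast.Str -> ast.Constant, attribute .s -> .value). Not Flask/Werkzeug code.
import ast
import importlib.util


def module_is_available(name: str) -> bool:
    # Old: pkgutil.find_loader(name) is not None   (deprecated, removal in 3.14)
    # New: importlib.util.find_spec(name) is not None
    return importlib.util.find_spec(name) is not None


def string_literals(source: str) -> list[str]:
    # Old: isinstance(node, ast.Str) and node.s    (deprecated, removal in 3.14)
    # New: isinstance(node, ast.Constant) with a str payload, read via node.value
    return [
        node.value
        for node in ast.walk(ast.parse(source))
        if isinstance(node, ast.Constant) and isinstance(node.value, str)
    ]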